diff --git a/core/build.gradle b/core/build.gradle
index 75c848a7ab..4892ca6457 100644
--- a/core/build.gradle
+++ b/core/build.gradle
@@ -74,6 +74,8 @@ dependencies {
         exclude group: "com.github.spotbugs"
     }
 
+    implementation 'com.squareup.okhttp3:okhttp:4.11.0'
+
     // --- Test Compile ---
     testImplementation group: "junit", name: "junit", version: junit_version
     testImplementation group: "org.hamcrest", name: "hamcrest-core", version: hamcrest_core_version  // BSD 3-clause
diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java
index 0cb0215400..41cd8b7f42 100644
--- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java
+++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java
@@ -81,6 +81,10 @@ public abstract class Adapter {
 
     @Getter
     private final String adapterName;
 
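+    // Set by data sources that support caching their exported tables in an internal store
+    // (see the Ethereum adapter); evaluated in DdlManagerImpl when the source tables are registered.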
+    public boolean canCache = false;
+
     @Target(ElementType.TYPE)
     @Retention(RetentionPolicy.RUNTIME)
diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java
index 96aea10ac9..01d4da6d15 100644
--- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java
+++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java
@@ -23,9 +23,12 @@
 import lombok.AllArgsConstructor;
 import lombok.Getter;
 import org.pf4j.ExtensionPoint;
+import org.polypheny.db.catalog.Catalog.Collation;
 import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType;
 import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
+import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation;
+import org.polypheny.db.ddl.DdlManager.FieldInformation;
 import org.polypheny.db.prepare.Context;
 import org.polypheny.db.type.PolyType;
@@ -93,6 +96,18 @@ public String getDisplayType() {
             return typeStr;
         }
 
+
+        // Conversion helpers: allow the exported columns of a data source to be reused as DDL
+        // field definitions when registering (cache) tables for this source.
+        public ColumnTypeInformation toColumnTypeInformation() {
+            return new ColumnTypeInformation( type, collectionsType, length, scale, dimension, cardinality, nullable );
+        }
+
+
+        public FieldInformation toFieldInformation() {
+            return new FieldInformation( name, toColumnTypeInformation(), PolyType.STRING_TYPES.contains( type ) ? Collation.getDefaultCollation() : null, null, physicalPosition );
+        }
+
     }
 
 
diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index 3bb729294f..fe6ea7fa5d 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -98,6 +98,8 @@ public abstract class Catalog implements ExtensionPoint {
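+    // Prefix for the names of internal cache tables; entities carrying this prefix are hidden from the user.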
+    public static final String HIDDEN_PREFIX = "__hidden__";
     public static Adapter defaultStore;
     public static Adapter defaultSource;
     public static int defaultUserId = 0;
@@ -490,9 +492,11 @@ protected final boolean isValidIdentifier( final String str ) {
      * @param ownerId The if of the owner
      * @param entityType The table type
      * @param modifiable Whether the content of the table can be modified
+     * @param cached Whether the data of the table is cached
+     * @param hidden Whether the table is hidden from the user
      * @return The id of the inserted table
      */
-    public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable );
+    public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, boolean cached, boolean hidden );
 
 
     /**
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java
index b15509935b..6a27c7960e 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java
@@ -53,6 +53,8 @@ public class CatalogTable implements CatalogObject, Comparable<CatalogTable> {
     @Getter
     public final ImmutableList<Long> connectedViews;
 
+    public final boolean cached;
+    public final boolean hidden;
 
     public CatalogTable(
@@ -66,7 +68,9 @@ public CatalogTable(
             final Long primaryKey,
             @NonNull final ImmutableList<Integer> dataPlacements,
             boolean modifiable,
-            PartitionProperty partitionProperty ) {
+            PartitionProperty partitionProperty,
+            boolean cached,
+            boolean hidden ) {
         this.id = id;
         this.name = name;
         this.fieldIds = fieldIds;
@@ -76,6 +80,8 @@ public CatalogTable(
         this.entityType = type;
         this.primaryKey = primaryKey;
         this.modifiable = modifiable;
+        this.cached = cached;
+        this.hidden = hidden;
         this.partitionProperty = partitionProperty;
 
         this.connectedViews = ImmutableList.of();
@@ -100,7 +106,9 @@ public CatalogTable(
             @NonNull final ImmutableList<Integer> dataPlacements,
             boolean modifiable,
             PartitionProperty partitionProperty,
-            ImmutableList<Long> connectedViews ) {
+            ImmutableList<Long> connectedViews,
+            boolean cached,
+            boolean hidden ) {
         this.id = id;
         this.name = name;
         this.fieldIds = fieldIds;
@@ -116,6 +124,8 @@ public CatalogTable(
         this.connectedViews = connectedViews;
 
         this.dataPlacements = ImmutableList.copyOf( dataPlacements );
+        this.cached = cached;
+        this.hidden = hidden;
 
         if ( type == EntityType.ENTITY && !modifiable ) {
             throw new RuntimeException( "Tables of table type TABLE must be modifiable!" );
         }
@@ -220,7 +230,7 @@ public CatalogTable getRenamed( String newName ) {
                 dataPlacements,
                 modifiable,
                 partitionProperty,
-                connectedViews );
+                connectedViews, cached, hidden );
     }
 
 
@@ -237,7 +247,9 @@ public CatalogTable getConnectedViews( ImmutableList<Long> newConnectedViews ) {
                 dataPlacements,
                 modifiable,
                 partitionProperty,
-                newConnectedViews );
+                newConnectedViews,
+                cached,
+                hidden );
     }
 
 
@@ -254,7 +266,9 @@ public CatalogTable getTableWithColumns( ImmutableList<Long> newColumnIds ) {
                 dataPlacements,
                 modifiable,
                 partitionProperty,
-                connectedViews );
+                connectedViews,
+                cached,
+                hidden );
     }
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java
index 776018b7df..05559f0a0c 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java
@@ -65,7 +65,7 @@ public CatalogView(
             ImmutableMap<Long, ImmutableList<Long>> underlyingTables,
             String language ) {
         super( id, name, columnIds, schemaId, databaseId, ownerId, entityType, primaryKey, dataPlacements,
-                modifiable, partitionProperty, connectedViews );
+                modifiable, partitionProperty, connectedViews, false, false );
         this.query = query;
         this.algCollation = algCollation;
         this.underlyingTables = underlyingTables;
diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index 0e6741415f..a5bb969969 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -449,9 +449,11 @@ public static DdlManager getInstance() {
      * @param ifNotExists whether to silently ignore if the table already exists
      * @param stores list of data stores on which to create a full placement for this table
      * @param placementType which placement type should be used for the initial placements
+     * @param cached whether the data of the table is cached
      * @param statement the used statement
+     * @param hidden whether the table is hidden from the user
      */
-    public abstract void createTable( long schemaId, String tableName, List<FieldInformation> columns, List<ConstraintInformation> constraints, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException;
+    public abstract void createTable( long schemaId, String tableName, List<FieldInformation> columns, List<ConstraintInformation> constraints, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, boolean cached, Statement statement, boolean hidden ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException;
 
     /**
      * Create a new view
diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
index 87a5bf4ec8..c46d14687f 100644
--- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
+++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java
@@ -34,17 +34,60 @@
 
 package org.polypheny.db.tools;
 
+import static org.polypheny.db.util.Static.RESOURCE;
+
 import com.google.common.base.Preconditions;
-import com.google.common.collect.*;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import java.math.BigDecimal;
+import java.util.AbstractList;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Deque;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 import lombok.Getter;
 import org.apache.calcite.linq4j.Ord;
 import org.apache.calcite.linq4j.function.Experimental;
 import org.bson.BsonValue;
-import org.polypheny.db.algebra.*;
+import org.polypheny.db.algebra.AlgCollation;
+import org.polypheny.db.algebra.AlgCollations;
+import org.polypheny.db.algebra.AlgDistribution;
+import org.polypheny.db.algebra.AlgFieldCollation;
+import org.polypheny.db.algebra.AlgNode;
 import org.polypheny.db.algebra.constant.Kind;
 import org.polypheny.db.algebra.constant.SemiJoinType;
-import org.polypheny.db.algebra.core.*;
+import org.polypheny.db.algebra.core.Aggregate;
+import org.polypheny.db.algebra.core.AggregateCall;
+import org.polypheny.db.algebra.core.AlgFactories;
 import org.polypheny.db.algebra.core.AlgFactories.ScanFactory;
+import org.polypheny.db.algebra.core.CorrelationId;
+import org.polypheny.db.algebra.core.Filter;
+import org.polypheny.db.algebra.core.Intersect;
+import org.polypheny.db.algebra.core.Join;
+import org.polypheny.db.algebra.core.JoinAlgType;
+import org.polypheny.db.algebra.core.Match;
+import org.polypheny.db.algebra.core.Minus;
+import org.polypheny.db.algebra.core.Modify.Operation;
+import org.polypheny.db.algebra.core.Project;
+import org.polypheny.db.algebra.core.Scan;
+import org.polypheny.db.algebra.core.SemiJoin;
+import org.polypheny.db.algebra.core.Sort;
+import org.polypheny.db.algebra.core.Union;
+import org.polypheny.db.algebra.core.Values;
 import org.polypheny.db.algebra.fun.AggFunction;
 import org.polypheny.db.algebra.logical.document.LogicalDocumentProject;
 import org.polypheny.db.algebra.logical.document.LogicalDocumentScan;
@@ -53,6 +96,7 @@
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgProject;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan;
 import org.polypheny.db.algebra.logical.relational.LogicalFilter;
+import org.polypheny.db.algebra.logical.relational.LogicalModify;
 import org.polypheny.db.algebra.logical.relational.LogicalProject;
 import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
 import org.polypheny.db.algebra.operators.OperatorName;
@@ -63,8 +107,24 @@
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.QueryLanguage;
 import org.polypheny.db.nodes.Operator;
-import org.polypheny.db.plan.*;
-import org.polypheny.db.rex.*;
+import org.polypheny.db.plan.AlgOptCluster;
+import org.polypheny.db.plan.AlgOptPredicateList;
+import org.polypheny.db.plan.AlgOptSchema;
+import org.polypheny.db.plan.AlgOptTable;
+import org.polypheny.db.plan.AlgOptUtil;
+import org.polypheny.db.plan.Context;
+import org.polypheny.db.plan.Contexts;
+import org.polypheny.db.prepare.Prepare.CatalogReader;
+import org.polypheny.db.rex.RexBuilder;
+import org.polypheny.db.rex.RexCall;
+import org.polypheny.db.rex.RexCorrelVariable;
+import org.polypheny.db.rex.RexExecutor;
+import org.polypheny.db.rex.RexInputRef;
+import org.polypheny.db.rex.RexLiteral;
+import org.polypheny.db.rex.RexNode;
+import org.polypheny.db.rex.RexShuttle;
+import org.polypheny.db.rex.RexSimplify;
+import org.polypheny.db.rex.RexUtil;
 import org.polypheny.db.runtime.Hook;
 import org.polypheny.db.runtime.PolyCollections.PolyDictionary;
 import org.polypheny.db.schema.ModelTrait;
@@ -72,17 +132,21 @@
 import org.polypheny.db.schema.graph.PolyNode;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.type.PolyType;
-import org.polypheny.db.util.*;
+import org.polypheny.db.util.DateString;
+import org.polypheny.db.util.Holder;
+import org.polypheny.db.util.ImmutableBitSet;
+import org.polypheny.db.util.ImmutableIntList;
+import org.polypheny.db.util.ImmutableNullableList;
+import org.polypheny.db.util.Litmus;
+import org.polypheny.db.util.NlsString;
+import org.polypheny.db.util.Pair;
+import org.polypheny.db.util.TimeString;
+import org.polypheny.db.util.TimestampString;
+import org.polypheny.db.util.Util;
+import org.polypheny.db.util.ValidatorUtil;
 import org.polypheny.db.util.mapping.Mapping;
 import org.polypheny.db.util.mapping.Mappings;
-import javax.annotation.Nonnull;
-import java.math.BigDecimal;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import static org.polypheny.db.util.Static.RESOURCE;
-
 
 /**
  * Builder for relational expressions.
@@ -2501,6 +2565,16 @@ public void clear() {
     }
 
 
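+    /**
+     * Creates a {@link LogicalModify} that inserts the relational expression on top of the stack into the given table.
+     */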
+    public AlgBuilder insert( AlgOptTable table ) {
+        LogicalModify modify = LogicalModify.create( table, (CatalogReader) algOptSchema, stack.pop().alg, Operation.INSERT, null, null, false );
+        stack.add( new Frame( modify ) );
+        return this;
+    }
+
+
     /**
      * Information necessary to create a call to an aggregate function.
      *
diff --git a/core/src/main/java/org/polypheny/db/util/BsonUtil.java b/core/src/main/java/org/polypheny/db/util/BsonUtil.java
index f4aabc18d7..c26cc9964b 100644
--- a/core/src/main/java/org/polypheny/db/util/BsonUtil.java
+++ b/core/src/main/java/org/polypheny/db/util/BsonUtil.java
@@ -20,6 +20,7 @@
 import java.io.InputStream;
 import java.io.PushbackInputStream;
 import java.math.BigDecimal;
+import java.math.BigInteger;
 import java.sql.Date;
 import java.sql.Time;
 import java.sql.Timestamp;
@@ -354,6 +355,8 @@ private static BsonValue handleBigInt( Object obj ) {
 
         if ( obj instanceof Long ) {
             return new BsonInt64( (Long) obj );
+        } else if ( obj instanceof BigInteger ) {
+            return new BsonInt64( ((BigInteger) obj).longValue() );
         } else {
             return new BsonInt64( (Integer) obj );
         }
diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
index e44764358a..d025ef2fff 100644
--- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
+++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java
@@ -348,7 +348,7 @@ public CatalogTable getTable( long databaseId, String schemaName, String tableName ) throws UnknownTableException {
 
 
     @Override
-    public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) {
+    public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, boolean cached, boolean hidden ) {
         throw new NotImplementedException();
     }
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 0578ec1e46..0f4c4245bd 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -243,7 +243,7 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapterType, Map<String, String> settings ) {
             exportedColumns = ((DataSource) adapter).getExportedColumns();
         } catch ( Exception e ) {
             AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() );
-            throw new RuntimeException( "Could not deploy adapter", e );
+            throw new RuntimeException( "Could not deploy adapter: " + e.getMessage(), e );
         }
 
         // Create table, columns etc.
         for ( Map.Entry<String, List<ExportedColumn>> entry : exportedColumns.entrySet() ) {
@@ -257,7 +257,7 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapterType, Map<String, String> settings ) {
                 tableName += i;
             }
 
-            long tableId = catalog.addTable( tableName, 1, 1, EntityType.SOURCE, !((DataSource) adapter).isDataReadOnly() );
+            long tableId = catalog.addTable( tableName, 1, 1, EntityType.SOURCE, !((DataSource) adapter).isDataReadOnly(), adapter.canCache, false );
             List<Long> primaryKeyColIds = new ArrayList<>();
             int colPos = 1;
             String physicalSchemaName = null;
@@ -2171,7 +2171,7 @@ private List<String> getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList ) {
 
 
     @Override
-    public void createTable( long schemaId, String name, List<FieldInformation> fields, List<ConstraintInformation> constraints, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException {
+    public void createTable( long schemaId, String name, List<FieldInformation> fields, List<ConstraintInformation> constraints, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, boolean cached, Statement statement, boolean hidden ) throws EntityAlreadyExistsException {
         name = adjustNameIfNeeded( name, schemaId );
 
         try {
@@ -2209,7 +2209,9 @@ public void createTable( long schemaId, String name, List<FieldInformation> fields,
                 schemaId,
                 statement.getPrepareContext().getCurrentUserId(),
                 EntityType.ENTITY,
-                true );
+                true,
+                cached,
+                hidden );
 
             // Initially create DataPlacement containers on every store the table should be placed.
             stores.forEach( store -> catalog.addDataPlacement( store.getAdapterId(), tableId ) );
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
index 10fbe8baba..36faafce86 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
@@ -638,7 +638,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, CatalogTable sourceTable,
         CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( sourceTable.primaryKey );
 
         // Check Lists
-        List<CatalogColumnPlacement> targetColumnPlacements = new LinkedList<>();
+        List<CatalogColumnPlacement> targetColumnPlacements = new ArrayList<>();
         for ( CatalogColumn catalogColumn : columns ) {
             targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) );
         }
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
index 6325554016..6e7fe4e40f 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java
@@ -61,12 +61,15 @@
 import org.polypheny.db.catalog.entity.CatalogCollectionPlacement;
 import org.polypheny.db.catalog.entity.CatalogColumn;
 import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
+import org.polypheny.db.catalog.entity.CatalogDataPlacement;
 import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.catalog.entity.CatalogGraphMapping;
 import org.polypheny.db.catalog.entity.CatalogGraphPlacement;
+import org.polypheny.db.catalog.entity.CatalogPartition;
 import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
 import org.polypheny.db.catalog.entity.CatalogSchema;
 import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.UnknownTableException;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.languages.OperatorRegistry;
 import org.polypheny.db.languages.QueryLanguage;
@@ -185,6 +188,12 @@ public RoutedAlgBuilder handleScan(
             long partitionId,
             NamespaceType namespaceType ) {
+        CatalogTable table = Catalog.getInstance().getTable( tableId );
+
+        if ( table.cached ) {
+            return handleCached( builder, statement, storeUniqueName, physicalSchemaName, namespaceType, table );
+        }
+
         AlgNode node = builder.scan( ImmutableList.of(
                 PolySchemaBuilder.buildAdapterSchemaName( storeUniqueName, logicalSchemaName, physicalSchemaName ),
                 logicalTableName + "_" + partitionId ) ).build();
@@ -208,6 +217,36 @@ public RoutedAlgBuilder handleScan(
     }
 
 
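+    /**
+     * Routes a scan on a cached source table to the corresponding hidden cache table,
+     * i.e. the table with the same name prefixed by {@link Catalog#HIDDEN_PREFIX}.
+     */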
+    private RoutedAlgBuilder handleCached( RoutedAlgBuilder builder, Statement statement, String storeUniqueName, String physicalSchemaName, NamespaceType namespaceType, CatalogTable table ) {
+        // TODO: take the current cache status into account
+        CatalogTable cached;
+        try {
+            cached = Catalog.getInstance().getTable( table.namespaceId, Catalog.HIDDEN_PREFIX + table.name );
+        } catch ( UnknownTableException e ) {
+            throw new RuntimeException( e );
+        }
+
+        CatalogDataPlacement placement = Catalog.getInstance().getDataPlacements( cached.id ).get( 0 );
+        CatalogPartition partition = Catalog.getInstance().getPartitionsByTable( cached.id ).get( 0 );
+
+        return handleScan(
+                builder,
+                statement,
+                cached.id,
+                placement.getAdapterName(),
+                cached.getNamespaceName(),
+                cached.name,
+                physicalSchemaName,
+                PolySchemaBuilder.buildAdapterSchemaName( storeUniqueName, cached.getNamespaceName(), physicalSchemaName ),
+                partition.id,
+                namespaceType );
+    }
+
+
     private AlgDataType getDocumentRowType() {
         // label table for cross model queries
         final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java
index 7bb90f93b2..b7498a4051 100644
--- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java
+++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java
@@ -19,6 +19,7 @@
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
 import java.math.BigDecimal;
+import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.GregorianCalendar;
@@ -312,6 +313,9 @@ public static CottontailGrpc.Literal toData( Object value, PolyType actualType,
                 if ( value instanceof Long ) {
                     return builder.setLongData( ((Long) value) ).build();
                 }
+                if ( value instanceof BigInteger ) {
+                    return builder.setLongData( ((BigInteger) value).longValue() ).build();
+                }
                 break;
             }
             case INTEGER:
diff --git a/plugins/ethereum-adapter/build.gradle b/plugins/ethereum-adapter/build.gradle
index ab2c2553e3..ee0abd44ce 100644
--- a/plugins/ethereum-adapter/build.gradle
+++ b/plugins/ethereum-adapter/build.gradle
@@ -2,19 +2,24 @@ group "org.polypheny"
 
 
 dependencies {
+    implementation project(path: ':core')
     compileOnly project(":core")
-    // Apache 2.0
+    implementation project(':webui')
+
+    // Apache 2.0
     implementation(group: "org.web3j", name: "core", version: web3j_version) {
         exclude(group: "org.slf4j")
     }  // Apache 2.0
 
     // Apache 2.0
+    implementation 'org.json:json:20210307'
 
     // --- Test Compile ---
     testImplementation project(path: ":core", configuration: "tests")  // BSD 3-clause
+
 }
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java
new file mode 100644
index 0000000000..1d11e15c30
--- /dev/null
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.ethereum;
+
+public class CacheException extends RuntimeException {
+
+    public CacheException( String message ) {
+        super( message );
+    }
+
+
+    public CacheException( String message, Throwable cause ) {
+        super( message, cause );
+    }
+
+}
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java
new file mode 100644
index 0000000000..4c0ff95937
--- /dev/null
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.ethereum;
+
+import java.math.BigInteger;
+
+public class CachingStatus {
+
+    public float percent;
+    public ProcessingState state;
+    public BigInteger fromBlock;
+    public BigInteger toBlock;
+    public BigInteger currentBlock;
+    public BigInteger currentEndBlock;
+    public int sourceAdapterId;
+    public String errorMessage;
+
+
+    public enum ProcessingState {
+        INITIALIZED, PROCESSING, DONE, ERROR
+    }
+
+}
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
new file mode 100644
index 0000000000..100a4383c4
--- /dev/null
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.ethereum;
+
+import java.math.BigInteger;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+import lombok.extern.slf4j.Slf4j;
+import org.polypheny.db.adapter.DataSource.ExportedColumn;
+import org.polypheny.db.adapter.ethereum.CachingStatus.ProcessingState;
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.ddl.DdlManager.FieldInformation;
+import org.web3j.protocol.Web3j;
+import org.web3j.protocol.http.HttpService;
+
+/**
+ * ContractCache serves as a contract-level cache that holds a set of smart contracts along with their corresponding events.
+ */
+@Slf4j
+public class ContractCache {
+
+    public final int sourceAdapterId;
+    private final int targetAdapterId;
+    private final Map<String, List<ExportedColumn>> columns;
+    private final int batchSizeInBlocks;
+    private final BigInteger fromBlock;
+    private final BigInteger toBlock;
+    private BigInteger currentBlock;
+    private boolean hasError = false;
+    private String errorMessage;
+
+    private final Map<String, EventCache> cache = new ConcurrentHashMap<>();  // one event cache per smart contract
+    protected final Web3j web3j;
+
+
+    public ContractCache( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, BigInteger fromBlock, BigInteger toBlock, Map<String, List<EventData>> eventsPerContract, Map<String, List<ExportedColumn>> columns ) {
+        this.sourceAdapterId = sourceAdapterId;
+        this.targetAdapterId = targetAdapterId;
+        this.columns = columns;
+        this.batchSizeInBlocks = batchSizeInBlocks;
+        this.fromBlock = fromBlock;
+        this.currentBlock = fromBlock;
+        this.toBlock = toBlock;
+        this.web3j = Web3j.build( new HttpService( clientUrl ) );
+        eventsPerContract.forEach( ( address, events ) -> this.cache.put( address, new EventCache( events, web3j ) ) );
+    }
+
+
+    public void initializeCaching() {
+        // register table in schema
+        this.createSchema();
+        // start caching
+        this.startCaching();
+    }
+
+
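+    // Registers one hidden table per cached event on the target adapter, deriving the field
+    // definitions from the exported columns of the source adapter.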
+    private void createSchema() {
+        Map<String, List<FieldInformation>> columnInformations = columns.entrySet()
+                .stream()
+                .collect(
+                        Collectors.toMap(
+                                table -> Catalog.HIDDEN_PREFIX + table.getKey(),  // we prepend this to hide the table from the user
+                                table -> table.getValue()
+                                        .stream()
+                                        .map( ExportedColumn::toFieldInformation )
+                                        .collect( Collectors.toList() ) ) );
+
+        EventCacheManager.getInstance().createTables( columnInformations, targetAdapterId );
+    }
+
+
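+    // Walks through the configured block range in batches of batchSizeInBlocks and lets the
+    // EventCache of every contract fetch and persist the events of the current batch.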
+    public void startCaching() {
+        log.debug( "Starting to cache events" );
+        currentBlock = fromBlock;
+
+        while ( currentBlock.compareTo( toBlock ) <= 0 ) {
+            BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) );
+            if ( endBlock.compareTo( toBlock ) > 0 ) {
+                endBlock = toBlock;
+            }
+
+            log.debug( "Caching blocks " + currentBlock + " to " + endBlock );
+
+            for ( Map.Entry<String, EventCache> entry : cache.entrySet() ) {
+                String address = entry.getKey();
+                EventCache eventCache = entry.getValue();
+                try {
+                    eventCache.addToCache( address, currentBlock, endBlock );
+                } catch ( CacheException e ) {
+                    log.error( "Error occurred while adding to cache: " + e.getMessage() );
+                    hasError = true;
+                    errorMessage = e.getMessage();
+                    throw e;
+                } catch ( Throwable t ) {
+                    log.error( "Unexpected error during caching: " + t.getMessage(), t );
+                    hasError = true;
+                    errorMessage = t.getMessage();
+                    return;
+                }
+            }
+
+            currentBlock = endBlock.add( BigInteger.ONE );  // avoid overlapping block numbers
+        }
+    }
+
+
+    public CachingStatus getStatus() {
+        CachingStatus status = new CachingStatus();
+        BigInteger totalBlocks = toBlock.subtract( fromBlock ).add( BigInteger.ONE );
+        status.fromBlock = fromBlock;
+        status.toBlock = toBlock;
+        status.currentBlock = currentBlock;
+        status.currentEndBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) );
+        status.sourceAdapterId = sourceAdapterId;
+
+        if ( currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ).compareTo( toBlock ) > 0 ) {
+            status.percent = 100;
+            status.state = CachingStatus.ProcessingState.DONE;
+            status.currentBlock = null;
+            status.currentEndBlock = null;
+        } else {
+            BigInteger processedBlocks = currentBlock.subtract( fromBlock );
+            status.percent = Math.round( (processedBlocks.floatValue() / totalBlocks.floatValue() * 100) * 100 ) / 100f;  // rounded to two decimal places
+
+            if ( status.percent == 0 ) {
+                status.state = CachingStatus.ProcessingState.INITIALIZED;
+            } else {
+                status.state = CachingStatus.ProcessingState.PROCESSING;
+            }
+        }
+
+        if ( hasError ) {
+            status.state = ProcessingState.ERROR;
+            status.errorMessage = errorMessage;
+        }
+
+        return status;
+    }
+
+}
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
new file mode 100644
index 0000000000..bd96412e9b
--- /dev/null
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -0,0 +1,599 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.ethereum;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.math.BigInteger;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.stream.Collectors;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.pf4j.Extension;
+import org.polypheny.db.adapter.Adapter.AdapterProperties;
+import org.polypheny.db.adapter.Adapter.AdapterSettingBoolean;
+import org.polypheny.db.adapter.Adapter.AdapterSettingInteger;
+import org.polypheny.db.adapter.Adapter.AdapterSettingString;
+import org.polypheny.db.adapter.DataSource;
+import org.polypheny.db.adapter.DeployMode;
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.CatalogAdapter;
+import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
+import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
+import org.polypheny.db.information.InformationGroup;
+import org.polypheny.db.information.InformationTable;
+import org.polypheny.db.prepare.Context;
+import org.polypheny.db.schema.Schema;
+import org.polypheny.db.schema.SchemaPlus;
+import org.polypheny.db.schema.Table;
+import org.polypheny.db.transaction.PolyXid;
+import org.polypheny.db.type.PolyType;
+import org.web3j.abi.datatypes.Event;
+import org.web3j.protocol.Web3j;
+import org.web3j.protocol.http.HttpService;
+
+
+@Slf4j
+@Extension
+@AdapterProperties(
+        name = "Ethereum",
+        description = "An adapter for querying the Ethereum blockchain. It uses the ethereum JSON-RPC API. Currently, this adapter only supports read operations.",
+        usedModes = DeployMode.REMOTE)
+@AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1)
+@AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true)
+@AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true)
+@AdapterSettingBoolean(name = "EventDataRetrieval", description = "Enables or disables the retrieval of event data. When set to true, all subsequent adapter settings will be taken into account.", defaultValue = true, position = 4, modifiable = true)
+@AdapterSettingString(name = "SmartContractAddresses", description = "Comma separated addresses of the smart contracts", defaultValue = "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984, 0x6b175474e89094c44da98b954eedeac495271d0f", position = 5, modifiable = true)  // Event Data: Add annotation
+@AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 6, modifiable = true)
+@AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contracts)", defaultValue = "17669045", position = 7, modifiable = true)
+@AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contracts)", defaultValue = "17669155", position = 8, modifiable = true)
+@AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 9, modifiable = true)
+@AdapterSettingInteger(name = "batchSizeInBlocks", description = "Batch size for caching in blocks", defaultValue = 50, position = 10, modifiable = true)
+@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true)
+@AdapterSettingBoolean(name = "UseManualABI", description = "Use a manually provided contract ABI", defaultValue = false, position = 12, modifiable = true)
+@AdapterSettingString(name = "ContractABI", description = "Contract ABI", defaultValue = "", position = 13, modifiable = true, required = false)
+@AdapterSettingString(name = "ContractName", description = "Contract name", defaultValue = "", position = 14, modifiable = true, required = false)
+public class EthereumDataSource extends DataSource {
+
+    public static final String SCHEMA_NAME = "public";
+
+    @Getter
+    private final boolean eventDataRetrieval;
+    private String clientURL;
+    @Getter
+    private int blocks;
+    @Getter
+    private boolean experimentalFiltering;
+    private EthereumSchema currentSchema;
+    @Getter
+    final List<String> smartContractAddresses;
+    private final String etherscanApiKey;
+    @Getter
+    private final BigInteger fromBlock;
+    @Getter
+    private final BigInteger toBlock;
+    private final int batchSizeInBlocks;
+
+    private final Map<String, EventData> eventDataMap;
+    private Boolean caching;
+    private String cachingAdapterTargetName;
+
+    private Map<String, List<ExportedColumn>> map;
+
+    private final boolean useManualABI;
+    private final String contractABI;
+    private final String contractName;
+
+
+    public EthereumDataSource( final int storeId, final String uniqueName, final Map<String, String> settings ) {
+        super( storeId, uniqueName, settings, true );
+        setClientURL( settings.get( "ClientUrl" ) );
+        this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
+        this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
+        this.eventDataRetrieval = Boolean.parseBoolean( settings.get( "EventDataRetrieval" ) );
+        String smartContractAddressesStr = settings.get( "SmartContractAddresses" );
+        this.smartContractAddresses = Arrays.stream( smartContractAddressesStr.split( "," ) )
+                .map( String::trim )
+                .collect( Collectors.toList() );
+        this.etherscanApiKey = settings.get( "EtherscanApiKey" );
+        this.fromBlock = new BigInteger( settings.get( "fromBlock" ) );
+        this.toBlock = new BigInteger( settings.get( "toBlock" ) );
+        this.batchSizeInBlocks = Integer.parseInt( settings.get( "batchSizeInBlocks" ) );
+        this.eventDataMap = new HashMap<>();
+        this.caching = Boolean.parseBoolean( settings.get( "Caching" ) );
+        this.canCache = this.caching;
+        this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" );
+        this.useManualABI = Boolean.parseBoolean( settings.get( "UseManualABI" ) );
+        this.contractABI = settings.get( "ContractABI" );
+        this.contractName = settings.get( "ContractName" );
+        // todo DL
+        new Thread( () -> {
+            createInformationPage();
+            enableInformationPage();
+        } ).start();
+    }
+
+
+    private void setClientURL( String clientURL ) {
+        Web3j web3j = Web3j.build( new HttpService( clientURL ) );
+        try {
+            BigInteger latest = web3j.ethBlockNumber().send().getBlockNumber();
+        } catch ( Exception e ) {
+            throw new RuntimeException( "Unable to connect to the client URL '" + clientURL + "'" );
+        }
+        web3j.shutdown();
+        this.clientURL = clientURL;
+    }
+
+
+    @Override
+    public void createNewSchema( SchemaPlus rootSchema, String name ) {
+        currentSchema = new EthereumSchema( this.clientURL );
+    }
+
+
+    @Override
+    public Table createTableSchema( CatalogTable combinedTable, List<CatalogColumnPlacement> columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+        return currentSchema.createBlockchainTable( combinedTable, columnPlacementsOnStore, this );
+    }
+
+
+    @Override
+    public Schema getCurrentSchema() {
+        return currentSchema;
+    }
+
+
+    @Override
+    public void truncate( Context context, CatalogTable table ) {
+        throw new RuntimeException( "Blockchain adapter does not support truncate" );
+    }
+
+
+    @Override
+    public Map<String, List<ExportedColumn>> getExportedColumns() {
+        // Compute the exported columns only once; later calls return the cached map.
+        if ( map != null ) {
+            return map;
+        }
+
+        Map<String, List<ExportedColumn>> map = new HashMap<>();
+
+        String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" };
+        PolyType[] blockTypes = { PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.TIMESTAMP };
+        createExportedColumns( "block", map, blockColumns, blockTypes );
+
+        String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" };
+        PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
+        createExportedColumns( "transaction", map, transactionColumns, transactionTypes );
+
+        if ( !eventDataRetrieval ) {
+            this.map = map;
+            return map;
+        }
+
+        String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" };
+        PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.VARCHAR };
+        createExportedColumnsForEvents( map, commonEventColumns, commonEventTypes );
+
+        if ( Boolean.TRUE.equals( caching ) ) {
+            // Disable caching to prevent multiple unnecessary attempts to cache the same data.
+            caching = false;
+            this.map = map;
+            Map<String, List<ExportedColumn>> columns = new HashMap<>( map );  // create new map instance for caching
+            columns.remove( "block" );
+            columns.remove( "transaction" );
+
+            // todo DL: fix concurrency issues (dirty solution right now)
+            new Thread( () -> {
+                try {
+                    Thread.sleep( 1200 );
+                } catch ( InterruptedException e ) {
+                    throw new RuntimeException( e );
+                }
+                try {
+                    Map<String, List<EventData>> eventsPerContract = eventDataMap.values().stream()
+                            .collect( Collectors.groupingBy(
+                                    EventData::getSmartContractAddress,
+                                    Collectors.toList()
+                            ) );
+                    CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName );
+                    EventCacheManager.getInstance()
+                            .register( getAdapterId(), cachingAdapter.id, clientURL, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, columns )
+                            .initializeCaching();
+                } catch ( UnknownAdapterException e ) {
+                    throw new RuntimeException( e );
+                }
+            } ).start();
+        }
+
+        return map;
+    }
+
+
+    @Override
+    public boolean prepare( PolyXid xid ) {
+        log.debug( "Blockchain Store does not support prepare()." );
+        return true;
+    }
+
+
+    @Override
+    public void commit( PolyXid xid ) {
+        log.debug( "Blockchain Store does not support commit()." );
+    }
+
+
+    @Override
+    public void rollback( PolyXid xid ) {
+        log.debug( "Blockchain Store does not support rollback()." );
+    }
+
+
+    @Override
+    public void shutdown() {
+        removeInformationPage();
+    }
+
+
+    @Override
+    protected void reloadSettings( List<String> updatedSettings ) {
+        if ( updatedSettings.contains( "ClientUrl" ) ) {
+            setClientURL( settings.get( "ClientUrl" ) );
+        }
+        if ( updatedSettings.contains( "Blocks" ) ) {
+            this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
+        }
+        if ( updatedSettings.contains( "ExperimentalFiltering" ) ) {
+            this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
+        }
+    }
+
+
+    protected void createInformationPage() {
+        for ( Map.Entry<String, List<ExportedColumn>> entry : getExportedColumns().entrySet() ) {
+            InformationGroup group = new InformationGroup(
+                    informationPage,
+                    entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName );
+
+            InformationTable table = new InformationTable(
+                    group,
+                    Arrays.asList( "Position", "Column Name", "Type", "Primary" ) );
+            for ( ExportedColumn exportedColumn : entry.getValue() ) {
+                table.addRow(
+                        exportedColumn.physicalPosition,
+                        exportedColumn.name,
+                        exportedColumn.getDisplayType(),
+                        exportedColumn.primary ? "✔" : "" );
+            }
+            informationElements.add( table );
+            informationGroups.add( group );
+        }
+    }
+
+
+    private void createExportedColumns( String physicalTableName, Map<String, List<ExportedColumn>> map, String[] columns, PolyType[] types ) {
+        PolyType collectionsType = null;
+        Integer length = 300;
+        Integer scale = null;
+        Integer dimension = null;
+        Integer cardinality = null;
+        int position = 0;
+        List<ExportedColumn> cols = new ArrayList<>();
+        for ( String col : columns ) {
+            cols.add( new ExportedColumn(
+                    col,
+                    types[position],
+                    collectionsType,
+                    length,
+                    scale,
+                    dimension,
+                    cardinality,
+                    false,
+                    SCHEMA_NAME,
+                    physicalTableName,
+                    col,
+                    position,
+                    position == 0 ) );
+            position++;
+        }
+        map.put( physicalTableName, cols );
+    }
+
+
+    // Builds the exported event columns for every configured smart contract: the ABI is taken
+    // from the manual ABI setting or fetched from Etherscan, and one table per event is exposed.
+    private void createExportedColumnsForEvents( Map<String, List<ExportedColumn>> map, String[] commonEventColumns, PolyType[] commonEventTypes ) {
+        for ( String address : smartContractAddresses ) {
+            String contractName = null;
+            List<JSONObject> contractEvents = null;
+            if ( useManualABI && !contractABI.isEmpty() && !this.contractName.isEmpty() ) {
+                if ( smartContractAddresses.size() > 1 ) {
+                    throw new IllegalArgumentException( "Only one smart contract address should be provided when using a manual ABI." );
+                }
+                JSONArray abiArray = new JSONArray( contractABI );
+                contractEvents = getEventsFromABIArray( abiArray );
+                contractName = this.contractName;
+            } else {
+                try {
+                    contractName = callWithExponentialBackoff( () -> getContractName( address ) );
+                    contractEvents = callWithExponentialBackoff( () -> getEventsFromABI( etherscanApiKey, address ) );
+                } catch ( Exception e ) {
+                    throw new RuntimeException( e );
+                }
+            }
+
+            for ( JSONObject event : contractEvents ) {
+                if ( event.getBoolean( "anonymous" ) ) {
+                    continue;
+                }
+                String eventName = event.getString( "name" );  // to match it later with catalogTable.name
+                String compositeKey = contractName + "_" + eventName;  // e.g. Uni_Transfer and Dai_Transfer
+                JSONArray abiInputs = event.getJSONArray( "inputs" );  // indexed and non-indexed values (topics + data)
+
+                eventDataMap.put( compositeKey.toLowerCase(), new EventData( eventName, contractName, address, abiInputs ) );
+            }
+        }
+
+        PolyType collectionsType = null;
+        Integer scale = null;
+        Integer dimension = null;
+        Integer cardinality = null;
+
+        // Event Data: Creating columns for each event for specified smart contract based on ABI
+        for ( Map.Entry<String, EventData> eventEntry : eventDataMap.entrySet() ) {
+            String compositeEventName = eventEntry.getValue().getCompositeName();
+            JSONArray abiInputs = eventEntry.getValue().getAbiInputs();  // Get the data
+            List<ExportedColumn> eventDataCols = new ArrayList<>();
+            int inputPosition = 0;
+
+            for ( int i = 0; i < abiInputs.length(); i++ ) {
+                JSONObject inputObject = abiInputs.getJSONObject( i );
+                String col = inputObject.getString( "name" );
+                PolyType type = convertToPolyType( inputObject.getString( "type" ) );  // convert event types to PolyType
+                eventDataCols.add( new ExportedColumn(
+                        col,
+                        type,
+                        collectionsType,
+                        getLengthForType( type ),
+                        scale,
+                        dimension,
+                        cardinality,
+                        false,
+                        SCHEMA_NAME,
+                        compositeEventName,  // event name
+                        col,
+                        inputPosition,
+                        inputPosition == 0
+                ) );
+                inputPosition++;
+            }
+
+            // Adding common columns
+            for ( int i = 0; i < commonEventColumns.length; i++ ) {
+                String columnName = commonEventColumns[i];
+                PolyType columnType = commonEventTypes[i];
+                eventDataCols.add( new ExportedColumn(
+                        columnName,
+                        columnType,
+                        collectionsType,
+                        getLengthForType( columnType ),
+                        scale,
+                        dimension,
+                        cardinality,
+                        false,
+                        SCHEMA_NAME,
+                        compositeEventName,  // event name
+                        columnName,
+                        inputPosition,
+                        inputPosition == 0
+                ) );
+                inputPosition++;
+            }
+
+            map.put( compositeEventName, eventDataCols );
+        }
+    }
+
+
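+    // Fetches the contract ABI via the Etherscan HTTP API and keeps only its event definitions.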
+    protected List<JSONObject> getEventsFromABI( String etherscanApiKey, String contractAddress ) {
+        List<JSONObject> events = new ArrayList<>();
+        try {
+            URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getabi&address=" + contractAddress + "&apikey=" + etherscanApiKey );
+            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+            connection.setRequestMethod( "GET" );
+            int responseCode = connection.getResponseCode();
+            if ( responseCode == HttpURLConnection.HTTP_OK ) {
+                BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
+                String inputLine;
+                StringBuilder response = new StringBuilder();
+
+                while ( (inputLine = in.readLine()) != null ) {
+                    response.append( inputLine );
+                }
+                in.close();
+
+                JSONObject jsonObject = new JSONObject( response.toString() );
+                String apiStatus = jsonObject.getString( "status" );
+
+                if ( "0".equals( apiStatus ) ) {
+                    String errorMessage = jsonObject.getString( "message" );
+                    throw new RuntimeException( "Etherscan API error getting abi from contract: " + errorMessage );
+                }
+
+                String abi = jsonObject.getString( "result" );
+                JSONArray abiArray = new JSONArray( abi );  // Convert ABI string to JSON Array
+                for ( int i = 0; i < abiArray.length(); i++ ) {
+                    JSONObject obj = abiArray.getJSONObject( i );
+                    // Check if the current object is an event
+                    if ( obj.getString( "type" ).equals( "event" ) ) {
+                        events.add( obj );
+                    }
+                }
+            }
+
+        } catch ( IOException e ) {
+            throw new RuntimeException( "Network or IO error occurred", e );
+        }
+
+        return events;
+    }
+
+
+    protected List<JSONObject> getEventsFromABIArray( JSONArray abiArray ) {
+        List<JSONObject> events = new ArrayList<>();
+
+        // Loop through the ABI
+        for ( int i = 0; i < abiArray.length(); i++ ) {
+            JSONObject item = abiArray.getJSONObject( i );
+
+            // Check if the item is of type 'event'
+            if ( item.has( "type" ) && "event".equals( item.getString( "type" ) ) ) {
+                events.add( item );
+            }
+        }
+
+        return events;
+    }
+
+
+    private String getContractName( String contractAddress ) {
+        try {
+            URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getsourcecode&address=" + contractAddress + "&apikey=" + etherscanApiKey );
+            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+            connection.setRequestMethod( "GET" );
+            int responseCode = connection.getResponseCode();
+            if ( responseCode == HttpURLConnection.HTTP_OK ) {
+                BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
+                String inputLine;
+                StringBuilder response = new StringBuilder();
+
+                while ( (inputLine = in.readLine()) != null ) {
+                    response.append( inputLine );
+                }
+                in.close();
+
+                JSONObject jsonObject = new JSONObject( response.toString() );
+                String apiStatus = jsonObject.getString( "status" );
+
+                if ( "0".equals( apiStatus ) ) {
+                    String errorMessage = jsonObject.getString( "message" );
+                    throw new RuntimeException( "Etherscan API error getting contract name: " + errorMessage );
+                }
+
+                JSONArray resultArray = jsonObject.getJSONArray( "result" );
+                if ( resultArray.length() > 0 ) {
+                    JSONObject contractObject = resultArray.getJSONObject( 0 );
+                    return contractObject.getString( "ContractName" );
+                }
+            }
+
+        } catch ( IOException e ) {
+            throw new RuntimeException( "Network or IO error occurred", e );
+        }
+        return null;
+    }
+
+
+    protected Event getEventFromCatalogTable( String catalogTableName ) {
+        if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) {
+            return null;
+        }
+        return eventDataMap.get( catalogTableName ).getEvent();
+    }
+
+
+    protected String getSmartContractAddressFromCatalogTable( String catalogTableName ) {
+        if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) {
+            return null;
+        }
+        return eventDataMap.get( catalogTableName ).getSmartContractAddress();
+    }
+
+
+    private Integer getLengthForType( PolyType type ) {
+        switch ( type ) {
+            case VARCHAR:
+                return 300;
+            case VARBINARY:
+                return 32;
+            case DECIMAL:
+                return 100;
+            default:
+                return null;
+        }
+    }
+
+
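+    // Maps Solidity ABI types to Polypheny types; integer types map to DECIMAL because
+    // uint256 values exceed the range of BIGINT.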
+    static PolyType convertToPolyType( String type ) {
+        if ( type.equals( "bool" ) ) {
+            return PolyType.BOOLEAN;
+        } else if ( type.equals( "address" ) || type.equals( "string" ) ) {
+            return PolyType.VARCHAR;
+        } else if ( type.startsWith( "int" ) || type.startsWith( "uint" ) ) {
+            return PolyType.DECIMAL;
+        } else if ( type.equals( "bytes" ) || type.startsWith( "bytes" ) ) {
+            return PolyType.VARCHAR;  // for dynamic and fixed-size bytes
+        }
+        throw new RuntimeException( "Could not find a matching PolyType" );
+    }
+
+
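+    // Retries the given call up to maxRetries times, doubling the wait time after every failed
+    // attempt (1s, 2s, 4s, ...), which smooths over transient Etherscan rate limits.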
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java index db67e69734..bb76b9bce4 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java @@ -19,18 +19,22 @@ import java.math.BigInteger; import java.util.function.Predicate; import org.web3j.protocol.core.methods.response.EthBlock; +import org.web3j.abi.datatypes.Event; public enum EthereumMapper { BLOCK, - TRANSACTION; + TRANSACTION, + EVENTDATA; static EthereumMapper getMapper( String tableName ) { if ( tableName.equals( "block" ) ) { return BLOCK; + } else if ( tableName.equals( "transaction" ) ) { + return TRANSACTION; } - return TRANSACTION; + return EVENTDATA; } @@ -95,10 +99,13 @@ public String[] map( Object obj ) { } - public BlockReader makeReader( String clientUrl, int blocks, Predicate<BigInteger> blockNumberPredicate ) { + public BlockReader makeReader( String clientUrl, int blocks, Predicate<BigInteger> blockNumberPredicate, String contractAddress, BigInteger fromBlock, BigInteger toBlock, Event event ) { if ( this == BLOCK ) { return new BlockReader( clientUrl, blocks, blockNumberPredicate ); + } else if ( this == TRANSACTION ) { + return new TransactionReader( clientUrl, blocks, blockNumberPredicate ); } - return new TransactionReader( clientUrl, blocks, blockNumberPredicate ); + return new EventDataReader( clientUrl, blocks, blockNumberPredicate, contractAddress, fromBlock, toBlock, event ); // event data reader } + } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index e18f0bbe9c..2d9cdb8a85 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -18,37 +18,11 @@ import com.google.common.collect.ImmutableMap; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.pf4j.Extension; import org.pf4j.Plugin; import org.pf4j.PluginWrapper; -import org.polypheny.db.adapter.Adapter.AdapterProperties; -import org.polypheny.db.adapter.Adapter.AdapterSettingBoolean; -import org.polypheny.db.adapter.Adapter.AdapterSettingInteger; -import org.polypheny.db.adapter.Adapter.AdapterSettingString; -import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.catalog.Adapter; -import org.polypheny.db.catalog.entity.CatalogColumnPlacement; -import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.information.InformationGroup; -import org.polypheny.db.information.InformationTable; -import org.polypheny.db.prepare.Context; -import org.polypheny.db.schema.Schema; -import org.polypheny.db.schema.SchemaPlus; -import org.polypheny.db.schema.Table; -import org.polypheny.db.transaction.PolyXid; -import org.polypheny.db.type.PolyType; -import org.web3j.protocol.Web3j; -import org.web3j.protocol.http.HttpService; +import org.polypheny.db.processing.TransactionExtension; public class EthereumPlugin extends Plugin { @@ -73,202 +47,8 @@ public void start() { "Blocks", "10", "ExperimentalFiltering", "false" ); - + TransactionExtension.REGISTER.add( new EthereumStarter() ); // add extension to transaction manager Adapter.addAdapter( EthereumDataSource.class, ADAPTER_NAME, settings ); } - - @Override - public void stop() { - Adapter.removeAdapter( EthereumDataSource.class, ADAPTER_NAME ); - } - - - @Slf4j - @Extension - @AdapterProperties( - name = "Ethereum", - description = "An adapter for querying the Ethereum blockchain. It uses the ethereum JSON-RPC API. 
Currently, this adapter only supports read operations.", - usedModes = DeployMode.REMOTE) - @AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1) - @AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true) - @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true) - public static class EthereumDataSource extends DataSource { - - private String clientURL; - @Getter - private int blocks; - @Getter - private boolean experimentalFiltering; - private EthereumSchema currentSchema; - - - public EthereumDataSource( final int storeId, final String uniqueName, final Map<String, String> settings ) { - super( storeId, uniqueName, settings, true ); - setClientURL( settings.get( "ClientUrl" ) ); - this.blocks = Integer.parseInt( settings.get( "Blocks" ) ); - this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) ); - createInformationPage(); - enableInformationPage(); - } - - - private void setClientURL( String clientURL ) { - Web3j web3j = Web3j.build( new HttpService( clientURL ) ); - try { - BigInteger latest = web3j.ethBlockNumber().send().getBlockNumber(); - } catch ( Exception e ) { - throw new RuntimeException( "Unable to connect the client URL '" + clientURL + "'" ); - } - web3j.shutdown(); - this.clientURL = clientURL; - } - - - @Override - public void createNewSchema( SchemaPlus rootSchema, String name ) { - currentSchema = new EthereumSchema( this.clientURL ); - } - - - @Override - public Table createTableSchema( CatalogTable combinedTable, List<CatalogColumnPlacement> columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) { - return currentSchema.createBlockchainTable( combinedTable, columnPlacementsOnStore, this ); - } - - - @Override - public Schema getCurrentSchema() { - return currentSchema; - } - - - @Override - public void truncate( Context context, CatalogTable table ) { - throw new RuntimeException( "Blockchain adapter does not support truncate" ); - } - - - @Override - public Map<String, List<ExportedColumn>> getExportedColumns() { - Map<String, List<ExportedColumn>> map = new HashMap<>(); - String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" }; - PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.TIMESTAMP }; - String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" }; - PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR }; - - PolyType type = PolyType.VARCHAR; - PolyType 
collectionsType = null; - Integer length = 300; - Integer scale = null; - Integer dimension = null; - Integer cardinality = null; - int position = 0; - List<ExportedColumn> blockCols = new ArrayList<>(); - for ( String blockCol : blockColumns ) { - blockCols.add( new ExportedColumn( - blockCol, - blockTypes[position], - collectionsType, - length, - scale, - dimension, - cardinality, - false, - "public", - "block", - blockCol, - position, - position == 0 ) ); - position++; - - } - map.put( "block", blockCols ); - List<ExportedColumn> transactCols = new ArrayList<>(); - position = 0; - for ( String transactCol : transactionColumns ) { - transactCols.add( new ExportedColumn( - transactCol, - transactionTypes[position], - collectionsType, - length, - scale, - dimension, - cardinality, - false, - "public", - "transaction", - transactCol, - position, - position == 0 ) ); - position++; - } - map.put( "transaction", transactCols ); - return map; - } - - - @Override - public boolean prepare( PolyXid xid ) { - log.debug( "Blockchain Store does not support prepare()." ); - return true; - } - - - @Override - public void commit( PolyXid xid ) { - log.debug( "Blockchain Store does not support commit()." ); - } - - - @Override - public void rollback( PolyXid xid ) { - log.debug( "Blockchain Store does not support rollback()." ); - } - - - @Override - public void shutdown() { - removeInformationPage(); - } - - - @Override - protected void reloadSettings( List<String> updatedSettings ) { - if ( updatedSettings.contains( "ClientUrl" ) ) { - setClientURL( settings.get( "ClientUrl" ) ); - } - if ( updatedSettings.contains( "Blocks" ) ) { - this.blocks = Integer.parseInt( settings.get( "Blocks" ) ); - } - if ( updatedSettings.contains( "ExperimentalFiltering" ) ) { - this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) ); - } - } - - - protected void createInformationPage() { - for ( Map.Entry<String, List<ExportedColumn>> entry : getExportedColumns().entrySet() ) { - InformationGroup group = new InformationGroup( - informationPage, - entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName ); - - InformationTable table = new InformationTable( - group, - Arrays.asList( "Position", "Column Name", "Type", "Primary" ) ); - for ( ExportedColumn exportedColumn : entry.getValue() ) { - table.addRow( - exportedColumn.physicalPosition, - exportedColumn.name, - exportedColumn.getDisplayType(), - exportedColumn.primary ? "✔" : "" - ); - } - informationElements.add( table ); - informationGroups.add( group ); - } - } - - } - } \ No newline at end of file diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java index 676d7ed723..e7c65857fe 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java @@ -21,7 +21,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import org.polypheny.db.adapter.ethereum.EthereumPlugin.EthereumDataSource; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeImpl; @@ -61,12 +60,42 @@ public Table createBlockchainTable( CatalogTable catalogTable, List<CatalogColumnPlacement> columnPlacementsOnStore, EthereumDataSource ethereumDataSource ) { ... .mapToInt( i -> i ).toArray(); - EthereumMapper mapper = catalogTable.name.equals( "block" ) ? 
EthereumMapper.BLOCK : EthereumMapper.TRANSACTION; - EthereumTable table = new EthereumTable( clientUrl, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, mapper, ethereumDataSource, catalogTable.id ); + EthereumMapper mapper = catalogTable.name.startsWith( "block" ) ? EthereumMapper.BLOCK : catalogTable.name.startsWith( "transaction" ) ? EthereumMapper.TRANSACTION : EthereumMapper.EVENTDATA; + EthereumTable.Builder tableBuilder = new EthereumTable.Builder( + clientUrl, + AlgDataTypeImpl.proto( fieldInfo.build() ), + fieldTypes, + fields, + mapper, + ethereumDataSource, + catalogTable.id ); + boolean eventDataRetrieval = ethereumDataSource.isEventDataRetrieval(); + if ( eventDataRetrieval ) { + String originalName = getOriginalName( catalogTable.name ); // strip a trailing digit so the key matches eventDataMap + tableBuilder + .contractAddress( ethereumDataSource.getSmartContractAddressFromCatalogTable( originalName ) ) + .fromBlock( ethereumDataSource.getFromBlock() ) + .toBlock( ethereumDataSource.getToBlock() ) + .event( ethereumDataSource.getEventFromCatalogTable( originalName ) ); + } + EthereumTable table = tableBuilder.build(); tableMap.put( catalogTable.name, table ); return table; } + + public static String getOriginalName( String catalogName ) { + if ( catalogName == null || catalogName.isEmpty() ) { + return catalogName; + } + char lastChar = catalogName.charAt( catalogName.length() - 1 ); + if ( Character.isDigit( lastChar ) ) { + return catalogName.substring( 0, catalogName.length() - 1 ); + } + return catalogName; + } + @Override public Map<String, Table> getTableMap() {
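A brief illustration of the naming convention getOriginalName assumes (the table names here are hypothetical): duplicated event tables receive a single-digit suffix in the catalog, which is stripped before the eventDataMap lookup.

EthereumSchema.getOriginalName( "uniswap_transfer2" ); // "uniswap_transfer"
EthereumSchema.getOriginalName( "block" );             // "block" (unchanged; no trailing digit)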
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java new file mode 100644 index 0000000000..6ec7651743 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.ethereum; + +import org.polypheny.db.iface.Authenticator; +import org.polypheny.db.processing.TransactionExtension; +import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.webui.HttpServer; +import org.polypheny.db.webui.HttpServer.HandlerType; + +public class EthereumStarter implements TransactionExtension { + + @Override + public void initExtension( TransactionManager manager, Authenticator authenticator ) { + EventCacheManager.getAndSet( manager ); + EventCacheManager eventCacheManager = EventCacheManager.getInstance(); + HttpServer server = HttpServer.getInstance(); + server.addRoute( "getEventCacheStatus", ( request, crud ) -> eventCacheManager.getAllStreamStatus(), Void.class, HandlerType.GET ); + } + +} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java index 6c095c2e93..434fdc1efd 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java @@ -25,7 +25,6 @@ import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Enumerator; import org.polypheny.db.adapter.DataContext; -import org.polypheny.db.adapter.ethereum.EthereumPlugin.EthereumDataSource; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -34,6 +33,7 @@ import org.polypheny.db.schema.FilterableTable; import org.polypheny.db.schema.impl.AbstractTable; import org.polypheny.db.util.Pair; +import org.web3j.abi.datatypes.Event; public class EthereumTable extends AbstractTable implements FilterableTable { @@ -43,23 +43,89 @@ public class EthereumTable extends AbstractTable implements FilterableTable { protected final EthereumDataSource ethereumDataSource; protected final EthereumMapper mapper; protected List<EthereumFieldType> fieldTypes; + protected final String contractAddress; + protected final BigInteger fromBlock; + protected final BigInteger toBlock; + protected final Event event; - public EthereumTable( - String clientUrl, - AlgProtoDataType protoRowType, - List<EthereumFieldType> fieldTypes, - int[] fields, - EthereumMapper mapper, - EthereumDataSource ethereumDataSource, - Long tableId ) { - this.clientUrl = clientUrl; - this.protoRowType = protoRowType; - this.fieldTypes = fieldTypes; - this.fields = fields; - this.ethereumDataSource = ethereumDataSource; - this.mapper = mapper; - this.tableId = tableId; + public EthereumTable( Builder builder ) { + this.clientUrl = builder.clientUrl; + this.protoRowType = builder.protoRowType; + this.fieldTypes = builder.fieldTypes; + this.fields = builder.fields; + this.ethereumDataSource = builder.ethereumDataSource; + this.mapper = builder.mapper; + this.tableId = builder.tableId; + this.contractAddress = builder.contractAddress; + this.fromBlock = builder.fromBlock; + this.toBlock = builder.toBlock; + this.event = builder.event; + } + + + // A nested Builder keeps the event-specific fields (contract address, block range, event) optional, so the same table class works with and without event data retrieval. 
+ public static class Builder { + + protected final String clientUrl; + protected final AlgProtoDataType protoRowType; + protected final int[] fields; + protected final EthereumDataSource ethereumDataSource; + protected final EthereumMapper mapper; + protected List<EthereumFieldType> fieldTypes; + protected Long tableId; + + private String contractAddress = null; + private BigInteger fromBlock = null; + private BigInteger toBlock = null; + private Event event = null; + + + public Builder( String clientUrl, + AlgProtoDataType protoRowType, + List<EthereumFieldType> fieldTypes, + int[] fields, + EthereumMapper mapper, + EthereumDataSource ethereumDataSource, + Long tableId ) { + this.clientUrl = clientUrl; + this.protoRowType = protoRowType; + this.fieldTypes = fieldTypes; + this.fields = fields; + this.ethereumDataSource = ethereumDataSource; + this.mapper = mapper; + this.tableId = tableId; + } + + + public Builder contractAddress( String val ) { + this.contractAddress = val; + return this; + } + + + public Builder fromBlock( BigInteger val ) { + this.fromBlock = val; + return this; + } + + + public Builder toBlock( BigInteger val ) { + this.toBlock = val; + return this; + } + + + public Builder event( Event val ) { + this.event = val; + return this; + } + + + public EthereumTable build() { + return new EthereumTable( this ); + } + } @@ -104,7 +170,11 @@ public Enumerator<Object[]> enumerator() { null, mapper, finalBlockNumberPredicate, - (EthereumEnumerator.RowConverter<Object[]>) EthereumEnumerator.converter( fieldTypes, fields ) ); + (EthereumEnumerator.RowConverter<Object[]>) EthereumEnumerator.converter( fieldTypes, fields ), + contractAddress, + fromBlock, + toBlock, + event ); } }; } @@ -119,7 +189,11 @@ public Enumerator<Object[]> enumerator() { null, mapper, finalBlockNumberPredicate, - (EthereumEnumerator.RowConverter<Object[]>) EthereumEnumerator.converter( fieldTypes, fields ) ); + (EthereumEnumerator.RowConverter<Object[]>) EthereumEnumerator.converter( fieldTypes, fields ), + contractAddress, + fromBlock, + toBlock, + event ); } }; }
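A hedged sketch of the intended builder usage (all argument values are placeholders; clientUrl, protoRowType, fieldTypes, fields, ethereumDataSource, tableId and transferEvent are assumed to be in scope). For the plain block and transaction tables, the four event-specific setters are simply omitted.

// Sketch only: an event table with contract address, block range and event set.
EthereumTable eventTable = new EthereumTable.Builder( clientUrl, protoRowType, fieldTypes, fields, EthereumMapper.EVENTDATA, ethereumDataSource, tableId )
        .contractAddress( "0x..." )                   // placeholder address
        .fromBlock( BigInteger.valueOf( 17000000 ) )
        .toBlock( BigInteger.valueOf( 17000100 ) )
        .event( transferEvent )                       // org.web3j.abi.datatypes.Event
        .build();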
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java new file mode 100644 index 0000000000..24b30a4a03 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -0,0 +1,151 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.ethereum; + + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import org.web3j.abi.EventEncoder; +import org.web3j.abi.FunctionReturnDecoder; +import org.web3j.abi.TypeReference; +import org.web3j.abi.datatypes.Event; +import org.web3j.abi.datatypes.Type; +import org.web3j.protocol.Web3j; +import org.web3j.protocol.core.DefaultBlockParameter; +import org.web3j.protocol.core.Response; +import org.web3j.protocol.core.methods.request.EthFilter; +import org.web3j.protocol.core.methods.response.EthLog; +import org.web3j.protocol.core.methods.response.Log; + +public class EventCache { + + private final Map<EventData, List<List<Object>>> cache = new ConcurrentHashMap<>(); // a cache for each event + private final List<EventData> events; + protected final Web3j web3j; + + + public EventCache( List<EventData> events, Web3j web3j ) { + this.web3j = web3j; + this.events = events; + events.forEach( event -> this.cache.put( event, new ArrayList<>() ) ); + } + + + public void addToCache( String address, BigInteger startBlock, BigInteger endBlock ) { + for ( EventData event : events ) { + addLogsToCache( address, event, startBlock, endBlock ); + if ( cache.get( event ).isEmpty() ) { + continue; + } + EventCacheManager.getInstance().writeToStore( event.getCompositeName(), cache.get( event ) ); // write event data into the store + cache.get( event ).clear(); // clear cache batch + } + } + + + private void addLogsToCache( String address, EventData eventData, BigInteger startBlock, BigInteger endBlock ) { + EthFilter filter = new EthFilter( + DefaultBlockParameter.valueOf( startBlock ), + DefaultBlockParameter.valueOf( endBlock ), + address + ); + + Event event = eventData.getEvent(); + filter.addSingleTopic( EventEncoder.encode( event ) ); + + try { + EthLog ethLog = web3j.ethGetLogs( filter ).send(); // get the EthLog response + + if ( ethLog.hasError() ) { + Response.Error error = ethLog.getError(); + throw new CacheException( "Error occurred while fetching logs for block range: " + startBlock + " to " + endBlock + ". Please retry starting from block " + startBlock + " and continue to your intended final block. Error Message: " + error.getMessage() );
+ } + List<EthLog.LogResult> rawLogs = ethLog.getLogs(); + List<List<Object>> structuredLogs = normalizeLogs( event, rawLogs ); + cache.put( eventData, structuredLogs ); + + } catch ( IOException e ) { + throw new CacheException( "IO Error fetching logs", e ); + } + } + + + private List<List<Object>> normalizeLogs( Event event, List<EthLog.LogResult> rawLogs ) { + List<List<Object>> structuredLogs = new ArrayList<>(); + for ( EthLog.LogResult rawLogResult : rawLogs ) { + Log rawLog = (Log) rawLogResult.get(); + + if ( rawLog.getLogIndex() == null || + rawLog.getTransactionIndex() == null || + rawLog.getBlockNumber() == null ) { + continue; // skip pending logs; they would violate the primary key constraint + } + + List<Object> structuredLog = new ArrayList<>(); + + // Add all indexed values first (topics); topics are ordered by the indexed parameters only + int indexedPosition = 0; + for ( int i = 0; i < event.getParameters().size(); i++ ) { + TypeReference<?> paramType = event.getParameters().get( i ); + if ( paramType.isIndexed() ) { + structuredLog.add( extractIndexedValue( rawLog, paramType, indexedPosition ) ); + indexedPosition++; + } + } + + // Then add all non-indexed values (data) + int nonIndexedPosition = 0; // separate index for non-indexed parameters + for ( int i = 0; i < event.getParameters().size(); i++ ) { + TypeReference<?> paramType = event.getParameters().get( i ); + if ( !paramType.isIndexed() ) { + structuredLog.add( extractNonIndexedValue( rawLog, paramType, nonIndexedPosition, event ) ); + nonIndexedPosition++; + } + } + + // Add other log information + structuredLog.add( rawLog.isRemoved() ); + structuredLog.add( rawLog.getLogIndex() ); + structuredLog.add( rawLog.getTransactionIndex() ); + structuredLog.add( rawLog.getTransactionHash() ); + structuredLog.add( rawLog.getBlockHash() ); + structuredLog.add( rawLog.getBlockNumber() ); + structuredLog.add( rawLog.getAddress() ); + + structuredLogs.add( structuredLog ); + } + + return structuredLogs; + } + + + private Object extractIndexedValue( Log rawLog, TypeReference<?> paramType, int position ) { + // Topics are ordered [signature, indexed1, indexed2, ...]; the first topic is the event signature + String topic = rawLog.getTopics().get( position + 1 ); + return FunctionReturnDecoder.decodeIndexedValue( topic, paramType ); + } + + + private Object extractNonIndexedValue( Log rawLog, TypeReference<?> paramType, int position, Event event ) { + List<Type> decodedValue = FunctionReturnDecoder.decode( rawLog.getData(), event.getNonIndexedParameters() ); + return decodedValue.get( position ); + } + +} + 
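For orientation, a minimal sketch of the web3j decoding pattern EventCache relies on (the Transfer event shape and the log variable are assumptions for illustration): indexed parameters arrive as topics and are decoded one by one, while non-indexed parameters arrive packed in the data field and are decoded together.

// Assumed ERC-20-style event: Transfer(address indexed from, address indexed to, uint256 value)
List<TypeReference<?>> params = List.of(
        new TypeReference<Address>( true ) { },     // indexed     -> topic 1
        new TypeReference<Address>( true ) { },     // indexed     -> topic 2
        new TypeReference<Uint256>( false ) { } );  // non-indexed -> data field
Event transfer = new Event( "Transfer", params );
// topics.get( 0 ) holds the event signature hash; indexed values start at index 1
Type from = FunctionReturnDecoder.decodeIndexedValue( log.getTopics().get( 1 ), transfer.getIndexedParameters().get( 0 ) );
List<Type> data = FunctionReturnDecoder.decode( log.getData(), transfer.getNonIndexedParameters() );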
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java new file mode 100644 index 0000000000..aadaf26e47 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -0,0 +1,224 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.ethereum; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import org.polypheny.db.PolyImplementation; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataSource.ExportedColumn; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.logical.relational.LogicalValues; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Catalog.ConstraintType; +import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; +import org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.ddl.DdlManager; +import org.polypheny.db.ddl.DdlManager.ConstraintInformation; +import org.polypheny.db.ddl.DdlManager.FieldInformation; +import org.polypheny.db.ddl.exception.ColumnNotExistsException; +import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; +import org.polypheny.db.plan.AlgOptSchema; +import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.tools.AlgBuilder; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.transaction.TransactionException; +import org.polypheny.db.transaction.TransactionManager; +import org.web3j.abi.datatypes.Address; +import org.web3j.abi.datatypes.Bool; +import org.web3j.abi.datatypes.Bytes; +import org.web3j.abi.datatypes.DynamicBytes; +import org.web3j.abi.datatypes.Int; +import org.web3j.abi.datatypes.Uint; + + +public class EventCacheManager implements Runnable { + + private static EventCacheManager INSTANCE = null; + + private final TransactionManager transactionManager; + + + public Map<Integer, ContractCache> caches = new ConcurrentHashMap<>(); + + + /** + * Called only once at the start of Polypheny to create the single instance of the manager; + * subsequent calls throw, and the {@link #getInstance()} method is used to retrieve the initially created instance. + * + * @param manager is used to create new transactions, which are required to create new queries. 
+ */ + public static synchronized void getAndSet( TransactionManager manager ) { + if ( INSTANCE != null ) { + throw new RuntimeException( String.format( "The %s was already set.", EventCacheManager.class.getSimpleName() ) ); + } + INSTANCE = new EventCacheManager( manager ); + } + + + public static EventCacheManager getInstance() { + if ( INSTANCE == null ) { + throw new RuntimeException( String.format( "The %s was not correctly initialized.", EventCacheManager.class.getSimpleName() ) ); + } + return INSTANCE; + } + + + private EventCacheManager( TransactionManager transactionManager ) { + this.transactionManager = transactionManager; + } + + + public ContractCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, BigInteger fromBlock, BigInteger toBlock, Map<String, List<EventData>> eventsPerContract, Map<String, List<ExportedColumn>> columns ) { + ContractCache cache = new ContractCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, columns ); + this.caches.put( sourceAdapterId, cache ); + return cache; + } + + + void createTables( Map<String, List<FieldInformation>> tableInformations, int targetAdapterId ) { + try { + long namespaceId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, "public" ).id; // get the default schema + Transaction transaction = getTransaction(); + DataStore store = AdapterManager.getInstance().getStore( targetAdapterId ); // get the target store from the adapter + + // For each table, a new table is created with its constraints (e.g., the primary key). + for ( Entry<String, List<FieldInformation>> table : tableInformations.entrySet() ) { + ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( "log_index", "transaction_index", "block_number" ) ); + DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, false, transaction.createStatement(), true ); + } + + try { + transaction.commit(); + } catch ( TransactionException e ) { + throw new RuntimeException( e ); + } + } catch ( EntityAlreadyExistsException | ColumnNotExistsException | UnknownPartitionTypeException | UnknownColumnException | PartitionGroupNamesNotUniqueException | UnknownSchemaException e ) { + throw new RuntimeException( e ); + } + } + + + private Transaction getTransaction() { + try { + return transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" ); + } catch ( UnknownSchemaException | UnknownDatabaseException | GenericCatalogException | UnknownUserException e ) { + throw new RuntimeException( e ); + } + }
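+ // Example (illustrative; the variable names are assumed, not part of this patch):
+ // a caller builds one FieldInformation list per event table, e.g. via
+ // ExportedColumn.toFieldInformation(), and every created table then gets the
+ // composite primary key on log_index/transaction_index/block_number (see createTables above):
+ //
+ //   Map<String, List<FieldInformation>> defs = new HashMap<>();
+ //   defs.put( "contract_transfer", exportedColumns.stream()
+ //           .map( ExportedColumn::toFieldInformation )
+ //           .collect( Collectors.toList() ) );
+ //   EventCacheManager.getInstance().createTables( defs, targetAdapterId );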
+ void writeToStore( String tableName, List<List<Object>> logResults ) { + if ( logResults.isEmpty() ) { + return; + } + Transaction transaction = getTransaction(); + Statement statement = transaction.createStatement(); + + AlgBuilder builder = AlgBuilder.create( statement ); + + AlgOptSchema algOptSchema = transaction.getCatalogReader(); + AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( Catalog.HIDDEN_PREFIX + tableName ) ); + + AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() ); + builder.push( LogicalValues.createOneRow( builder.getCluster() ) ); + builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ), rowType.getFieldNames() ); + builder.insert( table ); + // todo DL: we should re-use this builder for all batches (ignored right now) + + AlgNode node = builder.build(); // construct the algebra node + AlgRoot root = AlgRoot.of( node, Kind.INSERT ); // wrap the node into an AlgRoot, as required by Polypheny + + // Add the dynamic parameters to the context + int i = 0; + for ( AlgDataTypeField field : rowType.getFieldList() ) { + long idx = field.getIndex(); + AlgDataType type = field.getType(); + + // Extract the values of the current field from the log results + List<Object> fieldValues = new ArrayList<>(); + for ( List<Object> logResult : logResults ) { + Object value = logResult.get( i ); + value = convertValueBasedOnType( value ); + fieldValues.add( value ); + } + i++; + statement.getDataContext().addParameterValues( idx, type, fieldValues ); // one parameter list per column; the list covers all rows of the batch + } + + // Prepare and execute the insert + PolyImplementation implementation = statement.getQueryProcessor().prepareQuery( root, false ); // prepare the query plan + implementation.getRows( statement, -1 ); // execute; -1 fetches all rows + try { + transaction.commit(); + } catch ( TransactionException e ) { + throw new RuntimeException( e ); + } + } + + + protected Map getAllStreamStatus() { + // return the status of each registered cache process + return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, ContractCache::getStatus ) ); + } + + + private Object convertValueBasedOnType( Object value ) { + if ( value instanceof Address ) { + return value.toString(); + } else if ( value instanceof Bool ) { + return ((Bool) value).getValue(); + } else if ( value instanceof DynamicBytes ) { + return ((DynamicBytes) value).getValue().toString(); + } else if ( value instanceof Bytes ) { // similarly for Bytes and its subclasses (e.g. Bytes1...Bytes32) + return value.toString(); + } else if ( value instanceof Uint ) { // similarly for Uint and its subclasses (e.g. Uint256) + BigInteger bigIntValue = ((Uint) value).getValue(); + return bigIntValue == null ? null : new BigDecimal( bigIntValue ); + } else if ( value instanceof Int ) { // similarly for Int and its subclasses + BigInteger bigIntValue = ((Int) value).getValue(); + return bigIntValue == null ? null : new BigDecimal( bigIntValue ); + } + return value; // return the original value if none of the conditions match + } + + + @Override + public void run() { + } + +} \ No newline at end of file diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java new file mode 100644 index 0000000000..171614d905 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java @@ -0,0 +1,520 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.ethereum; + +import java.util.ArrayList; +import java.util.List; +import lombok.Getter; +import org.json.JSONArray; +import org.json.JSONObject; +import org.web3j.abi.TypeReference; +import org.web3j.abi.datatypes.Address; +import org.web3j.abi.datatypes.Bool; +import org.web3j.abi.datatypes.DynamicBytes; +import org.web3j.abi.datatypes.Event; +import org.web3j.abi.datatypes.Utf8String; +import org.web3j.abi.datatypes.generated.Uint8; +import org.web3j.abi.datatypes.generated.Uint16; +import org.web3j.abi.datatypes.generated.Uint24; +import org.web3j.abi.datatypes.generated.Uint32; +import org.web3j.abi.datatypes.generated.Uint40; +import org.web3j.abi.datatypes.generated.Uint48; +import org.web3j.abi.datatypes.generated.Uint56; +import org.web3j.abi.datatypes.generated.Uint64; +import org.web3j.abi.datatypes.generated.Uint72; +import org.web3j.abi.datatypes.generated.Uint80; +import org.web3j.abi.datatypes.generated.Uint88; +import org.web3j.abi.datatypes.generated.Uint96; +import org.web3j.abi.datatypes.generated.Uint104; +import org.web3j.abi.datatypes.generated.Uint112; +import org.web3j.abi.datatypes.generated.Uint120; +import org.web3j.abi.datatypes.generated.Uint128; +import org.web3j.abi.datatypes.generated.Uint136; +import org.web3j.abi.datatypes.generated.Uint144; +import org.web3j.abi.datatypes.generated.Uint152; +import org.web3j.abi.datatypes.generated.Uint160; +import org.web3j.abi.datatypes.generated.Uint168; +import org.web3j.abi.datatypes.generated.Uint176; +import org.web3j.abi.datatypes.generated.Uint184; +import org.web3j.abi.datatypes.generated.Uint192; +import org.web3j.abi.datatypes.generated.Uint200; +import org.web3j.abi.datatypes.generated.Uint208; +import org.web3j.abi.datatypes.generated.Uint216; +import org.web3j.abi.datatypes.generated.Uint224; +import org.web3j.abi.datatypes.generated.Uint232; +import org.web3j.abi.datatypes.generated.Uint240; +import org.web3j.abi.datatypes.generated.Uint248; +import org.web3j.abi.datatypes.generated.Uint256; +import org.web3j.abi.datatypes.generated.Int8; +import org.web3j.abi.datatypes.generated.Int16; +import org.web3j.abi.datatypes.generated.Int24; +import org.web3j.abi.datatypes.generated.Int32; +import org.web3j.abi.datatypes.generated.Int40; +import org.web3j.abi.datatypes.generated.Int48; +import org.web3j.abi.datatypes.generated.Int56; +import org.web3j.abi.datatypes.generated.Int64; +import org.web3j.abi.datatypes.generated.Int72; +import org.web3j.abi.datatypes.generated.Int80; +import org.web3j.abi.datatypes.generated.Int88; +import org.web3j.abi.datatypes.generated.Int96; +import org.web3j.abi.datatypes.generated.Int104; +import org.web3j.abi.datatypes.generated.Int112; +import org.web3j.abi.datatypes.generated.Int120; +import org.web3j.abi.datatypes.generated.Int128; +import org.web3j.abi.datatypes.generated.Int136; +import org.web3j.abi.datatypes.generated.Int144; +import org.web3j.abi.datatypes.generated.Int152; +import org.web3j.abi.datatypes.generated.Int160; +import org.web3j.abi.datatypes.generated.Int168; +import org.web3j.abi.datatypes.generated.Int176; +import org.web3j.abi.datatypes.generated.Int184; +import org.web3j.abi.datatypes.generated.Int192; +import org.web3j.abi.datatypes.generated.Int200; +import org.web3j.abi.datatypes.generated.Int208; +import org.web3j.abi.datatypes.generated.Int216; +import org.web3j.abi.datatypes.generated.Int224; +import org.web3j.abi.datatypes.generated.Int232; +import org.web3j.abi.datatypes.generated.Int240; +import 
org.web3j.abi.datatypes.generated.Int248; +import org.web3j.abi.datatypes.generated.Int256; +import org.web3j.abi.datatypes.generated.Bytes1; +import org.web3j.abi.datatypes.generated.Bytes2; +import org.web3j.abi.datatypes.generated.Bytes3; +import org.web3j.abi.datatypes.generated.Bytes4; +import org.web3j.abi.datatypes.generated.Bytes5; +import org.web3j.abi.datatypes.generated.Bytes6; +import org.web3j.abi.datatypes.generated.Bytes7; +import org.web3j.abi.datatypes.generated.Bytes8; +import org.web3j.abi.datatypes.generated.Bytes9; +import org.web3j.abi.datatypes.generated.Bytes10; +import org.web3j.abi.datatypes.generated.Bytes11; +import org.web3j.abi.datatypes.generated.Bytes12; +import org.web3j.abi.datatypes.generated.Bytes13; +import org.web3j.abi.datatypes.generated.Bytes14; +import org.web3j.abi.datatypes.generated.Bytes15; +import org.web3j.abi.datatypes.generated.Bytes16; +import org.web3j.abi.datatypes.generated.Bytes17; +import org.web3j.abi.datatypes.generated.Bytes18; +import org.web3j.abi.datatypes.generated.Bytes19; +import org.web3j.abi.datatypes.generated.Bytes20; +import org.web3j.abi.datatypes.generated.Bytes21; +import org.web3j.abi.datatypes.generated.Bytes22; +import org.web3j.abi.datatypes.generated.Bytes23; +import org.web3j.abi.datatypes.generated.Bytes24; +import org.web3j.abi.datatypes.generated.Bytes25; +import org.web3j.abi.datatypes.generated.Bytes26; +import org.web3j.abi.datatypes.generated.Bytes27; +import org.web3j.abi.datatypes.generated.Bytes28; +import org.web3j.abi.datatypes.generated.Bytes29; +import org.web3j.abi.datatypes.generated.Bytes30; +import org.web3j.abi.datatypes.generated.Bytes31; +import org.web3j.abi.datatypes.generated.Bytes32; + + +public class EventData { + + @Getter + private String originalKey; + @Getter + private Event event; + @Getter + private String smartContractAddress; + @Getter + private String compositeName; + @Getter + private JSONArray abiInputs; + + + public EventData( String originalKey, String contractName, String smartContractAddress, JSONArray abiInputs ) { + this.originalKey = originalKey; + this.compositeName = contractName.toLowerCase() + "_" + originalKey.toLowerCase(); + this.abiInputs = abiInputs; + List<TypeReference<?>> typeReferences = createTypeReferences( abiInputs ); + this.event = new Event( originalKey, typeReferences ); // create the event from its name (original key) and its inputs + this.smartContractAddress = smartContractAddress; + } + + + private static List<TypeReference<?>> createTypeReferences( JSONArray abiInputs ) { + List<TypeReference<?>> typeReferences = new ArrayList<>(); + for ( int i = 0; i < abiInputs.length(); i++ ) { + JSONObject inputObject = abiInputs.getJSONObject( i ); + String type = inputObject.getString( "type" ); + boolean indexed = inputObject.getBoolean( "indexed" ); + + switch ( type ) { + case "address": + typeReferences.add( new TypeReference<Address>
( indexed ) { + } ); + break; + case "bool": + typeReferences.add( new TypeReference<Bool>( indexed ) { + } ); + break; + case "string": + typeReferences.add( new TypeReference<Utf8String>( indexed ) { + } ); + break; + case "uint": // alias for uint256 + typeReferences.add( new TypeReference<Uint256>( indexed ) { + } ); + break; + case "int": // alias for int256 + typeReferences.add( new TypeReference<Int256>( indexed ) { + } ); + break; + case "bytes": // dynamic-sized byte array + typeReferences.add( new TypeReference<DynamicBytes>( indexed ) { + } ); + break; + default: + // cover the remaining sized types explicitly; doing this dynamically would need an external library + if ( type.startsWith( "uint" ) ) { + int bitSize = Integer.parseInt( type.substring( 4 ) ); // get the bit size, e.g., 8 from uint8 + typeReferences.add( createUintTypeReference( bitSize, indexed ) ); + } else if ( type.startsWith( "int" ) ) { + int bitSize = Integer.parseInt( type.substring( 3 ) ); // get the bit size, e.g., 8 from int8 + typeReferences.add( createIntTypeReference( bitSize, indexed ) ); + } else if ( type.startsWith( "bytes" ) && !type.equals( "bytes" ) ) { // fixed-size byte array + int size = Integer.parseInt( type.substring( 5 ) ); // get the size, e.g., 1 from bytes1 + typeReferences.add( createBytesTypeReference( size, indexed ) ); + } + break; + } + } + + return typeReferences; + } + + + private static TypeReference<?> createUintTypeReference( int bitSize, boolean indexed ) { + switch ( bitSize ) { + case 8: return new TypeReference<Uint8>( indexed ) { }; + case 16: return new TypeReference<Uint16>( indexed ) { }; + case 24: return new TypeReference<Uint24>( indexed ) { }; + case 32: return new TypeReference<Uint32>( indexed ) { }; + case 40: return new TypeReference<Uint40>( indexed ) { }; + case 48: return new TypeReference<Uint48>( indexed ) { }; + case 56: return new TypeReference<Uint56>( indexed ) { }; + case 64: return new TypeReference<Uint64>( indexed ) { }; + case 72: return new TypeReference<Uint72>( indexed ) { }; + case 80: return new TypeReference<Uint80>( indexed ) { }; + case 88: return new TypeReference<Uint88>( indexed ) { }; + case 96: return new TypeReference<Uint96>( indexed ) { }; + case 104: return new TypeReference<Uint104>( indexed ) { }; + case 112: return new TypeReference<Uint112>( indexed ) { }; + case 120: return new TypeReference<Uint120>( indexed ) { }; + case 128: return new TypeReference<Uint128>( indexed ) { }; + case 136: return new TypeReference<Uint136>( indexed ) { }; + case 144: return new TypeReference<Uint144>( indexed ) { }; + case 152: return new TypeReference<Uint152>( indexed ) { }; + case 160: return new TypeReference<Uint160>( indexed ) { }; + case 168: return new TypeReference<Uint168>( indexed ) { }; + case 176: return new TypeReference<Uint176>( indexed ) { }; + case 184: return new TypeReference<Uint184>( indexed ) { }; + case 192: return new TypeReference<Uint192>( indexed ) { }; + case 200: return new TypeReference<Uint200>( indexed ) { }; + case 208: return new TypeReference<Uint208>( indexed ) { }; + case 216: return new TypeReference<Uint216>( indexed ) { }; + case 224: return new TypeReference<Uint224>( indexed ) { }; + case 232: return new TypeReference<Uint232>( indexed ) { }; + case 240: return new TypeReference<Uint240>( indexed ) { }; + case 248: return new TypeReference<Uint248>( indexed ) { }; + case 256: return new TypeReference<Uint256>( indexed ) { }; + default: + throw new IllegalArgumentException( "Unsupported bit size: " + bitSize ); + } + } + + + private static TypeReference<?> createIntTypeReference( int bitSize, boolean indexed ) { + switch ( bitSize ) { + case 8: return new TypeReference<Int8>( indexed ) { }; + case 16: return new TypeReference<Int16>( indexed ) { }; + case 24: return new 
TypeReference<Int24>( indexed ) { }; + case 32: return new TypeReference<Int32>( indexed ) { }; + case 40: return new TypeReference<Int40>( indexed ) { }; + case 48: return new TypeReference<Int48>( indexed ) { }; + case 56: return new TypeReference<Int56>( indexed ) { }; + case 64: return new TypeReference<Int64>( indexed ) { }; + case 72: return new TypeReference<Int72>( indexed ) { }; + case 80: return new TypeReference<Int80>( indexed ) { }; + case 88: return new TypeReference<Int88>( indexed ) { }; + case 96: return new TypeReference<Int96>( indexed ) { }; + case 104: return new TypeReference<Int104>( indexed ) { }; + case 112: return new TypeReference<Int112>( indexed ) { }; + case 120: return new TypeReference<Int120>( indexed ) { }; + case 128: return new TypeReference<Int128>( indexed ) { }; + case 136: return new TypeReference<Int136>( indexed ) { }; + case 144: return new TypeReference<Int144>( indexed ) { }; + case 152: return new TypeReference<Int152>( indexed ) { }; + case 160: return new TypeReference<Int160>( indexed ) { }; + case 168: return new TypeReference<Int168>( indexed ) { }; + case 176: return new TypeReference<Int176>( indexed ) { }; + case 184: return new TypeReference<Int184>( indexed ) { }; + case 192: return new TypeReference<Int192>( indexed ) { }; + case 200: return new TypeReference<Int200>( indexed ) { }; + case 208: return new TypeReference<Int208>( indexed ) { }; + case 216: return new TypeReference<Int216>( indexed ) { }; + case 224: return new TypeReference<Int224>( indexed ) { }; + case 232: return new TypeReference<Int232>( indexed ) { }; + case 240: return new TypeReference<Int240>( indexed ) { }; + case 248: return new TypeReference<Int248>( indexed ) { }; + case 256: return new TypeReference<Int256>( indexed ) { }; + default: + throw new IllegalArgumentException( "Unsupported bit size: " + bitSize ); + } + } + + + private static TypeReference<?> createBytesTypeReference( int size, boolean indexed ) { + switch ( size ) { + case 1: return new TypeReference<Bytes1>( indexed ) { }; + case 2: return new TypeReference<Bytes2>( indexed ) { }; + case 3: return new TypeReference<Bytes3>( indexed ) { }; + case 4: return new TypeReference<Bytes4>( indexed ) { }; + case 5: return new TypeReference<Bytes5>( indexed ) { }; + case 6: return new TypeReference<Bytes6>( indexed ) { }; + case 7: return new TypeReference<Bytes7>( indexed ) { }; + case 8: return new TypeReference<Bytes8>( indexed ) { }; + case 9: return new TypeReference<Bytes9>( indexed ) { }; + case 10: return new TypeReference<Bytes10>( indexed ) { }; + case 11: return new TypeReference<Bytes11>( indexed ) { }; + case 12: return new TypeReference<Bytes12>( indexed ) { }; + case 13: return new TypeReference<Bytes13>( indexed ) { }; + case 14: return new TypeReference<Bytes14>( indexed ) { }; + case 15: return new TypeReference<Bytes15>( indexed ) { }; + case 16: return new TypeReference<Bytes16>( indexed ) { }; + case 17: return new TypeReference<Bytes17>( indexed ) { }; + case 18: return new TypeReference<Bytes18>( indexed ) { }; + case 19: return new TypeReference<Bytes19>( indexed ) { }; + case 20: return new TypeReference<Bytes20>( indexed ) { }; + case 21: return new TypeReference<Bytes21>( indexed ) { }; + case 22: return new TypeReference<Bytes22>( indexed ) { }; + case 23: return new TypeReference<Bytes23>( indexed ) { }; + case 24: return new TypeReference<Bytes24>( indexed ) { }; + case 25: return new TypeReference<Bytes25>( indexed ) { }; + case 26: return new TypeReference<Bytes26>( indexed ) { }; + case 27: return new TypeReference<Bytes27>( indexed ) { }; + case 28: return new TypeReference<Bytes28>( indexed ) { }; + case 29: return new TypeReference<Bytes29>( indexed ) { }; + case 30: return new TypeReference<Bytes30>( indexed ) { }; + 
case 31: return new TypeReference<Bytes31>( indexed ) { }; + case 32: return new TypeReference<Bytes32>( indexed ) { }; + default: + throw new IllegalArgumentException( "Size not supported for Bytes type." ); + } + } + +} + + + diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java new file mode 100644 index 0000000000..2b1ac0b861 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java @@ -0,0 +1,125 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.ethereum; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Arrays; +import java.util.function.Predicate; +import org.web3j.abi.FunctionReturnDecoder; +import org.web3j.abi.datatypes.Type; +import org.web3j.protocol.core.Response; +import org.web3j.protocol.core.methods.response.EthLog; +import org.web3j.protocol.core.methods.response.Log; +import org.web3j.protocol.core.methods.request.EthFilter; +import org.web3j.protocol.core.DefaultBlockParameter; +import org.web3j.abi.datatypes.Event; +import org.web3j.abi.TypeReference; +import org.web3j.abi.EventEncoder; + + +public class EventDataReader extends BlockReader { + + private List<EthLog.LogResult> logs; + private int currentLogIndex = 0; + private Event event; + + + EventDataReader( String clientUrl, int blocks, Predicate<BigInteger> blockNumberPredicate, String contractAddress, BigInteger fromBlock, BigInteger toBlock, Event event ) { + super( clientUrl, blocks, blockNumberPredicate ); + this.event = event; + + EthFilter filter = new EthFilter( + DefaultBlockParameter.valueOf( fromBlock ), + DefaultBlockParameter.valueOf( toBlock ), + contractAddress + ); + + filter.addSingleTopic( EventEncoder.encode( event ) ); + + try { + EthLog ethLog = web3j.ethGetLogs( filter ).send(); + + if ( ethLog.hasError() ) { + Response.Error error = ethLog.getError(); + throw new RuntimeException( "Error fetching logs: " + error.getMessage() ); + } + + logs = ethLog.getLogs(); + + } catch ( IOException e ) { + throw new RuntimeException( "IO Error fetching logs", e ); + } + } + + + @Override + public String[] readNext() throws IOException { + if ( this.blockReads <= 0 || currentLogIndex >= logs.size() ) { + return null; + } + + EthLog.LogResult logResult = logs.get( currentLogIndex ); + Log log = (Log) logResult.get(); + + currentLogIndex++; + if ( currentLogIndex >= logs.size() ) { + this.blockReads--; + } + + // Decode the data field of the log (non-indexed parameters) + String data = log.getData(); + List<Type> decodedData = FunctionReturnDecoder.decode( data, event.getNonIndexedParameters() ); + + // Decode the topics of the log (indexed parameters) + List<String> topics = log.getTopics(); + topics.remove( 0 ); // the first topic is the event signature, so skip it + List<Type> 
decodedTopics = new ArrayList<>(); + for ( int i = 0; i < topics.size(); i++ ) { + String topic = topics.get( i ); + TypeReference<Type> parameterType = event.getIndexedParameters().get( i ); + Type decodedTopic = FunctionReturnDecoder.decodeIndexedValue( topic, parameterType ); + decodedTopics.add( decodedTopic ); + } + + // Combine the decoded topics and data into a single list + List<Type> allDecodedParameters = new ArrayList<>(); + allDecodedParameters.addAll( decodedTopics ); + allDecodedParameters.addAll( decodedData ); + + // Convert the decoded parameters to a String array + String[] result = new String[allDecodedParameters.size()]; + for ( int i = 0; i < allDecodedParameters.size(); i++ ) { + result[i] = allDecodedParameters.get( i ).getValue().toString(); + } + + // Add additional columns + String[] extendedResult = Arrays.copyOf( result, result.length + 7 ); + extendedResult[result.length] = Boolean.toString( log.isRemoved() ); + extendedResult[result.length + 1] = log.getLogIndex().toString(); + extendedResult[result.length + 2] = log.getTransactionIndex().toString(); + extendedResult[result.length + 3] = log.getTransactionHash(); + extendedResult[result.length + 4] = log.getBlockHash(); + extendedResult[result.length + 5] = log.getBlockNumber().toString(); + extendedResult[result.length + 6] = log.getAddress(); + + return extendedResult; + } + +} diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 9b24309a30..50b41cd50e 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -790,16 +790,16 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce CatalogAdapter csv = getAdapter( "hr" ); if ( !testMode ) { if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "depts" } ) ) { - addTable( "depts", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "depts", schemaId, systemId, EntityType.SOURCE, false, false, false ); } if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "emps" } ) ) { - addTable( "emps", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "emps", schemaId, systemId, EntityType.SOURCE, false, false, false ); } if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "emp" } ) ) { - addTable( "emp", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "emp", schemaId, systemId, EntityType.SOURCE, false, false, false ); } if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "work" } ) ) { - addTable( "work", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "work", schemaId, systemId, EntityType.SOURCE, false, false, false ); addDefaultCsvColumns( csv ); } } @@ -1285,7 +1285,7 @@ public void addGraphLogistics( long id, List<DataStore> stores, boolean onlyPlac // table id nodes -> id, node, labels long nodesId; if ( !onlyPlacement ) { - nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { nodesId = getTable( id, "_nodes_" ).id; } @@ -1331,7 +1331,7 @@ public void addGraphLogistics( long id, List<DataStore> stores, boolean onlyPlac // table id nodes -> id, node, labels long nodesPropertyId; if ( !onlyPlacement ) { - nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true
); + nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { nodesPropertyId = getTable( id, "_n_properties_" ).id; } @@ -1390,7 +1390,7 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac // table id relationships -> id, rel, labels long edgesId; if ( !onlyPlacement ) { - edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { edgesId = getTable( id, "_edges_" ).id; } @@ -1505,7 +1505,7 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac // table id nodes -> id, node, labels long edgesPropertyId; if ( !onlyPlacement ) { - edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { edgesPropertyId = getTable( id, "_properties_" ).id; } @@ -1850,7 +1850,7 @@ public CatalogTable getTable( String databaseName, String schemaName, String tab * {@inheritDoc} */ @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, boolean cached, boolean hidden ) { long id = entityIdBuilder.getAndIncrement(); CatalogSchema schema = getSchema( namespaceId ); if ( !schema.caseSensitive ) { @@ -1884,7 +1884,9 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent ImmutableList.of(), modifiable, partitionProperty, - ImmutableList.of() ); + ImmutableList.of(), + cached, + hidden ); updateEntityLogistics( name, namespaceId, id, schema, table ); if ( schema.namespaceType != NamespaceType.DOCUMENT ) { @@ -2202,7 +2204,9 @@ public void setTableOwner( long tableId, int ownerId ) { old.dataPlacements, old.modifiable, old.partitionProperty, - old.connectedViews ); + old.connectedViews, + old.cached, + old.hidden ); } synchronized ( this ) { @@ -2255,7 +2259,10 @@ public void setPrimaryKey( long tableId, Long keyId ) { keyId, old.dataPlacements, old.modifiable, - old.partitionProperty, old.connectedViews ); + old.partitionProperty, + old.connectedViews, + old.cached, + old.hidden ); } synchronized ( this ) { @@ -2503,7 +2510,7 @@ public long addCollectionLogistics( long schemaId, String name, List throw new RuntimeException( e ); } } else { - tableId = addTable( name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true ); + tableId = addTable( name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } stores.forEach( store -> addDataPlacement( store.getAdapterId(), tableId ) ); @@ -3274,7 +3281,7 @@ public void deleteColumn( long columnId ) { old.dataPlacements, old.modifiable, old.partitionProperty, - old.connectedViews ); + old.connectedViews, false, false ); } synchronized ( this ) { columnNames.remove( new Object[]{ column.databaseId, column.schemaId, column.tableId, column.name } ); @@ -4293,7 +4300,7 @@ public void partitionTable( long tableId, PartitionType partitionType, long part old.dataPlacements, old.modifiable, partitionProperty, - old.connectedViews ); + old.connectedViews, false, false ); synchronized ( this ) { tables.replace( tableId, table ); @@ -4349,7 +4356,7 @@ public void mergeTable( long tableId ) { old.dataPlacements, old.modifiable, partitionProperty, - 
diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
index 6cdd9bea5c..a0e64426de 100644
--- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
+++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java
@@ -97,7 +97,7 @@ public void testLayout() throws UnknownDatabaseException, UnknownSchemaException
        CatalogSchema schema = catalog.getSchema( databaseId, "test_schema" );
        assertEquals( schemaId, schema.id );

-        long tableId = catalog.addTable( "test_table", schemaId, userId, EntityType.ENTITY, true );
+        long tableId = catalog.addTable( "test_table", schemaId, userId, EntityType.ENTITY, true, false, false );
        CatalogTable table = catalog.getTable( schemaId, "test_table" );
        assertEquals( tableId, table.id );

@@ -176,7 +176,7 @@ public void testTable() throws GenericCatalogException {
        List<Long> ids = new ArrayList<>();
        for ( String name : names ) {
-            ids.add( catalog.addTable( name, schemaId, userId, EntityType.ENTITY, true ) );
+            ids.add( catalog.addTable( name, schemaId, userId, EntityType.ENTITY, true, false, false ) );
        }

        // test renaming table
@@ -234,7 +234,7 @@ public void testColumn() throws GenericCatalogException {
        long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL );

-        long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true );
+        long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true, false, false );
        List<String> columnNames = new ArrayList<>( Arrays.asList( "column1", "column2", "column3", "column4", "column5" ) );
        List<Long> columnIds = new ArrayList<>();
@@ -314,7 +314,7 @@ public void testColumnPlacement() throws UnknownAdapterException {
        long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" );
        long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL );

-        long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true );
+        long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true, false, false );
        long columnId = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null );
        CatalogColumn column = catalog.getColumn( columnId );
@@ -345,7 +345,7 @@ public void testKey() throws GenericCatalogException {
        long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" );
        long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL );

-        long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true );
+        long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true, false, false );
        long columnId1 = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null );
        CatalogColumn column1 = catalog.getColumn( columnId1 );
@@ -403,7 +403,7 @@ public void testKey() throws GenericCatalogException {
        }

        // test foreign key
-        long tableId2 = catalog.addTable( "table2", schemaId, userId, EntityType.ENTITY, true );
+        long tableId2 = catalog.addTable( "table2", schemaId, userId, EntityType.ENTITY, true, false, false );
        long columnId3 = catalog.addColumn( "column3", tableId2, 0, PolyType.BIGINT, null, null, null, null, null, false, null );
        CatalogColumn column3 = catalog.getColumn( columnId3 );
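Note that the adjusted tests only thread the new arguments through; none of them asserts the flags themselves. A sketch of a test one might add, following the conventions visible in CatalogTest above; the method name is hypothetical and it assumes schemaId and userId set up as in the surrounding tests plus a getTable( long ) lookup:

```java
@Test
public void testCachedAndHiddenFlags() throws GenericCatalogException {
    long tableId = catalog.addTable( "table_flags", schemaId, userId, EntityType.ENTITY, true, true, true );
    CatalogTable table = catalog.getTable( tableId );

    // the flags passed at creation time should be persisted on the catalog entity
    assertTrue( table.cached );
    assertTrue( table.hidden );
}
```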
null, null, null, false, null );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
index 08efa10ed4..047fb85e95 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java
@@ -248,7 +248,7 @@ public void execute( Context context, Statement statement, QueryParameters param
                ifNotExists,
                stores,
                placementType,
-                statement );
+                false, statement, false );

        if ( partitionType != null ) {
            DdlManager.getInstance().addPartitioning(
diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java
index 3a98c95de6..af0adc232b 100644
--- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java
+++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java
@@ -63,7 +63,7 @@ public void exportTest() {
                ImmutableList.of(),
                true,
                PartitionProperty.builder().build(),
-                ImmutableList.of() );
+                ImmutableList.of(), false, false );
        Catalog catalog = Catalog.getInstance();
        Arrays.asList(
                new CatalogColumn( 5, "sid", 4, 1, 1, 1, PolyType.INTEGER, null, null, null, null, null, false, null, null ),
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index a2c77cc86e..cf0ac77149 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -395,6 +395,9 @@ void getSchemaTree( final Context ctx ) {
            ArrayList collectionTree = new ArrayList<>();
            List<CatalogTable> tables = catalog.getTables( schema.id, null );
            for ( CatalogTable table : tables ) {
+                if ( table.hidden ) {
+                    continue;
+                }
                String icon = "fa fa-table";
                if ( table.entityType == EntityType.SOURCE ) {
                    icon = "fa fa-plug";
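Skipping hidden tables in getSchemaTree keeps them addressable by the internal machinery while removing them from the UI sidebar. The loop guard could equally be written as a stream filter; an equivalent sketch using the same types as the hunk above (assumes java.util.stream.Collectors is imported):

```java
// Equivalent to the loop guard: drop hidden tables before building the tree.
List<CatalogTable> visibleTables = catalog.getTables( schema.id, null ).stream()
        .filter( table -> !table.hidden )
        .collect( Collectors.toList() );

for ( CatalogTable table : visibleTables ) {
    String icon = "fa fa-table";
    // ... unchanged tree-building logic ...
}
```

Filtering at the presentation layer is a deliberately light-weight choice: the catalog itself stays unaware of visibility rules, and any consumer that needs the hidden entities can still reach them through the regular catalog API.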