From a86b3618de3d0fa953e18f55543ae386c693d040 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Attila=20S=C3=BCle?=
Date: Fri, 16 Sep 2022 21:23:08 +0200
Subject: [PATCH 01/26] merge-columns SqlAlterTableMergeColumns could be
 called

---
 sql-language/src/main/codegen/config.fmpp     |   2 +
 .../src/main/codegen/includes/parserImpls.ftl |   9 ++
 .../src/main/codegen/templates/Parser.jj      |   1 +
 .../altertable/SqlAlterTableMergeColumns.java | 136 ++++++++++++++++++
 .../java/org/polypheny/db/webui/Crud.java     |  96 ++++++++++++-
 .../org/polypheny/db/webui/HttpServer.java    |   2 +
 .../models/requests/MergeColumnsRequest.java  |  30 ++++
 7 files changed, 275 insertions(+), 1 deletion(-)
 create mode 100644 sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java
 create mode 100644 webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java

diff --git a/sql-language/src/main/codegen/config.fmpp b/sql-language/src/main/codegen/config.fmpp
index 21de6736c0..0e279e2724 100644
--- a/sql-language/src/main/codegen/config.fmpp
+++ b/sql-language/src/main/codegen/config.fmpp
@@ -84,6 +84,7 @@ data: {
       "org.polypheny.db.sql.sql.ddl.alterschema.SqlAlterSchemaRename"
       "org.polypheny.db.sql.sql.ddl.altertable.SqlAlterSourceTableAddColumn"
       "org.polypheny.db.sql.sql.ddl.altertable.SqlAlterTableAddColumn"
+      "org.polypheny.db.sql.sql.ddl.altertable.SqlAlterTableMergeColumns"
       "org.polypheny.db.sql.sql.ddl.altertable.SqlAlterTableAddForeignKey"
       "org.polypheny.db.sql.sql.ddl.altertable.SqlAlterTableAddIndex"
       "org.polypheny.db.sql.sql.ddl.altertable.SqlAlterTableAddPartitions"
@@ -161,6 +162,7 @@ data: {
       "CATALOG_NAME"
       "CENTURY"
       "CONFIG"
+      "COLUMNS"
       "CHAIN"
       "CHARACTER_SET_CATALOG"
       "CHARACTER_SET_NAME"
diff --git a/sql-language/src/main/codegen/includes/parserImpls.ftl b/sql-language/src/main/codegen/includes/parserImpls.ftl
index b7fc5f85a5..1c4732e32a 100644
--- a/sql-language/src/main/codegen/includes/parserImpls.ftl
+++ b/sql-language/src/main/codegen/includes/parserImpls.ftl
@@ -651,6 +651,15 @@ SqlAlterTable SqlAlterTable(Span s) :
         {
             return new SqlAlterTableMergePartitions(s.end(this), table);
         }
+    |
+        <MERGE> <COLUMNS>
+        columnList = ParenthesizedSimpleIdentifierList()
+        <IN>
+        name = SimpleIdentifier()
+        type = DataType()
+        {
+            return new SqlAlterTableMergeColumns(s.end(this), table, columnList, name, type);
+        }
     )
 }

diff --git a/sql-language/src/main/codegen/templates/Parser.jj b/sql-language/src/main/codegen/templates/Parser.jj
index be8c402c3c..cc4e761b37 100644
--- a/sql-language/src/main/codegen/templates/Parser.jj
+++ b/sql-language/src/main/codegen/templates/Parser.jj
@@ -6350,6 +6350,7 @@ SqlPostfixOperator PostfixRowOperator() :
 |   < MEASURES: "MEASURES" >
 |   < MEMBER: "MEMBER" >
 |   < MERGE: "MERGE" >
+|   < COLUMNS: "COLUMNS" >
 |   < MESSAGE_LENGTH: "MESSAGE_LENGTH" >
 |   < MESSAGE_OCTET_LENGTH: "MESSAGE_OCTET_LENGTH" >
 |   < MESSAGE_TEXT: "MESSAGE_TEXT" >
diff --git a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java
new file mode 100644
index 0000000000..2f30303c66
--- /dev/null
+++ b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2019-2022 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.sql.sql.ddl.altertable; + + +import java.util.List; +import java.util.Objects; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.catalog.Catalog.TableType; +import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.languages.QueryParameters; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.sql.sql.SqlDataTypeSpec; +import org.polypheny.db.sql.sql.SqlIdentifier; +import org.polypheny.db.sql.sql.SqlNode; +import org.polypheny.db.sql.sql.SqlNodeList; +import org.polypheny.db.sql.sql.SqlWriter; +import org.polypheny.db.sql.sql.ddl.SqlAlterTable; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.util.ImmutableNullableList; + + +/** + * Parse tree for {@code ALTER TABLE name MERGE COLUMNS name} statement. + */ +@Slf4j +public class SqlAlterTableMergeColumns extends SqlAlterTable { + + private final SqlIdentifier table; + private final SqlNodeList columnsToMerge; + private final SqlIdentifier newColumnName; // Can be null + + private final SqlDataTypeSpec type; + + public SqlAlterTableMergeColumns( + ParserPos pos, + SqlIdentifier table, + SqlNodeList columnsToMerge, + SqlIdentifier newColumnName, + SqlDataTypeSpec type ) { + super( pos ); + this.table = Objects.requireNonNull( table ); + this.columnsToMerge = columnsToMerge; + this.newColumnName = newColumnName; + this.type = type; + } + + + @Override + public List getOperandList() { + return ImmutableNullableList.of( table, columnsToMerge ); + } + + + @Override + public List getSqlOperandList() { + return ImmutableNullableList.of( table, columnsToMerge ); + } + + + @Override + public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { + writer.keyword( "ALTER" ); + writer.keyword( "TABLE" ); + table.unparse( writer, leftPrec, rightPrec ); + writer.keyword( "MERGE" ); + writer.keyword( "COLUMNS" ); + columnsToMerge.unparse( writer, leftPrec, rightPrec ); + if ( newColumnName != null ) { + writer.keyword( "AFTER" ); + newColumnName.unparse( writer, leftPrec, rightPrec ); + } + } + + + @Override + public void execute( Context context, Statement statement, QueryParameters parameters ) { + CatalogTable catalogTable = getCatalogTable( context, table ); + + if ( catalogTable.tableType != TableType.TABLE ) { + throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); + } + + /* + if ( columnsToMerge.names.size() != 1 ) { + throw new RuntimeException( "No FQDN allowed here: " + columnsToMerge.toString() ); + } + */ + + // Make sure that all adapters are of type store (and not source) + for ( int storeId : catalogTable.dataPlacements ) { + getDataStoreInstance( storeId ); + } + + /* + try { + DdlManager.getInstance().addColumn( + column.getSimple(), + catalogTable, + beforeColumnName == null ? null : beforeColumnName.getSimple(), + newColumnName == null ? 
null : newColumnName.getSimple(),
+                    ColumnTypeInformation.fromDataTypeSpec( type ),
+                    nullable,
+                    defaultValue,
+                    statement );
+
+
+        } catch ( NotNullAndDefaultValueException e ) {
+            throw CoreUtil.newContextException( column.getPos(), RESOURCE.notNullAndNoDefaultValue( column.getSimple() ) );
+        } catch ( ColumnAlreadyExistsException e ) {
+            throw CoreUtil.newContextException( column.getPos(), RESOURCE.columnExists( column.getSimple() ) );
+        } catch ( ColumnNotExistsException e ) {
+            throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) );
+        }
+
+        */
+    }
+
+}
+
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index b6ce68f080..a498ab58e2 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -214,6 +214,7 @@ import org.polypheny.db.webui.models.requests.ExploreData;
 import org.polypheny.db.webui.models.requests.ExploreTables;
 import org.polypheny.db.webui.models.requests.HubRequest;
+import org.polypheny.db.webui.models.requests.MergeColumnsRequest;
 import org.polypheny.db.webui.models.requests.PartitioningRequest;
 import org.polypheny.db.webui.models.requests.PartitioningRequest.ModifyPartitionRequest;
 import org.polypheny.db.webui.models.requests.QueryExplorationRequest;
@@ -1722,7 +1723,6 @@ void addColumn( final Context ctx ) {
     ctx.json( result );
 }
 
-
 /**
  * Delete a column of a table
  */
@@ -1751,6 +1751,100 @@ void dropColumn( final Context ctx ) {
     ctx.json( result );
 }
 
+    /**
+     * Merge multiple columns of a table into one new column
+     */
+    void mergeColumns( final Context ctx ) {
+        MergeColumnsRequest request = ctx.bodyAsClass( MergeColumnsRequest.class );
+        Transaction transaction = getTransaction();
+
+        String[] t = request.tableId.split( "\\." );
+        String tableId = String.format( "\"%s\".\"%s\"", t[0], t[1] );
+
+        // TODO: should be made more sophisticated.
+        DbColumn newColumn = new DbColumn(request.newColumnName, "varchar", true,
+            Arrays.stream( request.columnsToMerge ).mapToInt( o -> o.precision ).sum(), null, null);
+        newColumn.collectionsType = "";
+
+        String as = "";
+        String dataType = newColumn.dataType;
+        if ( newColumn.as != null ) {
+            //for data sources
+            as = "AS \"" + newColumn.as + "\"";
+            dataType = "";
+        }
+        String listOfColumnsToMerge =
+            Arrays.stream( request.columnsToMerge )
+                .map( s -> "\"" + s.name + "\"")
+                .collect( Collectors.joining(", "));
+        // TODO: try without toUpperCase
+        String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) IN \"%s\" %s %s",
+            tableId, listOfColumnsToMerge, newColumn.name, as, dataType.toUpperCase() );
+
+        //we don't want precision, scale etc. for source columns
+        if ( newColumn.as == null ) {
+            if (newColumn.precision != null ) {
+                query = query + "(" + newColumn.precision;
+                if ( newColumn.scale != null ) {
+                    query = query + "," + newColumn.scale;
+                }
+                query = query + ")";
+            }
+            if ( !newColumn.collectionsType.equals( "" ) ) {
+                query = query + " " + newColumn.collectionsType;
+                int dimension = newColumn.dimension == null ? -1 : newColumn.dimension;
+                int cardinality = newColumn.cardinality == null ? -1 : newColumn.cardinality;
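+                /* -1 presumably acts as the sentinel for "not specified" when no array
+                   dimension or cardinality was supplied, so the generated DDL below still
+                   carries a well-formed "(dimension,cardinality)" suffix (an assumption
+                   inferred from the surrounding code, not verified against the parser). */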
+                query = query + String.format( "(%d,%d)", dimension, cardinality );
+            }
+            if ( !newColumn.nullable ) {
+                query = query + " NOT NULL";
+            }
+        }
+        if ( newColumn.defaultValue != null && !newColumn.defaultValue.equals( "" ) ) {
+            query = query + " DEFAULT ";
+            if ( newColumn.collectionsType != null && !newColumn.collectionsType.equals( "" ) ) {
+                //handle the case if the user says "ARRAY[1,2,3]" or "[1,2,3]"
+                if ( !newColumn.defaultValue.startsWith( newColumn.collectionsType ) ) {
+                    query = query + newColumn.collectionsType;
+                }
+                query = query + newColumn.defaultValue;
+            } else {
+                switch ( newColumn.dataType ) {
+                    case "BIGINT":
+                    case "INTEGER":
+                    case "SMALLINT":
+                    case "TINYINT":
+                    case "FLOAT":
+                    case "DOUBLE":
+                    case "DECIMAL":
+                        newColumn.defaultValue = newColumn.defaultValue.replace( ",", "." );
+                        BigDecimal b = new BigDecimal( newColumn.defaultValue );
+                        query = query + b.toString();
+                        break;
+                    case "VARCHAR":
+                        query = query + String.format( "'%s'", newColumn.defaultValue );
+                        break;
+                    default:
+                        query = query + newColumn.defaultValue;
+                }
+            }
+        }
+        Result result;
+        try {
+            int affectedRows = executeSqlUpdate( transaction, query );
+            transaction.commit();
+            result = new Result( affectedRows ).setGeneratedQuery( query );
+        } catch ( TransactionException | QueryExecutionException e ) {
+            log.error( "Caught exception while merging columns", e );
+            result = new Result( e );
+            try {
+                transaction.rollback();
+            } catch ( TransactionException ex ) {
+                log.error( "Could not rollback", ex );
+            }
+        }
+        ctx.json( result );
+    }
 
 /**
  * Get artificially generated index/foreign key/constraint names for placeholders in the UI
diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java
index 6b9d316f18..03f0eca2be 100644
--- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java
+++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java
@@ -241,6 +241,8 @@ private void crudRoutes( Javalin webuiServer, Crud crud ) {
 
     webuiServer.post( "/dropColumn", crud::dropColumn );
 
+    webuiServer.post( "/mergeColumns", crud::mergeColumns );
+
     webuiServer.post( "/getTables", crud::getTables );
 
     webuiServer.post( "/renameTable", crud::renameTable );
diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java
new file mode 100644
index 0000000000..bdc675f512
--- /dev/null
+++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2019-2021 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.polypheny.db.webui.models.requests; + + +import org.polypheny.db.webui.models.DbColumn; + + +public class MergeColumnsRequest extends UIRequest { + + public DbColumn[] columnsToMerge; + public String newColumnName; + // for data sources + public String tableType; + +} From 66cb08033e0f16aa946d38a41b3356f849560fba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Mon, 19 Sep 2022 13:44:12 +0200 Subject: [PATCH 02/26] merge-columns fist tries in DataMigrator --- .../java/org/polypheny/db/ddl/DdlManager.java | 3 + .../polypheny/db/processing/DataMigrator.java | 6 ++ .../org/polypheny/db/ddl/DdlManagerImpl.java | 57 ++++++++++++++ .../db/processing/DataMigratorImpl.java | 76 +++++++++++++++++++ .../altertable/SqlAlterTableMergeColumns.java | 36 ++++----- .../java/org/polypheny/db/webui/Crud.java | 11 +-- 6 files changed, 164 insertions(+), 25 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 761a2f3c15..c55d531656 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -204,6 +204,9 @@ public static DdlManager getInstance() { */ public abstract void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, List columnNames, List refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException; + // TODO: add comments + public abstract void mergeColumns( CatalogTable catalogTable, List columnNamesToMerge, String newColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException; + /** * Adds an index to a table * diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index e217157629..4bfbb66300 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -75,6 +75,12 @@ void copyPartitionData( List sourcePartitionIds, List targetPartitionIds ); + void mergeColumns( + Transaction transaction, + CatalogAdapter store, + List sourceColumns, + CatalogColumn targetColumn); + AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); //is used within copyData diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 2f50a8569c..d0468c7e8f 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -564,6 +564,63 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); } + public void mergeColumns( CatalogTable catalogTable, List columnNamesToMerge, String newColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException { + + if ( catalog.checkIfExistsColumn( catalogTable.id, newColumnName ) ) { + throw new ColumnAlreadyExistsException( newColumnName, catalogTable.name ); + } + + CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, columnNamesToMerge.get( columnNamesToMerge.size()-1 ) ); + int position = 
updateAdjacentPositions( catalogTable, null, afterColumn ); + + long columnId = catalog.addColumn( + newColumnName, + catalogTable.id, + position, + type.type, + type.collectionType, + type.precision, + type.scale, + type.dimension, + type.cardinality, + true, // TODO: value is missing + Collation.getDefaultCollation() + ); + + // TODO: get DEFAULT from parameter + // Add default value + addDefaultValue( "DEFAULT", columnId ); + CatalogColumn addedColumn = catalog.getColumn ( columnId ); + + // Ask router on which stores this column shall be placed + List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); + DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); + + // Build catalog columns + List sourceCatalogColumns = new LinkedList<>(); + for ( String columnName : columnNamesToMerge ) { + sourceCatalogColumns.add( catalog.getColumn( catalogTable.id, columnName ) ); + } + CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, newColumnName ); + + // Add column on underlying data stores and insert default value + for ( DataStore store : stores ) { + catalog.addColumnPlacement( + store.getAdapterId(), + addedColumn.id, + PlacementType.AUTOMATIC, + null, // Will be set later + null, // Will be set later + null // Will be set later + );//Not a valid partitionID --> placeholder + AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); + // Call migrator + dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn); + } + + // Reset plan cache implementation cache & routing cache + statement.getQueryProcessor().resetCaches(); + } @Override public void addIndex( CatalogTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 36b3586362..3433d15fde 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -374,6 +374,52 @@ private AlgRoot buildUpdateStatement( Statement statement, List qualifiedTableName = ImmutableList.of( + PolySchemaBuilder.buildAdapterSchemaName( + to.adapterUniqueName, + to.getLogicalSchemaName(), + to.physicalSchemaName ), + to.getLogicalTableName() + "_" + partitionId ); + AlgOptTable physical = statement.getTransaction().getCatalogReader().getTableForMember( qualifiedTableName ); + ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); + + AlgOptCluster cluster = AlgOptCluster.create( + statement.getQueryProcessor().getPlanner(), + new RexBuilder( statement.getTransaction().getTypeFactory() ) ); + AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + + AlgBuilder builder = AlgBuilder.create( statement, cluster ); + builder.scan( qualifiedTableName ); + + List columnNames = new LinkedList<>(); + List values = new LinkedList<>(); + + 
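+        /* The merged target column is written through a dynamic parameter: the
+           RexDynamicParam created below is keyed by the catalog column id, and the
+           migrator later binds one batch of values under that same id via
+           DataContext#addParameterValues before executing the update. */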
CatalogColumn catalogColumn = Catalog.getInstance().getColumn( to.columnId ); + columnNames.add( to.getLogicalColumnName() ); + values.add( new RexDynamicParam( catalogColumn.getAlgDataType( typeFactory ), (int) catalogColumn.id ) ); + + builder.projectPlus( values ); + + AlgNode node = modifiableTable.toModificationAlg( + cluster, + physical, + statement.getTransaction().getCatalogReader(), + builder.build(), + Operation.UPDATE, + columnNames, + values, + false + ); + AlgRoot algRoot = AlgRoot.of( node, Kind.UPDATE ); + AlgStructuredTypeFlattener typeFlattener = new AlgStructuredTypeFlattener( + AlgBuilder.create( statement, algRoot.alg.getCluster() ), + algRoot.alg.getCluster().getRexBuilder(), + algRoot.alg::getCluster, + true ); + return algRoot.withAlg( typeFlattener.rewrite( algRoot.alg ) ); + } + @Override public AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ) { @@ -681,4 +727,34 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Ca } } + + @Override + public void mergeColumns( Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn ) { + CatalogTable table = Catalog.getInstance().getTable( sourceColumns.get( 0 ).tableId ); + + // Check Lists + List sourceColumnPlacements = new LinkedList<>(); + for ( CatalogColumn catalogColumn : sourceColumns ) { + sourceColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) ); + } + + CatalogColumnPlacement targetColumnPlacement = Catalog.getInstance().getColumnPlacement( store.id, targetColumn.id ); + + Statement sourceStatement = transaction.createStatement(); + Statement targetStatement = transaction.createStatement(); + + Map> placementDistribution = new HashMap<>(); + placementDistribution.put( + table.partitionProperty.partitionIds.get( 0 ), + selectSourcePlacements( table, sourceColumns, -1) ); + + Map> subDistribution = new HashMap<>( placementDistribution ); + subDistribution.keySet().retainAll( Arrays.asList( table.partitionProperty.partitionIds.get( 0 ) ) ); + + AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution ); + AlgRoot targetAlg = buildUpdateStatementForMerge( sourceStatement, targetColumnPlacement, table.partitionProperty.partitionIds.get( 0 ) ); + + executeQuery( sourceColumns, sourceAlg, sourceStatement, targetStatement, targetAlg, false, false ); + } + } diff --git a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java index 2f30303c66..4a897466b5 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java @@ -17,11 +17,20 @@ package org.polypheny.db.sql.sql.ddl.altertable; +import static org.polypheny.db.util.Static.RESOURCE; + import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.Catalog.TableType; import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.ddl.DdlManager; +import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; +import org.polypheny.db.ddl.exception.ColumnNotExistsException; +import 
org.polypheny.db.ddl.exception.NotNullAndDefaultValueException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -33,6 +42,7 @@ import org.polypheny.db.sql.sql.SqlWriter; import org.polypheny.db.sql.sql.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; +import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -97,39 +107,25 @@ public void execute( Context context, Statement statement, QueryParameters param throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." ); } - /* - if ( columnsToMerge.names.size() != 1 ) { - throw new RuntimeException( "No FQDN allowed here: " + columnsToMerge.toString() ); - } - */ - // Make sure that all adapters are of type store (and not source) for ( int storeId : catalogTable.dataPlacements ) { getDataStoreInstance( storeId ); } - /* try { - DdlManager.getInstance().addColumn( - column.getSimple(), + DdlManager.getInstance().mergeColumns( catalogTable, - beforeColumnName == null ? null : beforeColumnName.getSimple(), - newColumnName == null ? null : newColumnName.getSimple(), + columnsToMerge.getList().stream().map( Node::toString ).collect( Collectors.toList()), + newColumnName.getSimple(), ColumnTypeInformation.fromDataTypeSpec( type ), - nullable, - defaultValue, statement ); - - - } catch ( NotNullAndDefaultValueException e ) { - throw CoreUtil.newContextException( column.getPos(), RESOURCE.notNullAndNoDefaultValue( column.getSimple() ) ); + } catch ( UnknownColumnException e ) { + throw CoreUtil.newContextException( columnsToMerge.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); } catch ( ColumnAlreadyExistsException e ) { - throw CoreUtil.newContextException( column.getPos(), RESOURCE.columnExists( column.getSimple() ) ); + throw CoreUtil.newContextException( newColumnName.getPos(), RESOURCE.columnExists( newColumnName.getSimple() ) ); } catch ( ColumnNotExistsException e ) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) ); } - - */ } } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index a498ab58e2..65d2689def 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1761,9 +1761,9 @@ void mergeColumns( final Context ctx ) { String[] t = request.tableId.split( "\\." ); String tableId = String.format( "\"%s\".\"%s\"", t[0], t[1] ); - // TODO: should be made more sophisticated. 
- DbColumn newColumn = new DbColumn(request.newColumnName, "varchar", true, - Arrays.stream( request.columnsToMerge ).mapToInt( o -> o.precision ).sum(), null, null); + boolean nullable = Arrays.stream( request.columnsToMerge ).allMatch( c -> c.nullable ); + Integer precison = Arrays.stream( request.columnsToMerge ).mapToInt( c -> c.precision ).sum(); + DbColumn newColumn = new DbColumn(request.newColumnName, "varchar", nullable, precison, null, null); newColumn.collectionsType = ""; String as = ""; @@ -1773,13 +1773,13 @@ void mergeColumns( final Context ctx ) { as = "AS \"" + newColumn.as + "\""; dataType = ""; } + String listOfColumnsToMerge = Arrays.stream( request.columnsToMerge ) .map( s -> "\"" + s.name + "\"") .collect( Collectors.joining(", ")); - // TODO: try without toUpperCase String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) IN \"%s\" %s %s", - tableId, listOfColumnsToMerge, newColumn.name, as, dataType.toUpperCase() ); + tableId, listOfColumnsToMerge, newColumn.name, as, dataType ); //we don't want precision, scale etc. for source columns if ( newColumn.as == null ) { @@ -1800,6 +1800,7 @@ void mergeColumns( final Context ctx ) { query = query + " NOT NULL"; } } + // TODO: merge the DEFAULT values too.(?) if ( newColumn.defaultValue != null && !newColumn.defaultValue.equals( "" ) ) { query = query + " DEFAULT "; if ( newColumn.collectionsType != null && !newColumn.collectionsType.equals( "" ) ) { From 3ea3f46ac6688da807f94689a66a687948c03805 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 20 Sep 2022 20:51:46 +0200 Subject: [PATCH 03/26] merge-columns update working, method should be reorganized --- .../polypheny/db/processing/DataMigrator.java | 2 + .../db/processing/DataMigratorImpl.java | 141 ++++++++++++++++-- 2 files changed, 130 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 4bfbb66300..403f8ba662 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -86,6 +86,8 @@ void mergeColumns( //is used within copyData void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); + void executeMergeQuery( List sourceColumns, List targetColumns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); + AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 3433d15fde..26cdb674fc 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -221,6 +221,116 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl } + @Override + public void executeMergeQuery( List sourceColumns, List targetColumns, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { + try { + PolyResult result; + if ( isMaterializedView ) { + result = 
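+            /* Both branches prepare the same source select; they differ only in the
+               boolean flags handed to prepareQuery (the materialized-view branch forwards
+               doesSubstituteOrderBy instead of the defaults used for a plain copy). */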
sourceStatement.getQueryProcessor().prepareQuery( + sourceAlg, + sourceAlg.alg.getCluster().getTypeFactory().builder().build(), + false, + false, + doesSubstituteOrderBy ); + } else { + result = sourceStatement.getQueryProcessor().prepareQuery( + sourceAlg, + sourceAlg.alg.getCluster().getTypeFactory().builder().build(), + true, + false, + false ); + } + final Enumerable enumerable = result.enumerable( sourceStatement.getDataContext() ); + //noinspection unchecked + Iterator sourceIterator = enumerable.iterator(); + + Map sourceColMapping = new HashMap<>(); + for ( CatalogColumn catalogColumn : sourceColumns ) { + int i = 0; + for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { + if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { + sourceColMapping.put( catalogColumn.id, i ); + } + i++; + } + } + + if ( isMaterializedView ) { + for ( CatalogColumn catalogColumn : sourceColumns ) { + if ( !sourceColMapping.containsKey( catalogColumn.id ) ) { + int i = sourceColMapping.values().stream().mapToInt( v -> v ).max().orElseThrow( NoSuchElementException::new ); + sourceColMapping.put( catalogColumn.id, i + 1 ); + } + } + } + + Map targetColMapping = new HashMap<>(); + for ( CatalogColumn catalogColumn : targetColumns ) { + int i = 0; + for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { + if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { + targetColMapping.put( catalogColumn.id, i ); + } + i++; + } + } + // TODO: ismaterializedView for targetMapping? + + int batchSize = RuntimeConfig.DATA_MIGRATOR_BATCH_SIZE.getInteger(); + int i = 0; + while ( sourceIterator.hasNext() ) { + List> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() ); + Map> values = new HashMap<>(); + + for ( List list : rows ) { + for ( Map.Entry entry : sourceColMapping.entrySet() ) { + if ( !values.containsKey( entry.getKey() ) ) { + values.put( entry.getKey(), new LinkedList<>() ); + } + if ( isMaterializedView ) { + if ( entry.getValue() > list.size() - 1 ) { + values.get( entry.getKey() ).add( i ); + i++; + } else { + values.get( entry.getKey() ).add( list.get( entry.getValue() ) ); + } + } else { + values.get( entry.getKey() ).add( list.get( entry.getValue() ) ); + } + } + } + List fields; + if ( isMaterializedView ) { + fields = targetAlg.alg.getTable().getRowType().getFieldList(); + } else { + fields = sourceAlg.validatedRowType.getFieldList(); + } + int pos = 0; + for ( Map.Entry> v : values.entrySet() ) { + targetStatement.getDataContext().addParameterValues( + (v.getValue().get( 0 ) instanceof String) ? targetColumns.get( 0 ).id : v.getKey(), + fields.get( sourceColMapping.get( v.getKey() ) ).getType(), // TODO: get the type of the targetcolumn + (v.getValue().get( 0 ) instanceof String) ? 
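+                    /* Work-in-progress probe: appending " testtestest" below merely
+                       verifies that string values travel through the update path; the
+                       actual row-wise concatenation of the source values replaces this
+                       in the follow-up rework of this method. */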
v.getValue().stream().map( s -> ((String)s).concat( " testtestest" ) ).collect( Collectors.toList()) : v.getValue() ) ; + pos++; + } + + Iterator iterator = targetStatement.getQueryProcessor() + .prepareQuery( targetAlg, sourceAlg.validatedRowType, true, false, false ) + .enumerable( targetStatement.getDataContext() ) + .iterator(); + //noinspection WhileLoopReplaceableByForEach + while ( iterator.hasNext() ) { + iterator.next(); + } + targetStatement.getDataContext().resetParameterValues(); + } + } catch ( Throwable t ) { + throw new RuntimeException( t ); + } + } + + + @Override public AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ) { List qualifiedTableName = ImmutableList.of( @@ -353,7 +463,7 @@ private AlgRoot buildUpdateStatement( Statement statement, List sourceColumns, CatalogColumn targetColumn ) { CatalogTable table = Catalog.getInstance().getTable( sourceColumns.get( 0 ).tableId ); + CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey ); - // Check Lists - List sourceColumnPlacements = new LinkedList<>(); - for ( CatalogColumn catalogColumn : sourceColumns ) { - sourceColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) ); + List selectColumnList = new LinkedList<>( sourceColumns ); + + // Add primary keys to select column list + for ( long cid : primaryKey.columnIds ) { + CatalogColumn catalogColumn = Catalog.getInstance().getColumn( cid ); + if ( !selectColumnList.contains( catalogColumn ) ) { + selectColumnList.add( catalogColumn ); + } } + Map> sourceColumnPlacements = new HashMap<>(); + sourceColumnPlacements.put( + table.partitionProperty.partitionIds.get( 0 ), + selectSourcePlacements( table, selectColumnList, -1) ); + CatalogColumnPlacement targetColumnPlacement = Catalog.getInstance().getColumnPlacement( store.id, targetColumn.id ); Statement sourceStatement = transaction.createStatement(); Statement targetStatement = transaction.createStatement(); - Map> placementDistribution = new HashMap<>(); - placementDistribution.put( - table.partitionProperty.partitionIds.get( 0 ), - selectSourcePlacements( table, sourceColumns, -1) ); - - Map> subDistribution = new HashMap<>( placementDistribution ); + Map> subDistribution = new HashMap<>( sourceColumnPlacements ); subDistribution.keySet().retainAll( Arrays.asList( table.partitionProperty.partitionIds.get( 0 ) ) ); AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution ); - AlgRoot targetAlg = buildUpdateStatementForMerge( sourceStatement, targetColumnPlacement, table.partitionProperty.partitionIds.get( 0 ) ); + AlgRoot targetAlg = buildUpdateStatement( targetStatement, Collections.singletonList( targetColumnPlacement ), table.partitionProperty.partitionIds.get( 0 ) ); - executeQuery( sourceColumns, sourceAlg, sourceStatement, targetStatement, targetAlg, false, false ); + executeMergeQuery( selectColumnList, Collections.singletonList(targetColumn), sourceAlg, sourceStatement, targetStatement, targetAlg, false, false ); } } From c4d0a535c0c32404fa1f73fcb6fa05835013416a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Thu, 22 Sep 2022 17:33:52 +0200 Subject: [PATCH 04/26] merge-columns merge of columns working --- .../polypheny/db/processing/DataMigrator.java | 2 +- .../db/processing/DataMigratorImpl.java | 99 +++++-------------- 2 files changed, 27 insertions(+), 74 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java 
b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 403f8ba662..664f923c16 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -86,7 +86,7 @@ void mergeColumns( //is used within copyData void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); - void executeMergeQuery( List sourceColumns, List targetColumns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); + void executeMergeQuery( List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 26cdb674fc..5b767e9314 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -39,6 +39,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.TableModify.Operation; import org.polypheny.db.algebra.logical.LogicalValues; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeSystem; @@ -222,7 +223,7 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl @Override - public void executeMergeQuery( List sourceColumns, List targetColumns, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { + public void executeMergeQuery(List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { try { PolyResult result; if ( isMaterializedView ) { @@ -264,18 +265,6 @@ public void executeMergeQuery( List sourceColumns, List targetColMapping = new HashMap<>(); - for ( CatalogColumn catalogColumn : targetColumns ) { - int i = 0; - for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { - if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { - targetColMapping.put( catalogColumn.id, i ); - } - i++; - } - } - // TODO: ismaterializedView for targetMapping? - int batchSize = RuntimeConfig.DATA_MIGRATOR_BATCH_SIZE.getInteger(); int i = 0; while ( sourceIterator.hasNext() ) { @@ -299,19 +288,28 @@ public void executeMergeQuery( List sourceColumns, List fields; - if ( isMaterializedView ) { - fields = targetAlg.alg.getTable().getRowType().getFieldList(); - } else { - fields = sourceAlg.validatedRowType.getFieldList(); - } - int pos = 0; + + final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + + List mergedValueList = null; for ( Map.Entry> v : values.entrySet() ) { - targetStatement.getDataContext().addParameterValues( - (v.getValue().get( 0 ) instanceof String) ? 
targetColumns.get( 0 ).id : v.getKey(), - fields.get( sourceColMapping.get( v.getKey() ) ).getType(), // TODO: get the type of the targetcolumn - (v.getValue().get( 0 ) instanceof String) ? v.getValue().stream().map( s -> ((String)s).concat( " testtestest" ) ).collect( Collectors.toList()) : v.getValue() ) ; - pos++; + if (v.getValue().get( 0 ) instanceof String) { + if( mergedValueList == null ) { + mergedValueList = v.getValue(); + } else { + int j = 0; + for (Object value : mergedValueList) { + mergedValueList.set( j, ( (String) value).concat( " " + v.getValue().get( j++ ) ) ); + } + } + } + } + targetStatement.getDataContext().addParameterValues(targetColumn.id, targetColumn.getAlgDataType( typeFactory ) , mergedValueList ); + + for ( CatalogColumn primaryKey : primaryKeyColumns ) { + AlgDataType primaryKeyAlgDataType = primaryKey.getAlgDataType( typeFactory ); + List primaryKeyValues = values.get( primaryKey.id ); + targetStatement.getDataContext().addParameterValues(primaryKey.id, primaryKeyAlgDataType , primaryKeyValues ); } Iterator iterator = targetStatement.getQueryProcessor() @@ -463,52 +461,6 @@ private AlgRoot buildUpdateStatement( Statement statement, List qualifiedTableName = ImmutableList.of( - PolySchemaBuilder.buildAdapterSchemaName( - to.adapterUniqueName, - to.getLogicalSchemaName(), - to.physicalSchemaName ), - to.getLogicalTableName() + "_" + partitionId ); - AlgOptTable physical = statement.getTransaction().getCatalogReader().getTableForMember( qualifiedTableName ); - ModifiableTable modifiableTable = physical.unwrap( ModifiableTable.class ); - - AlgOptCluster cluster = AlgOptCluster.create( - statement.getQueryProcessor().getPlanner(), - new RexBuilder( statement.getTransaction().getTypeFactory() ) ); - AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - - AlgBuilder builder = AlgBuilder.create( statement, cluster ); - builder.scan( qualifiedTableName ); - - List columnNames = new LinkedList<>(); - List values = new LinkedList<>(); - - CatalogColumn catalogColumn = Catalog.getInstance().getColumn( to.columnId ); - columnNames.add( to.getLogicalColumnName() ); - values.add( new RexDynamicParam( catalogColumn.getAlgDataType( typeFactory ), (int) catalogColumn.id ) ); - builder.projectPlus( values ); AlgNode node = modifiableTable.toModificationAlg( @@ -530,7 +482,6 @@ private AlgRoot buildUpdateStatementForMerge( Statement statement, CatalogColumn return algRoot.withAlg( typeFlattener.rewrite( algRoot.alg ) ); } - @Override public AlgRoot getSourceIterator( Statement statement, Map> placementDistribution ) { @@ -844,6 +795,7 @@ public void mergeColumns( Transaction transaction, CatalogAdapter store, List selectColumnList = new LinkedList<>( sourceColumns ); + List primaryKeyList = new LinkedList<>( ); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { @@ -851,6 +803,7 @@ public void mergeColumns( Transaction transaction, CatalogAdapter store, List> sourceColumnPlacements = new HashMap<>(); @@ -869,7 +822,7 @@ public void mergeColumns( Transaction transaction, CatalogAdapter store, List Date: Thu, 22 Sep 2022 18:34:00 +0200 Subject: [PATCH 05/26] merge-columns removing of the old columns added --- dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index d0468c7e8f..0b3074c168 100644 --- 
a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -616,6 +616,10 @@ public void mergeColumns( CatalogTable catalogTable, List columnNamesToM
             AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn );
             // Call migrator
             dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn);
+
+            for ( CatalogColumn sourceCatalogColumn : sourceCatalogColumns ) {
+                catalog.deleteColumn( sourceCatalogColumn.id );
+            }
         }
 
         // Reset plan cache implementation cache & routing cache

From d7e5223f6ba656736e590ff340e638d41826b4bf Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Attila=20S=C3=BCle?=
Date: Fri, 23 Sep 2022 16:43:42 +0200
Subject: [PATCH 06/26] merge-columns variables renamed, docs added

---
 .../java/org/polypheny/db/ddl/DdlManager.java  | 12 ++++++++++--
 .../polypheny/db/processing/DataMigrator.java  | 18 ++++++++++++------
 .../org/polypheny/db/ddl/DdlManagerImpl.java   | 14 +++++++-------
 .../main/java/org/polypheny/db/webui/Crud.java |  8 ++++----
 .../models/requests/MergeColumnsRequest.java   |  4 ++--
 5 files changed, 35 insertions(+), 21 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index c55d531656..30fcffb8b1 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -204,8 +204,16 @@ public static DdlManager getInstance() {
      */
     public abstract void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, List<String> columnNames, List<String> refColumnNames, String constraintName, ForeignKeyOption onUpdate, ForeignKeyOption onDelete ) throws UnknownColumnException, GenericCatalogException;
 
-    // TODO: add comments
-    public abstract void mergeColumns( CatalogTable catalogTable, List<String> columnNamesToMerge, String newColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException;
+    /**
+     * Merge multiple columns into one new column
+     *
+     * @param catalogTable the table
+     * @param sourceColumnNames name of the columns to be merged
+     * @param targetColumnName name of the new column to be added
+     * @param type the SQL data type specification of the merged column
+     * @param statement the initial query statement
+     */
+    public abstract void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String targetColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException;
 
     /**
      * Adds an index to a table
diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
index 664f923c16..57c0aaf7c4 100644
--- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
+++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
@@ -75,17 +75,23 @@ void copyPartitionData(
             List<Long> sourcePartitionIds,
             List<Long> targetPartitionIds );
 
-    void mergeColumns(
-            Transaction transaction,
-            CatalogAdapter store,
-            List<CatalogColumn> sourceColumns,
-            CatalogColumn targetColumn);
+    /**
+     * Merges the data of several source columns into one new target column on the given store.
+     * The source values are read row by row and written to the target column by an update
+     * that is keyed on the table's primary key.
+     *
+     * @param transaction Transactional scope
+     * @param store Target store on which the columns are merged
+     * @param sourceColumns Columns to be merged
+     * @param targetColumn New column receiving the merged values
+     */
+    void mergeColumns( Transaction transaction, CatalogAdapter store, List<CatalogColumn> sourceColumns, CatalogColumn targetColumn );
 
     AlgRoot buildInsertStatement( Statement statement, List<CatalogColumnPlacement> to, long partitionId );
 
-    //is used within copyData
+    // is used within copyData
     void executeQuery( List<CatalogColumn> columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy );
 
+    // is used within mergeColumns
     void executeMergeQuery( List<CatalogColumn> primaryKeyColumns, List<CatalogColumn> sourceColumns, CatalogColumn targetColumn, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy );
 
     AlgRoot buildDeleteStatement( Statement statement, List<CatalogColumnPlacement> to, long partitionId );
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 0b3074c168..f8799aa221 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -564,17 +564,17 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis
         catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete );
     }
 
-    public void mergeColumns( CatalogTable catalogTable, List<String> columnNamesToMerge, String newColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException {
+    public void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String targetColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException {
 
-        if ( catalog.checkIfExistsColumn( catalogTable.id, newColumnName ) ) {
-            throw new ColumnAlreadyExistsException( newColumnName, catalogTable.name );
+        if ( catalog.checkIfExistsColumn( catalogTable.id, targetColumnName ) ) {
+            throw new ColumnAlreadyExistsException( targetColumnName, catalogTable.name );
         }
 
-        CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, columnNamesToMerge.get( columnNamesToMerge.size()-1 ) );
+        CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, sourceColumnNames.get( sourceColumnNames.size()-1 ) );
         int position = updateAdjacentPositions( catalogTable, null, afterColumn );
 
         long columnId = catalog.addColumn(
-                newColumnName,
+                targetColumnName,
                 catalogTable.id,
                 position,
                 type.type,
@@ -598,10 +598,10 @@ public void mergeColumns( CatalogTable catalogTable, List sourceColumnNa
         // Build catalog columns
         List<CatalogColumn> sourceCatalogColumns = new LinkedList<>();
-        for ( String columnName : columnNamesToMerge ) {
+        for ( String columnName : sourceColumnNames ) {
             sourceCatalogColumns.add( catalog.getColumn( catalogTable.id, columnName ) );
         }
-        CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, newColumnName );
+        CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, targetColumnName );
 
         // Add column on underlying data stores and insert default value
         for ( DataStore store : stores ) {
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 65d2689def..da8f87a0ea 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -1761,9 +1761,9 @@ void mergeColumns( final Context ctx ) {
         String[] t = request.tableId.split( "\\." );
         String tableId = String.format( "\"%s\".\"%s\"", t[0], t[1] );
 
-        boolean nullable = Arrays.stream( request.columnsToMerge ).allMatch( c -> c.nullable );
-        Integer precison = Arrays.stream( request.columnsToMerge ).mapToInt( c -> c.precision ).sum();
-        DbColumn newColumn = new DbColumn(request.newColumnName, "varchar", nullable, precison, null, null);
+        boolean nullable = Arrays.stream( request.sourceColumns ).allMatch( c -> c.nullable );
+        Integer precision = Arrays.stream( request.sourceColumns ).mapToInt( c -> c.precision ).sum();
+        DbColumn newColumn = new DbColumn(request.targetColumnName, "varchar", nullable, precision, null, null);
         newColumn.collectionsType = "";
 
         String as = "";
@@ -1775,7 +1775,7 @@ void mergeColumns( final Context ctx ) {
         }
 
         String listOfColumnsToMerge =
-            Arrays.stream( request.columnsToMerge )
+            Arrays.stream( request.sourceColumns )
                 .map( s -> "\"" + s.name + "\"")
                 .collect( Collectors.joining(", "));
         String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) IN \"%s\" %s %s",
diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java
index bdc675f512..7eb407e0ae 100644
--- a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java
+++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java
@@ -22,8 +22,8 @@
 
 public class MergeColumnsRequest extends UIRequest {
 
-    public DbColumn[] columnsToMerge;
-    public String newColumnName;
+    public DbColumn[] sourceColumns;
+    public String targetColumnName;
     // for data sources
     public String tableType;
 

From ad7732e232a1bd0e2d15548dbd37b1e20eb29a01 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Attila=20S=C3=BCle?=
Date: Fri, 23 Sep 2022 19:09:47 +0200
Subject: [PATCH 07/26] merge-columns default and nullable are working
 properly

---
 .../java/org/polypheny/db/ddl/DdlManager.java |  4 +-
 .../org/polypheny/db/ddl/DdlManagerImpl.java  |  7 ++--
 .../src/main/codegen/includes/parserImpls.ftl | 17 +++++++-
 .../altertable/SqlAlterTableMergeColumns.java | 21 ++++++++--
 .../java/org/polypheny/db/webui/Crud.java     | 39 +++++--------------
 5 files changed, 50 insertions(+), 38 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index 30fcffb8b1..b44c7c8a4f 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -211,9 +211,11 @@ public static DdlManager getInstance() {
      * @param sourceColumnNames name of the columns to be merged
      * @param targetColumnName name of the new column to be added
      * @param type the SQL data type specification of the merged column
+     * @param nullable if the merged column should be nullable
+     * @param defaultValue the new default value of the merged column
      * @param statement the initial query statement
      */
-    public abstract void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String targetColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException;
+    public abstract void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String targetColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException;
 
     /**
      * Adds an index to a table
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index f8799aa221..be5ee89bad 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -564,7 +564,7 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis
         catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete );
     }
 
-    public void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String targetColumnName, ColumnTypeInformation type, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException {
+    public void mergeColumns( CatalogTable catalogTable, List<String> sourceColumnNames, String targetColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException {
 
         if ( catalog.checkIfExistsColumn( catalogTable.id, targetColumnName ) ) {
             throw new ColumnAlreadyExistsException( targetColumnName, catalogTable.name );
@@ -583,13 +583,12 @@ public void mergeColumns( CatalogTable catalogTable, List sourceColumnNa
                 type.scale,
                 type.dimension,
                 type.cardinality,
-                true, // TODO: value is missing
+                nullable,
                 Collation.getDefaultCollation()
         );
 
-        // TODO: get DEFAULT from parameter
         // Add default value
-        addDefaultValue( "DEFAULT", columnId );
+        addDefaultValue( defaultValue, columnId );
         CatalogColumn addedColumn = catalog.getColumn ( columnId );
 
         // Ask router on which stores this column shall be placed
diff --git a/sql-language/src/main/codegen/includes/parserImpls.ftl b/sql-language/src/main/codegen/includes/parserImpls.ftl
index 1c4732e32a..cf7c3aaea9 100644
--- a/sql-language/src/main/codegen/includes/parserImpls.ftl
+++ b/sql-language/src/main/codegen/includes/parserImpls.ftl
@@ -657,8 +657,23 @@ SqlAlterTable SqlAlterTable(Span s) :
 
         name = SimpleIdentifier()
         type = DataType()
+        (
+            <NULL> { nullable = true; }
+            |
+            <NOT> <NULL> { nullable = false; }
+            |
+            { nullable = true; }
+        )
+        (
+            <DEFAULT_>
+            defaultValue = Literal()
+            |
+            defaultValue = ArrayConstructor()
+            |
+            { defaultValue = null; }
+        )
         {
-            return new SqlAlterTableMergeColumns(s.end(this), table, columnList, name, type);
+            return new SqlAlterTableMergeColumns(s.end(this), table, columnList, name, type, nullable, defaultValue);
         }
     )
 }
diff --git a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java
index 4a897466b5..199e06ecbb 100644
--- a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java
+++ b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java
@@ -55,20 +55,25 @@ public class SqlAlterTableMergeColumns extends SqlAlterTable {
 
     private final SqlIdentifier table;
    private final SqlNodeList columnsToMerge;
     private final SqlIdentifier newColumnName; // Can be null
-
     private final SqlDataTypeSpec type;
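+    /* Illustrative reading of the statement this class now parses, with the NULL and
+       DEFAULT clauses introduced by this patch (table and column names are invented
+       for the example, derived from the grammar and the query string built in Crud):
+
+       ALTER TABLE "emp" MERGE COLUMNS ("first_name", "last_name") IN "full_name" VARCHAR(100) NOT NULL DEFAULT 'n/a'
+    */
+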
private final boolean nullable; + private final SqlNode defaultValue; public SqlAlterTableMergeColumns( ParserPos pos, SqlIdentifier table, SqlNodeList columnsToMerge, SqlIdentifier newColumnName, - SqlDataTypeSpec type ) { + SqlDataTypeSpec type, + boolean nullable, + SqlNode defaultValue) { super( pos ); this.table = Objects.requireNonNull( table ); this.columnsToMerge = columnsToMerge; this.newColumnName = newColumnName; this.type = type; + this.nullable = nullable; + this.defaultValue = defaultValue; } @@ -92,6 +97,12 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { writer.keyword( "MERGE" ); writer.keyword( "COLUMNS" ); columnsToMerge.unparse( writer, leftPrec, rightPrec ); + writer.keyword( "AS" ); + columnsToMerge.unparse( writer, leftPrec, rightPrec ); + if ( defaultValue != null ) { + writer.keyword( "DEFAULT" ); + defaultValue.unparse( writer, leftPrec, rightPrec ); + } if ( newColumnName != null ) { writer.keyword( "AFTER" ); newColumnName.unparse( writer, leftPrec, rightPrec ); @@ -112,13 +123,17 @@ public void execute( Context context, Statement statement, QueryParameters param getDataStoreInstance( storeId ); } + String defaultValue = this.defaultValue == null ? null : this.defaultValue.toString(); + try { DdlManager.getInstance().mergeColumns( catalogTable, columnsToMerge.getList().stream().map( Node::toString ).collect( Collectors.toList()), newColumnName.getSimple(), ColumnTypeInformation.fromDataTypeSpec( type ), - statement ); + nullable, + defaultValue, + statement); } catch ( UnknownColumnException e ) { throw CoreUtil.newContextException( columnsToMerge.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); } catch ( ColumnAlreadyExistsException e ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index da8f87a0ea..4902274076 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1800,36 +1800,17 @@ void mergeColumns( final Context ctx ) { query = query + " NOT NULL"; } } - // TODO: merge the DEFAULT values too.(?) - if ( newColumn.defaultValue != null && !newColumn.defaultValue.equals( "" ) ) { - query = query + " DEFAULT "; - if ( newColumn.collectionsType != null && !newColumn.collectionsType.equals( "" ) ) { - //handle the case if the user says "ARRAY[1,2,3]" or "[1,2,3]" - if ( !newColumn.defaultValue.startsWith( newColumn.collectionsType ) ) { - query = query + newColumn.collectionsType; - } - query = query + newColumn.defaultValue; - } else { - switch ( newColumn.dataType ) { - case "BIGINT": - case "INTEGER": - case "SMALLINT": - case "TINYINT": - case "FLOAT": - case "DOUBLE": - case "DECIMAL": - newColumn.defaultValue = newColumn.defaultValue.replace( ",", "." 
); - BigDecimal b = new BigDecimal( newColumn.defaultValue ); - query = query + b.toString(); - break; - case "VARCHAR": - query = query + String.format( "'%s'", newColumn.defaultValue ); - break; - default: - query = query + newColumn.defaultValue; - } - } + + String defaultValue = Arrays + .stream( request.sourceColumns ) + .map( c -> c.defaultValue ) + .filter(s -> s != null && !s.isEmpty()) + .collect( Collectors.joining(" ")); + + if ( defaultValue != null && !defaultValue.equals( "" ) ) { + query = query + " DEFAULT " + String.format( "'%s'", defaultValue ); } + Result result; try { int affectedRows = executeSqlUpdate( transaction, query ); From 10421b370ab8ec65134b714e5c659dd908774911 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 1 Nov 2022 12:27:42 +0100 Subject: [PATCH 08/26] joinString added for MergeColumnsRequest --- .../polypheny/db/webui/models/requests/MergeColumnsRequest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java index 7eb407e0ae..75dfcabc92 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java @@ -23,6 +23,7 @@ public class MergeColumnsRequest extends UIRequest { public DbColumn[] sourceColumns; + public String joinString; public String targetColumnName; // for data sources public String tableType; From 99adc413c8bdbc6b7f8a3fbe50e99e890a8cdda6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 1 Nov 2022 17:07:00 +0100 Subject: [PATCH 09/26] respect the order of columns to merge --- .../java/org/polypheny/db/ddl/DdlManager.java | 5 ++-- .../polypheny/db/processing/DataMigrator.java | 13 +++++----- .../org/polypheny/db/ddl/DdlManagerImpl.java | 18 +++++++++----- .../db/processing/DataMigratorImpl.java | 24 +++++++------------ .../src/main/codegen/includes/parserImpls.ftl | 7 ++++-- .../altertable/SqlAlterTableMergeColumns.java | 20 +++++++++------- .../java/org/polypheny/db/webui/Crud.java | 4 ++-- .../models/requests/MergeColumnsRequest.java | 2 +- 8 files changed, 50 insertions(+), 43 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index b44c7c8a4f..b9a7f87129 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -209,13 +209,14 @@ public static DdlManager getInstance() { * * @param catalogTable the table * @param sourceColumnNames name of the columns to be merged - * @param targetColumnName name of the new column to be added + * @param newColumnName name of the new column to be added + * @param joinString the string to place between the values * @param type the SQL data type specification of the merged column * @param nullable if the merged column should be nullable * @param defaultValue the new default value of the merged column * @param statement the initial query statement */ - public abstract void mergeColumns( CatalogTable catalogTable, List sourceColumnNames, String targetColumnName, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException; + public abstract void mergeColumns(CatalogTable catalogTable, List 
sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException; /** * Adds an index to a table diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 57c0aaf7c4..fd662cc2f7 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -76,15 +76,16 @@ void copyPartitionData( List targetPartitionIds ); /** - * Currently used to to transfer data if unpartitioned is about to be partitioned. + * Currently used to transfer data if unpartitioned is about to be partitioned. * For Table Merge use {@link #copySelectiveData(Transaction, CatalogAdapter, CatalogTable, CatalogTable, List, Map, List)} } instead * - * @param transaction Transactional scope - * @param store Target Store where data should be migrated to + * @param transaction Transactional scope + * @param store Target Store where data should be migrated to * @param sourceColumns Columns to be merged - * @param targetColumn New column to be added + * @param targetColumn New column to be added + * @param joinString */ - void mergeColumns( Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn ); + void mergeColumns(Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn, String joinString); AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); @@ -92,7 +93,7 @@ void copyPartitionData( void executeQuery( List columns, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); // is used within mergeColumns - void executeMergeQuery( List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); + void executeMergeQuery( List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, String joinString, AlgRoot sourceRel, Statement sourceStatement, Statement targetStatement, AlgRoot targetRel, boolean isMaterializedView, boolean doesSubstituteOrderBy ); AlgRoot buildDeleteStatement( Statement statement, List to, long partitionId ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index be5ee89bad..862cb05bab 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -125,6 +125,7 @@ import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.LogicalView; import org.polypheny.db.schema.PolySchemaBuilder; +import org.polypheny.db.sql.sql.SqlNode; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.type.ArrayType; @@ -564,17 +565,17 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); } - public void mergeColumns( CatalogTable catalogTable, List sourceColumnNames, String targetColumnName, ColumnTypeInformation type, boolean nullable, 
String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException { + public void mergeColumns(CatalogTable catalogTable, List sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException { - if ( catalog.checkIfExistsColumn( catalogTable.id, targetColumnName ) ) { - throw new ColumnAlreadyExistsException( targetColumnName, catalogTable.name ); + if ( catalog.checkIfExistsColumn( catalogTable.id, newColumnName) ) { + throw new ColumnAlreadyExistsException(newColumnName, catalogTable.name ); } CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, sourceColumnNames.get( sourceColumnNames.size()-1 ) ); int position = updateAdjacentPositions( catalogTable, null, afterColumn ); long columnId = catalog.addColumn( - targetColumnName, + newColumnName, catalogTable.id, position, type.type, @@ -591,6 +592,11 @@ public void mergeColumns( CatalogTable catalogTable, List sourceColumnNa addDefaultValue( defaultValue, columnId ); CatalogColumn addedColumn = catalog.getColumn ( columnId ); + // Remove quotes from joinString + if ( joinString.startsWith( "'" ) ) { + joinString = joinString.substring( 1, joinString.length() - 1 ); + } + // Ask router on which stores this column shall be placed List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( addedColumn ); DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); @@ -600,7 +606,7 @@ public void mergeColumns( CatalogTable catalogTable, List sourceColumnNa for ( String columnName : sourceColumnNames ) { sourceCatalogColumns.add( catalog.getColumn( catalogTable.id, columnName ) ); } - CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, targetColumnName ); + CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, newColumnName); // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { @@ -614,7 +620,7 @@ public void mergeColumns( CatalogTable catalogTable, List sourceColumnNa );//Not a valid partitionID --> placeholder AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); // Call migrator - dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn); + dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn, joinString); for ( CatalogColumn sourceCatalogColumn : sourceCatalogColumns ) { catalog.deleteColumn( sourceCatalogColumn.id ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 5b767e9314..03ec8e483f 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -17,17 +17,9 @@ package org.polypheny.db.processing; import com.google.common.collect.ImmutableList; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; + +import java.util.*; 
import java.util.Map.Entry; -import java.util.NoSuchElementException; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.avatica.MetaImpl; @@ -223,7 +215,7 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl @Override - public void executeMergeQuery(List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { + public void executeMergeQuery(List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, String joinString, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { try { PolyResult result; if ( isMaterializedView ) { @@ -245,7 +237,7 @@ public void executeMergeQuery(List primaryKeyColumns, List sourceIterator = enumerable.iterator(); - Map sourceColMapping = new HashMap<>(); + Map sourceColMapping = new LinkedHashMap<>(); for ( CatalogColumn catalogColumn : sourceColumns ) { int i = 0; for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { @@ -269,7 +261,7 @@ public void executeMergeQuery(List primaryKeyColumns, List> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() ); - Map> values = new HashMap<>(); + Map> values = new LinkedHashMap<>(); for ( List list : rows ) { for ( Map.Entry entry : sourceColMapping.entrySet() ) { @@ -299,7 +291,7 @@ public void executeMergeQuery(List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn ) { + public void mergeColumns(Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn, String joinString) { CatalogTable table = Catalog.getInstance().getTable( sourceColumns.get( 0 ).tableId ); CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey ); @@ -822,7 +814,7 @@ public void mergeColumns( Transaction transaction, CatalogAdapter store, List partitionList = new ArrayList(); int partitionIndex = 0; int numPartitionGroups = 0; @@ -654,8 +655,10 @@ SqlAlterTable SqlAlterTable(Span s) : | columnList = ParenthesizedSimpleIdentifierList() - + name = SimpleIdentifier() + + joinString = Literal() type = DataType() ( { nullable = true; } @@ -673,7 +676,7 @@ SqlAlterTable SqlAlterTable(Span s) : { defaultValue = null; } ) { - return new SqlAlterTableMergeColumns(s.end(this), table, columnList, name, type, nullable, defaultValue); + return new SqlAlterTableMergeColumns(s.end(this), table, columnList, name, joinString, type, nullable, defaultValue); } ) } diff --git a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java index 199e06ecbb..014301bacf 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/sql/ddl/altertable/SqlAlterTableMergeColumns.java @@ -30,7 +30,6 @@ import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; import org.polypheny.db.ddl.exception.ColumnNotExistsException; -import org.polypheny.db.ddl.exception.NotNullAndDefaultValueException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import 
org.polypheny.db.nodes.Node; @@ -54,7 +53,8 @@ public class SqlAlterTableMergeColumns extends SqlAlterTable { private final SqlIdentifier table; private final SqlNodeList columnsToMerge; - private final SqlIdentifier newColumnName; // Can be null + private final SqlIdentifier targetColumnName; + private final SqlNode joinString; private final SqlDataTypeSpec type; private final boolean nullable; private final SqlNode defaultValue; @@ -63,14 +63,16 @@ public SqlAlterTableMergeColumns( ParserPos pos, SqlIdentifier table, SqlNodeList columnsToMerge, - SqlIdentifier newColumnName, + SqlIdentifier targetColumnName, + SqlNode joinString, SqlDataTypeSpec type, boolean nullable, SqlNode defaultValue) { super( pos ); this.table = Objects.requireNonNull( table ); this.columnsToMerge = columnsToMerge; - this.newColumnName = newColumnName; + this.targetColumnName = targetColumnName; + this.joinString = joinString; this.type = type; this.nullable = nullable; this.defaultValue = defaultValue; @@ -103,9 +105,9 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { writer.keyword( "DEFAULT" ); defaultValue.unparse( writer, leftPrec, rightPrec ); } - if ( newColumnName != null ) { + if ( targetColumnName != null ) { writer.keyword( "AFTER" ); - newColumnName.unparse( writer, leftPrec, rightPrec ); + targetColumnName.unparse( writer, leftPrec, rightPrec ); } } @@ -124,12 +126,14 @@ public void execute( Context context, Statement statement, QueryParameters param } String defaultValue = this.defaultValue == null ? null : this.defaultValue.toString(); + String joinString = this.joinString == null ? "" : this.joinString.toString(); try { DdlManager.getInstance().mergeColumns( catalogTable, columnsToMerge.getList().stream().map( Node::toString ).collect( Collectors.toList()), - newColumnName.getSimple(), + targetColumnName.getSimple(), + joinString, ColumnTypeInformation.fromDataTypeSpec( type ), nullable, defaultValue, @@ -137,7 +141,7 @@ public void execute( Context context, Statement statement, QueryParameters param } catch ( UnknownColumnException e ) { throw CoreUtil.newContextException( columnsToMerge.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); } catch ( ColumnAlreadyExistsException e ) { - throw CoreUtil.newContextException( newColumnName.getPos(), RESOURCE.columnExists( newColumnName.getSimple() ) ); + throw CoreUtil.newContextException( targetColumnName.getPos(), RESOURCE.columnExists( targetColumnName.getSimple() ) ); } catch ( ColumnNotExistsException e ) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFoundInTable( e.columnName, e.tableName ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 4902274076..5ba45ad083 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1778,8 +1778,8 @@ void mergeColumns( final Context ctx ) { Arrays.stream( request.sourceColumns ) .map( s -> "\"" + s.name + "\"") .collect( Collectors.joining(", ")); - String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) IN \"%s\" %s %s", - tableId, listOfColumnsToMerge, newColumn.name, as, dataType ); + String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) INTO \"%s\" WITH '%s' %s %s", + tableId, listOfColumnsToMerge, newColumn.name, request.joinString, as, dataType ); //we don't want precision, scale etc. 
for source columns if ( newColumn.as == null ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java index 75dfcabc92..2bf4aaf41c 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/MergeColumnsRequest.java @@ -23,8 +23,8 @@ public class MergeColumnsRequest extends UIRequest { public DbColumn[] sourceColumns; - public String joinString; public String targetColumnName; + public String joinString; // for data sources public String tableType; From d8fef697f1644e51cb72c3bc0f7c9d78e4b9a6a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Wed, 9 Nov 2022 19:06:09 +0100 Subject: [PATCH 10/26] Merge from the Master --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 1 - .../db/processing/DataMigratorImpl.java | 19 ++++--------------- .../altertable/SqlAlterTableMergeColumns.java | 14 +++++--------- .../java/org/polypheny/db/webui/Crud.java | 2 +- 4 files changed, 10 insertions(+), 26 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 40dbf69a86..ad203df29d 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -133,7 +133,6 @@ import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.PolySchemaBuilder; -import org.polypheny.db.sql.sql.SqlNode; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.type.ArrayType; diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 84c3adda70..15ebf8132c 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -17,16 +17,9 @@ package org.polypheny.db.processing; import com.google.common.collect.ImmutableList; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; + +import java.util.*; import java.util.Map.Entry; -import java.util.NoSuchElementException; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.avatica.MetaImpl; @@ -43,11 +36,7 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues; import org.polypheny.db.algebra.logical.relational.LogicalValues; -import org.polypheny.db.algebra.type.AlgDataTypeFactory; -import org.polypheny.db.algebra.type.AlgDataTypeField; -import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; -import org.polypheny.db.algebra.type.AlgDataTypeSystem; -import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.algebra.type.*; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumn; @@ -317,7 +306,7 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl @Override public void executeMergeQuery(List primaryKeyColumns, List sourceColumns, 
CatalogColumn targetColumn, String joinString, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { try { - PolyResult result; + PolyImplementation result; if ( isMaterializedView ) { result = sourceStatement.getQueryProcessor().prepareQuery( sourceAlg, diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java index 01094305b3..a67c976aa9 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java @@ -23,7 +23,7 @@ import java.util.Objects; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.catalog.Catalog.TableType; +import org.polypheny.db.catalog.Catalog.EntityType; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.exceptions.ColumnAlreadyExistsException; import org.polypheny.db.catalog.exceptions.UnknownColumnException; @@ -34,12 +34,8 @@ import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; import org.polypheny.db.prepare.Context; -import org.polypheny.db.sql.sql.SqlDataTypeSpec; -import org.polypheny.db.sql.sql.SqlIdentifier; -import org.polypheny.db.sql.sql.SqlNode; -import org.polypheny.db.sql.sql.SqlNodeList; -import org.polypheny.db.sql.sql.SqlWriter; -import org.polypheny.db.sql.sql.ddl.SqlAlterTable; +import org.polypheny.db.sql.language.*; +import org.polypheny.db.sql.language.ddl.SqlAlterTable; import org.polypheny.db.transaction.Statement; import org.polypheny.db.util.CoreUtil; import org.polypheny.db.util.ImmutableNullableList; @@ -92,7 +88,7 @@ public List getSqlOperandList() { @Override - public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { + public void unparse(SqlWriter writer, int leftPrec, int rightPrec ) { writer.keyword( "ALTER" ); writer.keyword( "TABLE" ); table.unparse( writer, leftPrec, rightPrec ); @@ -116,7 +112,7 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { public void execute( Context context, Statement statement, QueryParameters parameters ) { CatalogTable catalogTable = getCatalogTable( context, table ); - if ( catalogTable.tableType != TableType.TABLE ) { + if ( catalogTable.entityType != EntityType.ENTITY ) { throw new RuntimeException( "Not possible to use ALTER TABLE because " + catalogTable.name + " is not a table." 
); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index cadd3ded07..71ad0c297f 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1827,7 +1827,7 @@ void mergeColumns( final Context ctx ) { .stream( request.sourceColumns ) .map( c -> c.defaultValue ) .filter(s -> s != null && !s.isEmpty()) - .collect( Collectors.joining(" ")); + .collect( Collectors.joining(request.joinString)); if ( defaultValue != null && !defaultValue.equals( "" ) ) { query = query + " DEFAULT " + String.format( "'%s'", defaultValue ); From 4268e18287ea9054a66f25ffee39e1aaf5d5e8a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Mon, 14 Nov 2022 19:39:50 +0100 Subject: [PATCH 11/26] bugfix: exclude primary key(s) from merge --- .../main/java/org/polypheny/db/processing/DataMigratorImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 15ebf8132c..d5c60829a1 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -374,7 +374,7 @@ public void executeMergeQuery(List primaryKeyColumns, List mergedValueList = null; for ( Map.Entry> v : values.entrySet() ) { - if (v.getValue().get( 0 ) instanceof String) { + if ( !primaryKeyColumns.stream().map(c -> c.id).collect(Collectors.toList()).contains( v.getKey() ) ) { if( mergedValueList == null ) { mergedValueList = v.getValue(); } else { From 34ca336e0c4681d3d861af708137d2a76514c5ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 15 Nov 2022 19:22:38 +0100 Subject: [PATCH 12/26] bugfix: drop after mergecolumns fixed. dropcolumn of mongostore fixed. 
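For orientation, a client-side sketch of the kind of statement this bugfix concerns. This is illustrative only: the table, column, and connection details are invented, the JDBC URL is an assumption, and the keyword order follows the parser rules and the Crud format string added earlier in this series.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class MergeColumnsExample {

        public static void main( String[] args ) throws Exception {
            try (
                    Connection conn = DriverManager.getConnection( "jdbc:polypheny://localhost/" ); // driver and URL assumed
                    Statement stmt = conn.createStatement()
            ) {
                // Merge two columns into a new NOT NULL column "fullname", joining the
                // existing values with a single space; later inserts get a default value.
                stmt.executeUpdate( "ALTER TABLE \"public\".\"person\" "
                        + "MERGE COLUMNS (\"firstname\", \"lastname\") INTO \"fullname\" "
                        + "WITH ' ' VARCHAR(255) NOT NULL DEFAULT 'unknown'" );
            }
        }
    }

After such a statement, the bug fixed here required that the source columns also disappear from every underlying placement, which is what the hunk below adds.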
---
 .../org/polypheny/db/ddl/DdlManagerImpl.java     | 16 ++++++++++++++++
 .../polypheny/db/adapter/mongodb/MongoStore.java |  2 +-
 2 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index ad203df29d..729bfbe24c 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -655,7 +655,23 @@ public void mergeColumns(CatalogTable catalogTable, List sourceColumnNam
         dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn, joinString );

         for ( CatalogColumn sourceCatalogColumn : sourceCatalogColumns ) {
+            // Delete column from underlying data stores
+            for ( CatalogColumnPlacement dp : catalog.getColumnPlacementsByColumn( sourceCatalogColumn.id ) ) {
+                if ( catalogTable.entityType == EntityType.ENTITY ) {
+                    AdapterManager.getInstance().getStore( dp.adapterId ).dropColumn( statement.getPrepareContext(), dp );
+                }
+                catalog.deleteColumnPlacement( dp.adapterId, dp.columnId, true );
+            }
+
+            // Delete from catalog
+            List<CatalogColumn> columns = catalog.getColumns( catalogTable.id );
             catalog.deleteColumn( sourceCatalogColumn.id );
+            if ( sourceCatalogColumn.position != columns.size() ) {
+                // Update position of the other columns
+                for ( int i = sourceCatalogColumn.position; i < columns.size(); i++ ) {
+                    catalog.setColumnPosition( columns.get( i ).id, i );
+                }
+            }
         }
     }

diff --git a/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java b/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java
index 4751d54dc4..3846dcb77a 100644
--- a/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java
+++ b/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStore.java
@@ -413,7 +413,7 @@ private String getPhysicalColumnName( CatalogColumnPlacement columnPlacement ) {
     public void dropColumn( Context context, CatalogColumnPlacement columnPlacement ) {
         commitAll();
         for ( CatalogPartitionPlacement partitionPlacement : catalog.getPartitionPlacementsByTableOnAdapter( columnPlacement.adapterId, columnPlacement.tableId ) ) {
-            Document field = new Document().append( partitionPlacement.physicalTableName, 1 );
+            Document field = new Document().append( getPhysicalColumnName( columnPlacement.physicalColumnName, columnPlacement.columnId ), 1 );
             Document filter = new Document().append( "$unset", field );

             context.getStatement().getTransaction().registerInvolvedAdapter( this );

From 9c881f4a0d077e48cf5a0341514633ac53361b40 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Attila=20S=C3=BCle?=
Date: Thu, 24 Nov 2022 23:18:48 +0100
Subject: [PATCH 13/26] relocate relational is working. migrate to document store started.
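The new statement moves a table into another namespace; the target schema comes first and the fully qualified table follows the TRANSFER keyword (see the Crud.transferTable format string added below). A minimal invocation sketch, with invented schema and table names and an assumed JDBC URL:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class TransferTableExample {

        public static void main( String[] args ) throws Exception {
            try (
                    Connection conn = DriverManager.getConnection( "jdbc:polypheny://localhost/" ); // driver and URL assumed
                    Statement stmt = conn.createStatement()
            ) {
                // Move table "person" from the relational namespace "public" into the
                // namespace "doc"; a document target namespace triggers data migration.
                stmt.executeUpdate( "ALTER SCHEMA \"doc\" TRANSFER \"public\".\"person\"" );
            }
        }
    }

Within the same data model, the transfer is pure catalog bookkeeping, which is what relocateTable() below implements.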
--- .../org/polypheny/db/catalog/CatalogImpl.java | 87 ++++++++++++--- .../polypheny/db/algebra/constant/Kind.java | 6 +- .../org/polypheny/db/catalog/Catalog.java | 4 +- .../java/org/polypheny/db/ddl/DdlManager.java | 4 +- .../org/polypheny/db/catalog/MockCatalog.java | 7 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 77 ++++++++++++- sql-language/src/main/codegen/config.fmpp | 3 +- .../src/main/codegen/includes/parserImpls.ftl | 6 ++ .../src/main/codegen/templates/Parser.jj | 1 + .../SqlAlterSchemaTransferTable.java | 102 ++++++++++++++++++ .../java/org/polypheny/db/webui/Crud.java | 46 +++++--- .../org/polypheny/db/webui/HttpServer.java | 4 +- .../db/webui/models/TransferTableRequest.java | 26 +++++ 13 files changed, 339 insertions(+), 34 deletions(-) create mode 100644 sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java create mode 100644 webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java diff --git a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 4eb7aa51ec..106b16d744 100644 --- a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -17,29 +17,22 @@ package org.polypheny.db.catalog; +import com.google.common.base.Predicates; +import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.io.File; import java.io.IOException; import java.sql.Timestamp; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Objects; -import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.Getter; import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; import org.mapdb.BTreeMap; import org.mapdb.DB; import org.mapdb.DBException.SerializationError; @@ -1900,6 +1893,37 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent return id; } + @Override + public long relocateTable(CatalogTable sourceTable, long targetNamespaceId ) { + + CatalogTable targetTable = transferCatalogTable( sourceTable, targetNamespaceId ); + synchronized ( this ) { + ImmutableList reducedSourceSchemaChildren = ImmutableList + .copyOf( Collections2.filter( schemaChildren.get( sourceTable.namespaceId ), + Predicates.not( Predicates.equalTo( sourceTable.id ) ) ) ); + ImmutableList extendedTargetSchemaChildren = new ImmutableList.Builder() + .addAll( schemaChildren.get(targetNamespaceId ) ) + .add( targetTable.id ) + .build(); + + schemaChildren.replace( sourceTable.namespaceId, reducedSourceSchemaChildren ); + schemaChildren.replace( targetNamespaceId, extendedTargetSchemaChildren ); + + tables.replace( sourceTable.id, targetTable ); + tableNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.name } ); + tableNames.put( new Object[]{ targetTable.databaseId, targetNamespaceId, targetTable.name }, targetTable ); + + for( Long fieldId: sourceTable.fieldIds ) { + CatalogColumn targetCatalogColumn = transferCatalogColumn( 
targetNamespaceId, columns.get( fieldId ) ); + columns.replace( fieldId, targetCatalogColumn ); + columnNames.remove( new Object[]{sourceTable.databaseId, sourceTable.namespaceId, sourceTable.id, targetCatalogColumn.name } ); + columnNames.put( new Object[]{sourceTable.databaseId, targetNamespaceId, sourceTable.id, targetCatalogColumn.name }, targetCatalogColumn ); + } + } + listeners.firePropertyChange( "table", sourceTable, null ); + + return sourceTable.id; + } /** * {@inheritDoc} @@ -5480,6 +5504,45 @@ private CatalogKey getKey( long keyId ) { } } + @NotNull + private static CatalogColumn transferCatalogColumn(long targetNamespaceId, CatalogColumn sourceCatalogColumn) { + CatalogColumn targetCatalogColumn = new CatalogColumn( + sourceCatalogColumn.id, + sourceCatalogColumn.name, + sourceCatalogColumn.tableId, + targetNamespaceId, + sourceCatalogColumn.databaseId, + sourceCatalogColumn.position, + sourceCatalogColumn.type, + sourceCatalogColumn.collectionsType, + sourceCatalogColumn.length, + sourceCatalogColumn.scale, + sourceCatalogColumn.dimension, + sourceCatalogColumn.cardinality, + sourceCatalogColumn.nullable, + sourceCatalogColumn.collation, + sourceCatalogColumn.defaultValue); + return targetCatalogColumn; + } + + + @NotNull + private CatalogTable transferCatalogTable(CatalogTable sourceTable, long targetNamespaceId) { + return new CatalogTable( + sourceTable.id, + sourceTable.name, + sourceTable.fieldIds, + targetNamespaceId, + sourceTable.databaseId, + sourceTable.ownerId, + sourceTable.entityType, + sourceTable.primaryKey, + sourceTable.dataPlacements, + sourceTable.modifiable, + sourceTable.partitionProperty, + sourceTable.connectedViews); + } + static class CatalogValidator { @@ -5510,4 +5573,4 @@ public void startCheck() { } -} +} \ No newline at end of file diff --git a/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java b/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java index d84c8dd294..8e0ed3df0d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java +++ b/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java @@ -236,6 +236,11 @@ public enum Kind { */ MERGE, + /** + * TRANSFER statement + */ + TRANSFER, + /** * TABLESAMPLE operator */ @@ -1713,4 +1718,3 @@ private static > EnumSet concat( EnumSet set0, EnumSet convertTableTypeList( @NonNull final List return typeList; } -} +} \ No newline at end of file diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 812615e594..e58433e97c 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -451,6 +451,8 @@ public static DdlManager getInstance() { */ public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; + public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException; + /** * Create a new view * @@ -751,4 +753,4 @@ public enum DefaultIndexPlacementStrategy { POLYPHENY, ONE_DATA_STORE, ALL_DATA_STORES } -} +} \ No newline at end of file diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java 
b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index c39aebee21..287a93f283 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -351,6 +351,11 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent throw new NotImplementedException(); } + @Override + public long relocateTable(CatalogTable sourceTable, long targetNamespaceId ) { + throw new NotImplementedException(); + } + @Override public long addView( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, AlgNode definition, AlgCollation algCollation, Map> underlyingTables, AlgDataType fieldList, String query, QueryLanguage language ) { @@ -1371,4 +1376,4 @@ public void updateCollectionPartitionPhysicalNames( long collectionId, int adapt throw new NotImplementedException(); } -} +} \ No newline at end of file diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 8b896c69c5..213c61ac53 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -127,18 +127,23 @@ import org.polypheny.db.partition.properties.TemperaturePartitionProperty; import org.polypheny.db.partition.properties.TemperaturePartitionProperty.PartitionCostIndication; import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation; +import org.polypheny.db.prepare.Context; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.runtime.PolyphenyDbContextException; import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.PolySchemaBuilder; +import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.type.ArrayType; import org.polypheny.db.type.PolyType; +import org.polypheny.db.util.CoreUtil; import org.polypheny.db.view.MaterializedViewManager; +import static org.polypheny.db.util.Static.RESOURCE; + @Slf4j public class DdlManagerImpl extends DdlManager { @@ -2242,6 +2247,50 @@ public void createTable( long schemaId, String name, List fiel } } + @Override + public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement) throws EntityAlreadyExistsException { + // Check if there is already an entity with this name + if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { + return; + } + NamespaceType sourceNamespaceType = catalog.getSchema(sourceTable.namespaceId).namespaceType; + NamespaceType targetNamespaceType = catalog.getSchema(targetSchemaId).namespaceType; + + if ( sourceNamespaceType == targetNamespaceType ) { + catalog.relocateTable(sourceTable, targetSchemaId); + } + + if ( targetNamespaceType == NamespaceType.DOCUMENT ) { + List stores = sourceTable.dataPlacements + .stream() + .map(id -> (DataStore) AdapterManager.getInstance().getAdapter(id)) + .collect(Collectors.toList()); + PlacementType placementType = catalog.getDataPlacement(sourceTable.dataPlacements.get(0), sourceTable.id).placementType; + createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement ); + try { + CatalogTable table = catalog.getTable(targetSchemaId, sourceTable.name); + DataMigrator dataMigrator = 
statement.getTransaction().getDataMigrator(); + // dataMigrator.copyRelationalDataToGraphData( statement.getTransaction(), stores ) + } catch (UnknownTableException e) { + throw new RuntimeException(e); + } + // Migrator + catalog.deleteTable(sourceTable.id); + } + + + //statement.getTransaction().getSchema().add(table.name, catalog.getSchema( targetSchemaId )., NamespaceType.DOCUMENT); + + /* + if ( catalog.getSchema( targetSchemaId ).namespaceType == NamespaceType.DOCUMENT ) { + PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); + catalogReader.getSchemaPaths().add(List.of("kaka", "maka")); + statement.getTransaction().getCatalogReader(); + } + + */ + } + @Override public void createCollection( long schemaId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { @@ -3135,6 +3184,32 @@ private void prepareMonitoring( Statement statement, Kind kind, CatalogTable cat } } + protected CatalogTable getCatalogTable( Context context, SqlIdentifier tableName ) { + CatalogTable catalogTable; + try { + long schemaId; + String tableOldName; + Catalog catalog = Catalog.getInstance(); + if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName + schemaId = catalog.getSchema( tableName.names.get( 0 ), tableName.names.get( 1 ) ).id; + tableOldName = tableName.names.get( 2 ); + } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName + schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id; + tableOldName = tableName.names.get( 1 ); + } else { // TableName + schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; + tableOldName = tableName.names.get( 0 ); + } + catalogTable = catalog.getTable( schemaId, tableOldName ); + } catch ( UnknownDatabaseException e ) { + throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.databaseNotFound( tableName.toString() ) ); + } catch ( UnknownSchemaException e ) { + throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.schemaNotFound( tableName.toString() ) ); + } catch ( UnknownTableException e ) { + throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.tableNotFound( tableName.toString() ) ); + } + return catalogTable; + } @Override public void dropFunction() { @@ -3159,4 +3234,4 @@ public void dropType() { throw new RuntimeException( "Not supported yet" ); } -} +} \ No newline at end of file diff --git a/sql-language/src/main/codegen/config.fmpp b/sql-language/src/main/codegen/config.fmpp index 1acf60a9ae..7563495c7b 100644 --- a/sql-language/src/main/codegen/config.fmpp +++ b/sql-language/src/main/codegen/config.fmpp @@ -82,6 +82,7 @@ data: { "org.polypheny.db.sql.language.ddl.altermaterializedview.SqlAlterMaterializedViewRenameColumn" "org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaOwner" "org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaRename" + "org.polypheny.db.sql.language.ddl.alterschema.SqlAlterSchemaTransferTable" "org.polypheny.db.sql.language.ddl.altertable.SqlAlterSourceTableAddColumn" "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddColumn" "org.polypheny.db.sql.language.ddl.altertable.SqlAlterTableAddForeignKey" @@ -422,6 +423,7 @@ data: { "TRANSACTIONS_ACTIVE" "TRANSACTIONS_COMMITTED" "TRANSACTIONS_ROLLED_BACK" + "TRANSFER" "TRANSFORM" "TRANSFORMS" "TRIGGER_CATALOG" @@ -523,4 +525,3 @@ data: { freemarkerLinks: { includes: includes/ } - diff --git 
a/sql-language/src/main/codegen/includes/parserImpls.ftl b/sql-language/src/main/codegen/includes/parserImpls.ftl
index dbf980e885..bc2a403ded 100644
--- a/sql-language/src/main/codegen/includes/parserImpls.ftl
+++ b/sql-language/src/main/codegen/includes/parserImpls.ftl
@@ -48,6 +48,12 @@ SqlAlterSchema SqlAlterSchema(Span s) :
     <SCHEMA>
     schema = CompoundIdentifier()
     (
+        <TRANSFER>
+        name = CompoundIdentifier()
+        {
+            return new SqlAlterSchemaTransferTable(s.end(this), name, schema);
+        }
+        |
         <RENAME> <TO>
         name = CompoundIdentifier()
         {
diff --git a/sql-language/src/main/codegen/templates/Parser.jj b/sql-language/src/main/codegen/templates/Parser.jj
index 23502edeee..53233d7625 100644
--- a/sql-language/src/main/codegen/templates/Parser.jj
+++ b/sql-language/src/main/codegen/templates/Parser.jj
@@ -6649,6 +6649,7 @@ SqlPostfixOperator PostfixRowOperator() :
 | < TRANSACTIONS_ACTIVE: "TRANSACTIONS_ACTIVE" >
 | < TRANSACTIONS_COMMITTED: "TRANSACTIONS_COMMITTED" >
 | < TRANSACTIONS_ROLLED_BACK: "TRANSACTIONS_ROLLED_BACK" >
+| < TRANSFER: "TRANSFER" >
 | < TRANSFORM: "TRANSFORM" >
 | < TRANSFORMS: "TRANSFORMS" >
 | < TRANSLATE: "TRANSLATE" >
diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
new file mode 100644
index 0000000000..443bdd0e6e
--- /dev/null
+++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2019-2022 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.sql.language.ddl.alterschema;
+
+
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
+import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
+import org.polypheny.db.catalog.exceptions.UnknownUserException;
+import org.polypheny.db.ddl.DdlManager;
+import org.polypheny.db.languages.ParserPos;
+import org.polypheny.db.languages.QueryParameters;
+import org.polypheny.db.nodes.Node;
+import org.polypheny.db.prepare.Context;
+import org.polypheny.db.sql.language.SqlIdentifier;
+import org.polypheny.db.sql.language.SqlNode;
+import org.polypheny.db.sql.language.SqlWriter;
+import org.polypheny.db.sql.language.ddl.SqlAlterSchema;
+import org.polypheny.db.transaction.Statement;
+import org.polypheny.db.util.CoreUtil;
+import org.polypheny.db.util.ImmutableNullableList;
+
+import java.util.List;
+import java.util.Objects;
+
+import static org.polypheny.db.util.Static.RESOURCE;
+
+
+/**
+ * Parse tree for {@code ALTER SCHEMA targetSchema TRANSFER table} statement.
+ */
+public class SqlAlterSchemaTransferTable extends SqlAlterSchema {
+
+    private final SqlIdentifier table;
+    private final SqlIdentifier targetSchema;
+
+
+    /**
+     * Creates a SqlAlterSchemaTransferTable.
+     */
+    public SqlAlterSchemaTransferTable( ParserPos pos, SqlIdentifier table, SqlIdentifier targetSchema ) {
+        super( pos );
+        this.table = Objects.requireNonNull( table );
+        this.targetSchema = Objects.requireNonNull( targetSchema );
+    }
+
+
+    @Override
+    public List<Node> getOperandList() {
+        return ImmutableNullableList.of( table, targetSchema );
+    }
+
+
+    @Override
+    public List<SqlNode> getSqlOperandList() {
+        return ImmutableNullableList.of( table, targetSchema );
+    }
+
+
+    @Override
+    public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
+        // e.g. ALTER SCHEMA "doc" TRANSFER "public"."person"
+        writer.keyword( "ALTER" );
+        writer.keyword( "SCHEMA" );
+        targetSchema.unparse( writer, leftPrec, rightPrec );
+        writer.keyword( "TRANSFER" );
+        table.unparse( writer, leftPrec, rightPrec );
+    }
+
+
+    @Override
+    public void execute( Context context, Statement statement, QueryParameters parameters ) {
+        try {
+            Catalog catalog = Catalog.getInstance();
+            CatalogTable catalogTable = getCatalogTable( context, table );
+
+            long targetSchemaId = catalog.getSchema( context.getDatabaseId(), targetSchema.getNames().get( 0 ) ).id;
+            DdlManager.getInstance().transferTable( catalogTable, targetSchemaId, statement );
+
+        } catch ( UnknownSchemaException e ) {
+            throw CoreUtil.newContextException( table.getPos(), RESOURCE.schemaNotFound( table.getSimple() ) );
+        } catch ( EntityAlreadyExistsException e ) {
+            throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableExists( table.names.get( 1 ) ) );
+        }
+    }
+
+}
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 4c0dd16c74..ee021b00ac 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -208,22 +208,10 @@ import org.polypheny.db.webui.models.TableConstraint;
 import org.polypheny.db.webui.models.Uml;
 import org.polypheny.db.webui.models.UnderlyingTables;
-import org.polypheny.db.webui.models.requests.BatchUpdateRequest;
+import org.polypheny.db.webui.models.requests.*;
 import org.polypheny.db.webui.models.requests.BatchUpdateRequest.Update;
-import org.polypheny.db.webui.models.requests.ClassifyAllData;
-import org.polypheny.db.webui.models.requests.ColumnRequest;
-import org.polypheny.db.webui.models.requests.ConstraintRequest;
-import org.polypheny.db.webui.models.requests.EditTableRequest;
-import org.polypheny.db.webui.models.requests.ExploreData;
-import org.polypheny.db.webui.models.requests.ExploreTables;
-import org.polypheny.db.webui.models.requests.HubRequest;
-import org.polypheny.db.webui.models.requests.PartitioningRequest;
 import org.polypheny.db.webui.models.requests.PartitioningRequest.ModifyPartitionRequest;
-import org.polypheny.db.webui.models.requests.QueryExplorationRequest;
-import org.polypheny.db.webui.models.requests.QueryRequest;
-import org.polypheny.db.webui.models.requests.RelAlgRequest;
-import org.polypheny.db.webui.models.requests.SchemaTreeRequest;
-import org.polypheny.db.webui.models.requests.UIRequest;
+import org.polypheny.db.webui.models.requests.TransferTableRequest;


 @Slf4j
@@ -623,6 +611,34 @@ void createTable( final Context ctx ) {
         ctx.json( result );
     }

+    /**
+     * Transfer a table
+     */
+    void transferTable( final Context ctx ) {
+        TransferTableRequest request = ctx.bodyAsClass( TransferTableRequest.class );
+        Transaction transaction = getTransaction();
+        StringBuilder query = new StringBuilder();
+        String targetSchemaId = String.format( "\"%s\"", request.targetSchema );
+        String tableId = String.format(
"\"%s\".\"%s\"", request.sourceSchema, request.table ); + query.append( "ALTER SCHEMA " ).append( targetSchemaId ).append( " TRANSFER " ).append(tableId); + Result result; + + try { + int a = executeSqlUpdate( transaction, query.toString() ); + result = new Result( a ).setGeneratedQuery( query.toString() ); + transaction.commit(); + } catch ( QueryExecutionException | TransactionException e ) { + log.error( "Caught exception while creating a table", e ); + result = new Result( e ).setGeneratedQuery( query.toString() ); + try { + transaction.rollback(); + } catch ( TransactionException ex ) { + log.error( "Could not rollback CREATE TABLE statement: {}", ex.getMessage(), ex ); + } + } + ctx.json( result ); + } + /** * Initialize a multipart request, so that the values can be fetched with request.raw().getPart( name ) @@ -4184,4 +4200,4 @@ public static class QueryExecutionException extends Exception { } -} +} \ No newline at end of file diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index daf1436407..849cd6a2ba 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -249,6 +249,8 @@ private void crudRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/createTable", crud::createTable ); + webuiServer.post( "/transferTable", crud::transferTable); + webuiServer.post( "/createCollection", crud.languageCrud::createCollection ); webuiServer.get( "/getGeneratedNames", crud::getGeneratedNames ); @@ -408,4 +410,4 @@ private static void enableCORS( Javalin webuiServer ) { } -} +} \ No newline at end of file diff --git a/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java new file mode 100644 index 0000000000..9e7533acec --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java @@ -0,0 +1,26 @@ +/* + * Copyright 2019-2021 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.models.requests; + + +public class TransferTableRequest { + + public String table; + public String sourceSchema; + public String targetSchema; + +} \ No newline at end of file From 1d11bb4ca9b930cfd97e0771452d345440d90e54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Sun, 27 Nov 2022 20:01:40 +0100 Subject: [PATCH 14/26] relational to document is working. 
--- .../polypheny/db/processing/DataMigrator.java | 1 + .../org/polypheny/db/ddl/DdlManagerImpl.java | 76 ++++++++++++- .../db/processing/DataMigratorImpl.java | 106 ++++++++++++++++-- 3 files changed, 172 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 1830269180..53bb778888 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -88,4 +88,5 @@ void copyPartitionData( void copyGraphData( CatalogGraphDatabase graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter ); + void copyRelationalDataToDocumentData(Transaction transaction , CatalogTable sourceTable, long targetSchemaId); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 213c61ac53..aac8f82b0c 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -18,6 +18,8 @@ import com.google.common.collect.ImmutableList; + +import java.sql.ResultSetMetaData; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -31,6 +33,8 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.polypheny.db.PolyImplementation; import org.polypheny.db.StatisticsManager; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; @@ -119,6 +123,9 @@ import org.polypheny.db.ddl.exception.PlacementNotExistsException; import org.polypheny.db.ddl.exception.SchemaNotExistException; import org.polypheny.db.ddl.exception.UnknownIndexMethodException; +import org.polypheny.db.languages.QueryParameters; +import org.polypheny.db.languages.mql.MqlNode; +import org.polypheny.db.languages.mql.MqlQueryParameters; import org.polypheny.db.monitoring.events.DdlEvent; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.partition.PartitionManager; @@ -128,6 +135,7 @@ import org.polypheny.db.partition.properties.TemperaturePartitionProperty.PartitionCostIndication; import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation; import org.polypheny.db.prepare.Context; +import org.polypheny.db.processing.AutomaticDdlProcessor; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.runtime.PolyphenyDbContextException; @@ -136,11 +144,17 @@ import org.polypheny.db.schema.PolySchemaBuilder; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.transaction.Statement; +import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.type.ArrayType; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.CoreUtil; import org.polypheny.db.view.MaterializedViewManager; +import org.polypheny.db.webui.Crud; +import org.polypheny.db.webui.models.DbColumn; +import org.polypheny.db.webui.models.Result; +import org.polypheny.db.webui.models.SortState; +import org.polypheny.db.webui.models.requests.QueryRequest; import static org.polypheny.db.util.Static.RESOURCE; @@ -2267,15 +2281,21 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem .collect(Collectors.toList()); 
PlacementType placementType = catalog.getDataPlacement(sourceTable.dataPlacements.get(0), sourceTable.id).placementType; createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement ); + CatalogTable table; try { - CatalogTable table = catalog.getTable(targetSchemaId, sourceTable.name); - DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); + table = catalog.getTable(targetSchemaId, sourceTable.name); // dataMigrator.copyRelationalDataToGraphData( statement.getTransaction(), stores ) } catch (UnknownTableException e) { throw new RuntimeException(e); } // Migrator + DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); + dataMigrator.copyRelationalDataToDocumentData( statement.getTransaction(), sourceTable, targetSchemaId ); + catalog.deleteTable(sourceTable.id); + + statement.getQueryProcessor().resetCaches(); + } @@ -2291,6 +2311,58 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem */ } + @NotNull + public static Result getResult(QueryLanguage language, Statement statement, String query, PolyImplementation result, Transaction transaction, final boolean noLimit ) { + Catalog catalog = Catalog.getInstance(); + + List> rows = result.getRows( statement, noLimit ? -1 : language == QueryLanguage.CYPHER ? RuntimeConfig.UI_NODE_AMOUNT.getInteger() : RuntimeConfig.UI_PAGE_SIZE.getInteger() ); + + boolean hasMoreRows = result.hasMoreRows(); + + CatalogTable catalogTable = null; + + + ArrayList header = new ArrayList<>(); + for ( AlgDataTypeField metaData : result.rowType.getFieldList() ) { + String columnName = metaData.getName(); + + + DbColumn dbCol = new DbColumn( + metaData.getName(), + metaData.getType().getFullTypeString(), + metaData.getType().isNullable() == (ResultSetMetaData.columnNullable == 1), + metaData.getType().getPrecision(), + null, + null ); + + // Get column default values + if ( catalogTable != null ) { + try { + if ( catalog.checkIfExistsColumn( catalogTable.id, columnName ) ) { + CatalogColumn catalogColumn = catalog.getColumn( catalogTable.id, columnName ); + if ( catalogColumn.defaultValue != null ) { + dbCol.defaultValue = catalogColumn.defaultValue.value; + } + } + } catch ( UnknownColumnException e ) { + log.error( "Caught exception", e ); + } + } + header.add( dbCol ); + } + + ArrayList data = Crud.computeResultData( rows, header, statement.getTransaction() ); + + return new Result( header.toArray( new DbColumn[0] ), data.toArray( new String[0][] ) ) + .setNamespaceType( result.getNamespaceType() ) + .setNamespaceName( "target" ) + .setLanguage( language ) + .setAffectedRows( data.size() ) + .setHasMoreRows( hasMoreRows ) + .setXid( transaction.getXid().toString() ) + .setGeneratedQuery( query ); + } + @Override public void createCollection( long schemaId, String name, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 10fbe8baba..66c2548575 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -17,16 +17,9 @@ package org.polypheny.db.processing; import com.google.common.collect.ImmutableList; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; 
-import java.util.LinkedList; -import java.util.List; -import java.util.Map; + +import java.util.*; import java.util.Map.Entry; -import java.util.NoSuchElementException; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.avatica.MetaImpl; @@ -56,6 +49,9 @@ import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.languages.QueryParameters; +import org.polypheny.db.languages.mql.MqlNode; +import org.polypheny.db.languages.mql.MqlQueryParameters; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.plan.AlgOptCluster; @@ -74,6 +70,9 @@ import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.LimitIterator; +import org.polypheny.db.webui.models.Result; + +import static org.polypheny.db.ddl.DdlManagerImpl.getResult; @Slf4j @@ -147,6 +146,95 @@ public void copyGraphData( CatalogGraphDatabase target, Transaction transaction, } + @Override + public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTable sourceTable, long targetSchemaId ) { + try { + Catalog catalog = Catalog.getInstance(); + List sourceColumns = new ArrayList<>(); + for ( String columnName : sourceTable.getColumnNames() ) { + sourceColumns.add( catalog.getColumn( sourceTable.id, columnName ) ); + } + + Map> sourceColumnPlacements = new HashMap<>(); + sourceColumnPlacements.put( + sourceTable.partitionProperty.partitionIds.get( 0 ), + selectSourcePlacements( sourceTable, sourceColumns, -1 ) ); + + Statement sourceStatement = transaction.createStatement(); + + Map> subDistribution = new HashMap<>( sourceColumnPlacements ); + subDistribution.keySet().retainAll( Arrays.asList( sourceTable.partitionProperty.partitionIds.get( 0 ) ) ); + + AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution ); + PolyImplementation result = sourceStatement.getQueryProcessor().prepareQuery( + sourceAlg, + sourceAlg.alg.getCluster().getTypeFactory().builder().build(), + true, + false, + false ); + + Map sourceColMapping = new LinkedHashMap<>(); + for ( CatalogColumn catalogColumn : sourceColumns ) { + int i = 0; + for ( AlgDataTypeField metaData : result.getRowType().getFieldList() ) { + if ( metaData.getName().equalsIgnoreCase( catalogColumn.name ) ) { + sourceColMapping.put( catalogColumn.name, i ); + } + i++; + } + } + + int batchSize = RuntimeConfig.DATA_MIGRATOR_BATCH_SIZE.getInteger(); + final Enumerable enumerable = result.enumerable( sourceStatement.getDataContext() ); + Iterator sourceIterator = enumerable.iterator(); + while ( sourceIterator.hasNext() ) { + List> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() ); + List> values = new ArrayList<>(); + for ( List list : rows ) { + LinkedHashMap currentRowValues = new LinkedHashMap<>(); + sourceColMapping.forEach( ( key, value ) -> currentRowValues.put( key, list.get( value ) ) ); + values.add( currentRowValues ); + } + + boolean firstRow = true; + StringBuffer bf = new StringBuffer(); + bf.append( "db." 
+ sourceTable.name + ".insertMany([" ); + for ( Map row : values ) { + if ( firstRow ) { + bf.append( "{" ); + firstRow = false; + } else { + bf.append( ",{" ); + } + boolean firstColumn = true; + for ( Map.Entry entry : row.entrySet() ) { + if ( firstColumn == true ) { + firstColumn = false; + } else { + bf.append( "," ); + } + bf.append( "\"" + entry.getKey() + "\" : \"" + entry.getValue() + "\"" ); + } + bf.append( "}" ); + } + bf.append( "])" ); + + String query = bf.toString(); + + Statement targetStatement = transaction.createStatement(); + AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) transaction.getProcessor( Catalog.QueryLanguage.MONGO_QL ); + QueryParameters parameters = new MqlQueryParameters( query, catalog.getSchema( targetSchemaId ).name, Catalog.NamespaceType.DOCUMENT ); + MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 ); + AlgRoot logicalRoot = mqlProcessor.translate( targetStatement, parsed, parameters ); + PolyImplementation polyImplementation = targetStatement.getQueryProcessor().prepareQuery( logicalRoot, true ); + Result updateResult = getResult( Catalog.QueryLanguage.MONGO_QL, targetStatement, query, polyImplementation, transaction, false ); + } + } catch ( Throwable t ) { + throw new RuntimeException( t ); + } + } + + @NotNull private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGraph graph ) { List fields = new ArrayList<>(); From ac827e0a1b82a8da2c7b1b1875e8f222f1d48228 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Sat, 3 Dec 2022 18:16:02 +0100 Subject: [PATCH 15/26] fixes of transfer table --- .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 27 +++----------------- .../SqlAlterSchemaTransferTable.java | 3 +++ 3 files changed, 8 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index e58433e97c..4b952ae655 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -451,7 +451,7 @@ public static DdlManager getInstance() { */ public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; - public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException; + public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException, DdlOnSourceException; /** * Create a new view diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index aac8f82b0c..2d0159e458 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -2262,7 +2262,7 @@ public void createTable( long schemaId, String name, List fiel } @Override - public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement) throws EntityAlreadyExistsException { + public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement) throws EntityAlreadyExistsException, DdlOnSourceException { //
Check if there is already an entity with this name if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { return; @@ -2274,41 +2274,22 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem catalog.relocateTable(sourceTable, targetSchemaId); } - if ( targetNamespaceType == NamespaceType.DOCUMENT ) { + if ( sourceNamespaceType == NamespaceType.RELATIONAL && targetNamespaceType == NamespaceType.DOCUMENT ) { List stores = sourceTable.dataPlacements .stream() .map(id -> (DataStore) AdapterManager.getInstance().getAdapter(id)) .collect(Collectors.toList()); PlacementType placementType = catalog.getDataPlacement(sourceTable.dataPlacements.get(0), sourceTable.id).placementType; createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement ); - CatalogTable table; - try { - table = catalog.getTable(targetSchemaId, sourceTable.name); - // dataMigrator.copyRelationalDataToGraphData( statement.getTransaction(), stores ) - } catch (UnknownTableException e) { - throw new RuntimeException(e); - } + // Migrator DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyRelationalDataToDocumentData( statement.getTransaction(), sourceTable, targetSchemaId ); - catalog.deleteTable(sourceTable.id); + dropTable( sourceTable, statement ); statement.getQueryProcessor().resetCaches(); - } - - - //statement.getTransaction().getSchema().add(table.name, catalog.getSchema( targetSchemaId )., NamespaceType.DOCUMENT); - - /* - if ( catalog.getSchema( targetSchemaId ).namespaceType == NamespaceType.DOCUMENT ) { - PolyphenyDbCatalogReader catalogReader = statement.getTransaction().getCatalogReader(); - catalogReader.getSchemaPaths().add(List.of("kaka", "maka")); - statement.getTransaction().getCatalogReader(); - } - - */ } @NotNull diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java index 443bdd0e6e..5e8194cf3b 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java @@ -23,6 +23,7 @@ import org.polypheny.db.catalog.exceptions.UnknownSchemaException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.ddl.DdlManager; +import org.polypheny.db.ddl.exception.DdlOnSourceException; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.nodes.Node; @@ -96,6 +97,8 @@ public void execute( Context context, Statement statement, QueryParameters param throw CoreUtil.newContextException( table.getPos(), RESOURCE.schemaNotFound( table.getSimple() ) ); } catch (EntityAlreadyExistsException e) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableExists( table.names.get( 1 ) ) ); + } catch ( DdlOnSourceException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.ddlOnSourceTable() ); } } From d4ff64a4468779201cc78aa3317296b106c3bbd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Wed, 7 Dec 2022 19:39:58 +0100 Subject: [PATCH 16/26] relational -> document: handling null values --- .../polypheny/db/processing/DataMigratorImpl.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git 
a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 66c2548575..3aa88b4504 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -208,12 +208,14 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa } boolean firstColumn = true; for ( Map.Entry entry : row.entrySet() ) { - if ( firstColumn == true ) { - firstColumn = false; - } else { - bf.append( "," ); + if (entry.getValue() != null ) { + if ( firstColumn == true ) { + firstColumn = false; + } else { + bf.append( "," ); + } + bf.append( "\"" + entry.getKey() + "\" : \"" + entry.getValue() + "\"" ); } - bf.append( "\"" + entry.getKey() + "\" : \"" + entry.getValue() + "\"" ); } bf.append( "}" ); } From 5f7047029228328839ae6e83eaf39388ad38a391 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Wed, 7 Dec 2022 20:39:44 +0100 Subject: [PATCH 17/26] document -> document: transferTable implemented. --- .../org/polypheny/db/catalog/CatalogImpl.java | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 106b16d744..b094c63947 100644 --- a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -1919,6 +1919,13 @@ public long relocateTable(CatalogTable sourceTable, long targetNamespaceId ) { columnNames.remove( new Object[]{sourceTable.databaseId, sourceTable.namespaceId, sourceTable.id, targetCatalogColumn.name } ); columnNames.put( new Object[]{sourceTable.databaseId, targetNamespaceId, sourceTable.id, targetCatalogColumn.name }, targetCatalogColumn ); } + + if( getSchema(sourceTable.namespaceId).namespaceType == NamespaceType.DOCUMENT ) { + CatalogCollection targetCollection = transferCatalogCollection( collections.get( sourceTable.id ), targetNamespaceId ); + collections.replace( sourceTable.id, targetCollection ); + collectionNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.name } ); + collectionNames.put( new Object[]{ targetTable.databaseId, targetNamespaceId, targetTable.name }, targetCollection ); + } } listeners.firePropertyChange( "table", sourceTable, null ); @@ -5504,7 +5511,6 @@ private CatalogKey getKey( long keyId ) { } } - @NotNull private static CatalogColumn transferCatalogColumn(long targetNamespaceId, CatalogColumn sourceCatalogColumn) { CatalogColumn targetCatalogColumn = new CatalogColumn( sourceCatalogColumn.id, @@ -5525,8 +5531,6 @@ private static CatalogColumn transferCatalogColumn(long targetNamespaceId, Catal return targetCatalogColumn; } - - @NotNull private CatalogTable transferCatalogTable(CatalogTable sourceTable, long targetNamespaceId) { return new CatalogTable( sourceTable.id, @@ -5543,6 +5547,17 @@ private CatalogTable transferCatalogTable(CatalogTable sourceTable, long targetN sourceTable.connectedViews); } + private CatalogCollection transferCatalogCollection(CatalogCollection sourceCollection, long targetNamespaceId) { + return new CatalogCollection( + sourceCollection.databaseId, + targetNamespaceId, + sourceCollection.id, + sourceCollection.name, + sourceCollection.placements, + sourceCollection.entityType, + sourceCollection.physicalName); + } + static class CatalogValidator { 
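With the relational -> document migration and null handling above, plus the catalog-level relocation in PATCH 17, the transfer feature is usable end to end. A minimal sketch of the statement the Web UI builds in Crud.transferTable, using an invented "hr"."employee" table and "docstore" namespace (at this point in the series the grammar has no PRIMARY KEY clause yet; that arrives in PATCH 20):

ALTER SCHEMA "docstore" TRANSFER "hr"."employee"

For a relational source and a document target, copyRelationalDataToDocumentData then emits batched MongoQL inserts of roughly this shape (values invented; the batch size comes from RuntimeConfig.DATA_MIGRATOR_BATCH_SIZE, and null fields are skipped as of PATCH 16):

db.employee.insertMany([{"id" : "1", "name" : "Alice"},{"id" : "2"}])
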
From e3208d70e05f6b21fc94a6ea66b35d928983dff1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Sat, 10 Dec 2022 20:12:47 +0100 Subject: [PATCH 18/26] document -> relational. new table with varchar columns --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 53 ++++++++++++++++--- 1 file changed, 47 insertions(+), 6 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 2d0159e458..6513ce2a33 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; +import com.google.gson.JsonParser; import java.sql.ResultSetMetaData; import java.util.ArrayList; import java.util.Arrays; @@ -128,6 +129,7 @@ import org.polypheny.db.languages.mql.MqlQueryParameters; import org.polypheny.db.monitoring.events.DdlEvent; import org.polypheny.db.monitoring.events.StatementEvent; +import org.polypheny.db.nodes.DataTypeSpec; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.partition.properties.PartitionProperty; @@ -153,8 +155,6 @@ import org.polypheny.db.webui.Crud; import org.polypheny.db.webui.models.DbColumn; import org.polypheny.db.webui.models.Result; -import org.polypheny.db.webui.models.SortState; -import org.polypheny.db.webui.models.requests.QueryRequest; import static org.polypheny.db.util.Static.RESOURCE; @@ -2267,14 +2267,14 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { return; } - NamespaceType sourceNamespaceType = catalog.getSchema(sourceTable.namespaceId).namespaceType; - NamespaceType targetNamespaceType = catalog.getSchema(targetSchemaId).namespaceType; + CatalogSchema sourceNamespace = catalog.getSchema(sourceTable.namespaceId); + CatalogSchema targetNamespace = catalog.getSchema(targetSchemaId); - if ( sourceNamespaceType == targetNamespaceType ) { + if ( sourceNamespace.getNamespaceType() == targetNamespace.getNamespaceType() ) { catalog.relocateTable(sourceTable, targetSchemaId); } - if ( sourceNamespaceType == NamespaceType.RELATIONAL && targetNamespaceType == NamespaceType.DOCUMENT ) { + if ( sourceNamespace.getNamespaceType() == NamespaceType.RELATIONAL && targetNamespace.getNamespaceType() == NamespaceType.DOCUMENT ) { List stores = sourceTable.dataPlacements .stream() .map(id -> (DataStore) AdapterManager.getInstance().getAdapter(id)) @@ -2290,6 +2290,47 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem statement.getQueryProcessor().resetCaches(); } + + if ( sourceNamespace.getNamespaceType() == NamespaceType.DOCUMENT && targetNamespace.getNamespaceType() == NamespaceType.RELATIONAL ) { + CatalogCollection sourceCollection = catalog.getCollection( sourceTable.id ); + List stores = sourceTable.dataPlacements + .stream() + .map(id -> (DataStore) AdapterManager.getInstance().getAdapter(id)) + .collect(Collectors.toList()); + PlacementType placementType = catalog.getDataPlacement(sourceTable.dataPlacements.get(0), sourceTable.id).placementType; + + + String query = String.format( "db.%s.find({})", sourceTable.name ); + QueryParameters parameters = new MqlQueryParameters( query, sourceNamespace.name, NamespaceType.DOCUMENT ); + AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) 
statement.getTransaction().getProcessor( QueryLanguage.MONGO_QL ); + MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 ); + AlgRoot logicalRoot = mqlProcessor.translate( statement, parsed, parameters ); + PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( logicalRoot, true ); + Result result1 = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false ); + + List fieldNames = new ArrayList(); + for ( String[] documents : result1.getData()) { + for ( String document : documents) { + + List fieldsInDocument = new ArrayList<>(JsonParser.parseString( document ).getAsJsonObject().keySet()); + fieldsInDocument.removeAll( fieldNames ); + fieldsInDocument.remove( "_id"); + fieldNames.addAll( fieldsInDocument ); + } + } + + ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 24, null, null, null, true ); + List fieldInformations = fieldNames + .stream() + .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, fieldNames.indexOf( fieldName ) + 1 ) ) + .collect( Collectors.toList()); + + List constraintInformations = Collections.singletonList( new ConstraintInformation( "primary", ConstraintType.PRIMARY, Collections.singletonList( fieldNames.get( 0 ) ) ) ); + createTable( targetSchemaId, sourceTable.name, fieldInformations, constraintInformations, false, stores, placementType, statement); + + dropCollection( sourceCollection, statement ); + statement.getQueryProcessor().resetCaches(); + } } @NotNull From 9ee53c7c6a63601853baa5301acf36df4b7bb1ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Sun, 11 Dec 2022 12:23:53 +0100 Subject: [PATCH 19/26] document -> relational. 
a simplified migration is working --- .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../polypheny/db/processing/DataMigrator.java | 6 ++- .../org/polypheny/db/ddl/DdlManagerImpl.java | 22 +++++--- .../db/processing/DataMigratorImpl.java | 50 +++++++++++++++++++ .../SqlAlterSchemaTransferTable.java | 6 +++ 5 files changed, 77 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 4b952ae655..a8d30dfaec 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -451,7 +451,7 @@ public static DdlManager getInstance() { */ public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; - public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException, DdlOnSourceException; + public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException; /** * Create a new view diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 53bb778888..dd320af9dc 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -16,6 +16,7 @@ package org.polypheny.db.processing; +import com.google.gson.JsonObject; import java.util.List; import java.util.Map; import org.polypheny.db.algebra.AlgRoot; @@ -24,10 +25,10 @@ import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; - public interface DataMigrator { void copyData( @@ -89,4 +90,7 @@ void copyPartitionData( void copyGraphData( CatalogGraphDatabase graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter ); void copyRelationalDataToDocumentData(Transaction transaction , CatalogTable sourceTable, long targetSchemaId); + + void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, CatalogTable table ) throws UnknownColumnException; + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 6513ce2a33..62b9b22997 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; +import com.google.gson.JsonObject; import com.google.gson.JsonParser; import java.sql.ResultSetMetaData; import java.util.ArrayList; @@ -2262,7 +2263,7 @@ public void createTable( long schemaId, String name, List fiel } @Override - public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement) throws EntityAlreadyExistsException, DdlOnSourceException { + public void 
transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException { // Check if there is already an entity with this name if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { return; @@ -2306,28 +2307,35 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 ); AlgRoot logicalRoot = mqlProcessor.translate( statement, parsed, parameters ); PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( logicalRoot, true ); - Result result1 = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false ); + Result result = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false ); List fieldNames = new ArrayList(); - for ( String[] documents : result1.getData()) { + List jsonObjects = new ArrayList(); + for ( String[] documents : result.getData()) { for ( String document : documents) { - - List fieldsInDocument = new ArrayList<>(JsonParser.parseString( document ).getAsJsonObject().keySet()); + JsonObject jsonObject = JsonParser.parseString( document ).getAsJsonObject(); + List fieldsInDocument = new ArrayList<>( jsonObject.keySet()); fieldsInDocument.removeAll( fieldNames ); fieldsInDocument.remove( "_id"); fieldNames.addAll( fieldsInDocument ); + jsonObjects.add( jsonObject ); } } - ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 24, null, null, null, true ); + ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 24, null, null, null, false ); + List fieldInformations = fieldNames .stream() - .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, fieldNames.indexOf( fieldName ) + 1 ) ) + .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), "", fieldNames.indexOf( fieldName ) + 1 ) ) .collect( Collectors.toList()); List constraintInformations = Collections.singletonList( new ConstraintInformation( "primary", ConstraintType.PRIMARY, Collections.singletonList( fieldNames.get( 0 ) ) ) ); createTable( targetSchemaId, sourceTable.name, fieldInformations, constraintInformations, false, stores, placementType, statement); + // Migrator + DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); + dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, catalog.getTable( targetSchemaId, sourceTable.name ) ); + dropCollection( sourceCollection, statement ); statement.getQueryProcessor().resetCaches(); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 3aa88b4504..2cb24f9230 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -18,6 +18,8 @@ import com.google.common.collect.ImmutableList; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import java.util.*; import java.util.Map.Entry; import java.util.stream.Collectors; @@ -26,6 +28,7 @@ import org.apache.calcite.linq4j.Enumerable; import org.jetbrains.annotations.NotNull; import org.polypheny.db.PolyImplementation; +import 
org.polypheny.db.adapter.DataStore; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.AlgStructuredTypeFlattener; @@ -48,6 +51,7 @@ import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.languages.mql.MqlNode; @@ -237,6 +241,52 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa } + @Override + public void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, CatalogTable targetTable ) throws UnknownColumnException { + final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + Catalog catalog = Catalog.getInstance(); + Map> columnValues = new HashMap<>(); + for ( JsonObject jsonObject : jsonObjects) { + for ( String columnName : targetTable.getColumnNames() ) { + CatalogColumn column = catalog.getColumn( targetTable.id, columnName ); + if ( !columnValues.containsKey( column ) ) { + columnValues.put( column, new LinkedList<>() ); + } + JsonElement jsonElement = jsonObject.get( columnName ); + if (jsonElement != null) { + columnValues.get( column ).add( jsonElement.getAsString() ); + } else { + columnValues.get( column ).add( null ); + } + } + } + + List targetColumnPlacements = new LinkedList<>(); + Statement targetStatement = transaction.createStatement(); + AlgRoot targetAlg; + for ( Entry> entry : columnValues.entrySet() ) { + CatalogColumn targetColumn = catalog.getColumn( targetTable.id, entry.getKey().name ); + targetStatement.getDataContext().addParameterValues(targetColumn.id, targetColumn.getAlgDataType( typeFactory ) , entry.getValue() ); + List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( targetColumn ); + for ( DataStore store : stores ) { + targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.getAdapterId(), targetColumn.id ) ); + } + } + + targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetTable.partitionProperty.partitionIds.get( 0 ) ); + Iterator iterator = targetStatement.getQueryProcessor() + .prepareQuery( targetAlg, targetAlg.validatedRowType, true, false, false ) + .enumerable( targetStatement.getDataContext() ) + .iterator(); + //noinspection WhileLoopReplaceableByForEach + while ( iterator.hasNext() ) { + iterator.next(); + } + + targetStatement.getDataContext().resetParameterValues(); + } + + @NotNull private static LogicalLpgValues getLogicalLpgValues( AlgBuilder builder, PolyGraph graph ) { List fields = new ArrayList<>(); diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java index 5e8194cf3b..3e97cf8c4a 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java @@ -20,7 +20,9 @@ import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; +import 
org.polypheny.db.catalog.exceptions.UnknownColumnException; import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.catalog.exceptions.UnknownUserException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.ddl.exception.DdlOnSourceException; @@ -99,6 +101,10 @@ public void execute( Context context, Statement statement, QueryParameters param throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableExists( table.names.get( 1 ) ) ); } catch ( DdlOnSourceException e ) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.ddlOnSourceTable() ); + } catch ( UnknownTableException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableNotFound( e.getTableName()) ); + } catch ( UnknownColumnException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFound( e.getColumnName() )); } } From 5de662cbe3bd1216e1e262230b7e756760771613 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 13 Dec 2022 19:18:31 +0100 Subject: [PATCH 20/26] document -> relational -> document is working --- .../main/java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../java/org/polypheny/db/ddl/DdlManagerImpl.java | 7 +++---- .../src/main/codegen/includes/parserImpls.ftl | 9 ++++++++- .../alterschema/SqlAlterSchemaTransferTable.java | 14 ++++++++++++-- .../src/main/java/org/polypheny/db/webui/Crud.java | 11 ++++++++++- .../db/webui/models/TransferTableRequest.java | 1 + 6 files changed, 35 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index a8d30dfaec..8832707b2e 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -451,7 +451,7 @@ public static DdlManager getInstance() { */ public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; - public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException; + public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement, List primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException; /** * Create a new view diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 62b9b22997..52487ae8b0 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -130,7 +130,6 @@ import org.polypheny.db.languages.mql.MqlQueryParameters; import org.polypheny.db.monitoring.events.DdlEvent; import org.polypheny.db.monitoring.events.StatementEvent; -import org.polypheny.db.nodes.DataTypeSpec; import org.polypheny.db.partition.PartitionManager; import org.polypheny.db.partition.PartitionManagerFactory; import org.polypheny.db.partition.properties.PartitionProperty; @@ -2263,7 +2262,7 @@ public void createTable( long schemaId, String name, List 
fiel } @Override - public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException { + public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement, List primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException { // Check if there is already an entity with this name if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { return; @@ -2326,10 +2325,10 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem List fieldInformations = fieldNames .stream() - .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), "", fieldNames.indexOf( fieldName ) + 1 ) ) + .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, fieldNames.indexOf( fieldName ) + 1 ) ) .collect( Collectors.toList()); - List constraintInformations = Collections.singletonList( new ConstraintInformation( "primary", ConstraintType.PRIMARY, Collections.singletonList( fieldNames.get( 0 ) ) ) ); + List constraintInformations = Collections.singletonList( new ConstraintInformation( "primary", ConstraintType.PRIMARY, primaryKeyColumnNames ) ); createTable( targetSchemaId, sourceTable.name, fieldInformations, constraintInformations, false, stores, placementType, statement); // Migrator diff --git a/sql-language/src/main/codegen/includes/parserImpls.ftl b/sql-language/src/main/codegen/includes/parserImpls.ftl index bc2a403ded..15f2982169 100644 --- a/sql-language/src/main/codegen/includes/parserImpls.ftl +++ b/sql-language/src/main/codegen/includes/parserImpls.ftl @@ -43,6 +43,7 @@ SqlAlterSchema SqlAlterSchema(Span s) : final SqlIdentifier schema; final SqlIdentifier name; final SqlIdentifier owner; + final SqlNodeList columnList; } { @@ -50,8 +51,14 @@ SqlAlterSchema SqlAlterSchema(Span s) : ( name = CompoundIdentifier() + ( + + columnList = ParenthesizedSimpleIdentifierList() + | + { columnList = null; } + ) { - return new SqlAlterSchemaTransferTable(s.end(this), name, schema); + return new SqlAlterSchemaTransferTable(s.end(this), name, schema, columnList); } | diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java index 3e97cf8c4a..df5ce37752 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java @@ -17,6 +17,8 @@ package org.polypheny.db.sql.language.ddl.alterschema; +import java.util.Optional; +import java.util.stream.Collectors; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; @@ -32,6 +34,7 @@ import org.polypheny.db.prepare.Context; import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.sql.language.SqlNode; +import org.polypheny.db.sql.language.SqlNodeList; import org.polypheny.db.sql.language.SqlWriter; import org.polypheny.db.sql.language.ddl.SqlAlterSchema; import org.polypheny.db.transaction.Statement; @@ -51,15 +54,17 @@ public class SqlAlterSchemaTransferTable extends 
SqlAlterSchema { private final SqlIdentifier table; private final SqlIdentifier targetSchema; + private final SqlNodeList primaryKeyColumns; /** * Creates a SqlAlterSchemaOwner. */ - public SqlAlterSchemaTransferTable(ParserPos pos, SqlIdentifier table, SqlIdentifier targetSchema) { + public SqlAlterSchemaTransferTable(ParserPos pos, SqlIdentifier table, SqlIdentifier targetSchema, SqlNodeList primaryKeyColumns ) { super( pos ); this.table = Objects.requireNonNull(table); this.targetSchema = Objects.requireNonNull(targetSchema); + this.primaryKeyColumns = primaryKeyColumns; } @@ -93,7 +98,12 @@ public void execute( Context context, Statement statement, QueryParameters param CatalogTable catalogTable = getCatalogTable( context, table); long targetSchemaId = catalog.getSchema( context.getDatabaseId(), targetSchema.getNames().get(0) ).id; - DdlManager.getInstance().transferTable( catalogTable, targetSchemaId, statement ); + + List primaryKeyColumnNames = (primaryKeyColumns != null) + ? primaryKeyColumns.getList().stream().map( Node::toString ).collect( Collectors.toList() ) + : null; + + DdlManager.getInstance().transferTable( catalogTable, targetSchemaId, statement, primaryKeyColumnNames ); } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.schemaNotFound( table.getSimple() ) ); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index ee021b00ac..5d0cc11830 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -620,7 +620,16 @@ void transferTable(final Context ctx ) { StringBuilder query = new StringBuilder(); String targetSchemaId = String.format( "\"%s\"", request.targetSchema ); String tableId = String.format( "\"%s\".\"%s\"", request.sourceSchema, request.table ); - query.append( "ALTER SCHEMA " ).append( targetSchemaId ).append( " TRANSFER " ).append(tableId); + query + .append( "ALTER SCHEMA " ) + .append( targetSchemaId ) + .append( " TRANSFER " ) + .append(tableId); + if( request.primaryKeyNames != null && !request.primaryKeyNames.isBlank() ) { + query + .append( " ADD PRIMARY KEY " ) + .append(request.primaryKeyNames); + } Result result; try { diff --git a/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java index 9e7533acec..2affae415f 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/TransferTableRequest.java @@ -22,5 +22,6 @@ public class TransferTableRequest { public String table; public String sourceSchema; public String targetSchema; + public String primaryKeyNames; } \ No newline at end of file From 38547d4b698345a6a62688a55375302f7968461a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 13 Dec 2022 19:24:50 +0100 Subject: [PATCH 21/26] unused method removed --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 52487ae8b0..537ea2e6ce 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -3285,33 +3285,6 @@ private void prepareMonitoring( Statement statement, Kind kind, CatalogTable cat } } - 
protected CatalogTable getCatalogTable( Context context, SqlIdentifier tableName ) { - CatalogTable catalogTable; - try { - long schemaId; - String tableOldName; - Catalog catalog = Catalog.getInstance(); - if ( tableName.names.size() == 3 ) { // DatabaseName.SchemaName.TableName - schemaId = catalog.getSchema( tableName.names.get( 0 ), tableName.names.get( 1 ) ).id; - tableOldName = tableName.names.get( 2 ); - } else if ( tableName.names.size() == 2 ) { // SchemaName.TableName - schemaId = catalog.getSchema( context.getDatabaseId(), tableName.names.get( 0 ) ).id; - tableOldName = tableName.names.get( 1 ); - } else { // TableName - schemaId = catalog.getSchema( context.getDatabaseId(), context.getDefaultSchemaName() ).id; - tableOldName = tableName.names.get( 0 ); - } - catalogTable = catalog.getTable( schemaId, tableOldName ); - } catch ( UnknownDatabaseException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.databaseNotFound( tableName.toString() ) ); - } catch ( UnknownSchemaException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.schemaNotFound( tableName.toString() ) ); - } catch ( UnknownTableException e ) { - throw CoreUtil.newContextException( tableName.getPos(), RESOURCE.tableNotFound( tableName.toString() ) ); - } - return catalogTable; - } - @Override public void dropFunction() { throw new RuntimeException( "Not supported yet" ); From 52505c0efad8a92a8b05dd6e02d88dd5a60d6a80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Wed, 14 Dec 2022 18:42:56 +0100 Subject: [PATCH 22/26] little refactor --- .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../polypheny/db/processing/DataMigrator.java | 8 ++--- .../org/polypheny/db/ddl/DdlManagerImpl.java | 27 +++++++------- .../db/processing/DataMigratorImpl.java | 35 ++++++++++--------- .../altertable/SqlAlterTableMergeColumns.java | 9 ++--- .../java/org/polypheny/db/webui/Crud.java | 17 +++++---- 6 files changed, 52 insertions(+), 46 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 55bcfba812..52c70af66d 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -205,7 +205,7 @@ public static DdlManager getInstance() { * @param defaultValue the new default value of the merged column * @param statement the initial query statement */ - public abstract void mergeColumns(CatalogTable catalogTable, List sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException; + public abstract void mergeColumns( CatalogTable catalogTable, List sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException; /** * Adds an index to a table diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index fdb4981582..a7fabb0e2b 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -77,16 +77,16 @@ void copyPartitionData( List targetPartitionIds ); /** - * Currently used to transfer data if
unpartitioned is about to be partitioned. - * For Table Merge use {@link #copySelectiveData(Transaction, CatalogAdapter, CatalogTable, CatalogTable, List, Map, List)} } instead + * Used to merge columns in a relational table. The values of the source columns will be selected, + * concatenated and inserted into the target column. * * @param transaction Transactional scope * @param store Target Store where data should be migrated to * @param sourceColumns Columns to be merged * @param targetColumn New column to be added - * @param joinString + * @param joinString String delimiter between the values to be merged */ - void mergeColumns(Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn, String joinString); + void mergeColumns( Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn, String joinString ); AlgRoot buildInsertStatement( Statement statement, List to, long partitionId ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 729bfbe24c..005e8fe255 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -28,6 +28,7 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.IntStream; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; @@ -597,13 +598,13 @@ public void addForeignKey( CatalogTable catalogTable, CatalogTable refTable, Lis catalog.addForeignKey( catalogTable.id, columnIds, refTable.id, referencesIds, constraintName, onUpdate, onDelete ); } - public void mergeColumns(CatalogTable catalogTable, List sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException { - if ( catalog.checkIfExistsColumn( catalogTable.id, newColumnName) ) { - throw new ColumnAlreadyExistsException(newColumnName, catalogTable.name ); + public void mergeColumns( CatalogTable catalogTable, List sourceColumnNames, String newColumnName, String joinString, ColumnTypeInformation type, boolean nullable, String defaultValue, Statement statement ) throws UnknownColumnException, ColumnAlreadyExistsException, ColumnNotExistsException { + if ( catalog.checkIfExistsColumn( catalogTable.id, newColumnName ) ) { + throw new ColumnAlreadyExistsException( newColumnName, catalogTable.name ); } - CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, sourceColumnNames.get( sourceColumnNames.size()-1 ) ); + CatalogColumn afterColumn = getCatalogColumn( catalogTable.id, sourceColumnNames.get( sourceColumnNames.size() - 1 ) ); int position = updateAdjacentPositions( catalogTable, null, afterColumn ); long columnId = catalog.addColumn( @@ -622,7 +623,7 @@ public void mergeColumns(CatalogTable catalogTable, List sourceColumnNam // Add default value addDefaultValue( defaultValue, columnId ); - CatalogColumn addedColumn = catalog.getColumn ( columnId ); + CatalogColumn addedColumn = catalog.getColumn( columnId ); // Remove quotes from joinString if ( joinString.startsWith( "'" ) ) { @@ -638,7 +639,7 @@ public void mergeColumns(CatalogTable catalogTable, List sourceColumnNam for ( String columnName : sourceColumnNames ) { sourceCatalogColumns.add( catalog.getColumn( catalogTable.id, 
columnName ) ); } - CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, newColumnName); + CatalogColumn targetCatalogColumn = catalog.getColumn( catalogTable.id, newColumnName ); // Add column on underlying data stores and insert default value for ( DataStore store : stores ) { @@ -646,13 +647,13 @@ public void mergeColumns(CatalogTable catalogTable, List sourceColumnNam store.getAdapterId(), addedColumn.id, PlacementType.AUTOMATIC, - null, // Will be set later - null, // Will be set later - null // Will be set later - );//Not a valid partitionID --> placeholder + null, + null, + null + ); AdapterManager.getInstance().getStore( store.getAdapterId() ).addColumn( statement.getPrepareContext(), catalogTable, addedColumn ); // Call migrator - dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn, joinString); + dataMigrator.mergeColumns( statement.getTransaction(), catalog.getAdapter( store.getAdapterId() ), sourceCatalogColumns, targetCatalogColumn, joinString ); for ( CatalogColumn sourceCatalogColumn : sourceCatalogColumns ) { // Delete column from underlying data stores @@ -668,9 +669,7 @@ public void mergeColumns(CatalogTable catalogTable, List sourceColumnNam catalog.deleteColumn( sourceCatalogColumn.id ); if ( sourceCatalogColumn.position != columns.size() ) { // Update position of the other columns - for ( int i = sourceCatalogColumn.position; i < columns.size(); i++ ) { - catalog.setColumnPosition( columns.get( i ).id, i ); - } + IntStream.range( sourceCatalogColumn.position, columns.size() ).forEach( i -> catalog.setColumnPosition( columns.get( i ).id, i ) ); } } } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index d5c60829a1..932d1c11fb 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -304,7 +304,7 @@ public void executeQuery( List selectColumnList, AlgRoot sourceAl @Override - public void executeMergeQuery(List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, String joinString, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { + public void executeMergeQuery( List primaryKeyColumns, List sourceColumns, CatalogColumn targetColumn, String joinString, AlgRoot sourceAlg, Statement sourceStatement, Statement targetStatement, AlgRoot targetAlg, boolean isMaterializedView, boolean doesSubstituteOrderBy ) { try { PolyImplementation result; if ( isMaterializedView ) { @@ -326,6 +326,7 @@ public void executeMergeQuery(List primaryKeyColumns, List sourceIterator = enumerable.iterator(); + // Get the mappings of the source columns from the Catalog Map sourceColMapping = new LinkedHashMap<>(); for ( CatalogColumn catalogColumn : sourceColumns ) { int i = 0; @@ -352,6 +353,7 @@ public void executeMergeQuery(List primaryKeyColumns, List> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() ); Map> values = new LinkedHashMap<>(); + // Read the values of the source columns from all rows for ( List list : rows ) { for ( Map.Entry entry : sourceColMapping.entrySet() ) { if ( !values.containsKey( entry.getKey() ) ) { @@ -370,27 +372,28 @@ public void executeMergeQuery(List primaryKeyColumns, List 
mergedValueList = null; for ( Map.Entry> v : values.entrySet() ) { - if ( !primaryKeyColumns.stream().map(c -> c.id).collect(Collectors.toList()).contains( v.getKey() ) ) { - if( mergedValueList == null ) { + if ( !primaryKeyColumns.stream().map( c -> c.id ).collect( Collectors.toList() ).contains( v.getKey() ) ) { + if ( mergedValueList == null ) { mergedValueList = v.getValue(); } else { int j = 0; - for (Object value : mergedValueList) { - mergedValueList.set(j, ((String) value).concat(joinString + v.getValue().get(j++))); + for ( Object value : mergedValueList ) { + mergedValueList.set( j, ((String) value).concat( joinString + v.getValue().get( j++ ) ) ); } } } } - targetStatement.getDataContext().addParameterValues(targetColumn.id, targetColumn.getAlgDataType( typeFactory ) , mergedValueList ); + targetStatement.getDataContext().addParameterValues( targetColumn.id, targetColumn.getAlgDataType( typeFactory ), mergedValueList ); + // Select the PK columns for the target statement for ( CatalogColumn primaryKey : primaryKeyColumns ) { AlgDataType primaryKeyAlgDataType = primaryKey.getAlgDataType( typeFactory ); List primaryKeyValues = values.get( primaryKey.id ); - targetStatement.getDataContext().addParameterValues(primaryKey.id, primaryKeyAlgDataType , primaryKeyValues ); + targetStatement.getDataContext().addParameterValues( primaryKey.id, primaryKeyAlgDataType, primaryKeyValues ); } Iterator iterator = targetStatement.getQueryProcessor() @@ -409,7 +412,6 @@ public void executeMergeQuery(List primaryKeyColumns, List to, long partitionId ) { List qualifiedTableName = ImmutableList.of( @@ -872,12 +874,12 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Ca @Override - public void mergeColumns(Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn, String joinString) { + public void mergeColumns( Transaction transaction, CatalogAdapter store, List sourceColumns, CatalogColumn targetColumn, String joinString ) { CatalogTable table = Catalog.getInstance().getTable( sourceColumns.get( 0 ).tableId ); CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( table.primaryKey ); List selectColumnList = new LinkedList<>( sourceColumns ); - List primaryKeyList = new LinkedList<>( ); + List primaryKeyList = new LinkedList<>(); // Add primary keys to select column list for ( long cid : primaryKey.columnIds ) { @@ -888,19 +890,20 @@ public void mergeColumns(Transaction transaction, CatalogAdapter store, List> sourceColumnPlacements = new HashMap<>(); sourceColumnPlacements.put( table.partitionProperty.partitionIds.get( 0 ), - selectSourcePlacements( table, selectColumnList, -1) ); + selectSourcePlacements( table, selectColumnList, -1 ) ); + // Get the placement of the newly added target column CatalogColumnPlacement targetColumnPlacement = Catalog.getInstance().getColumnPlacement( store.id, targetColumn.id ); + Map> subDistribution = new HashMap<>( sourceColumnPlacements ); + subDistribution.keySet().retainAll( Arrays.asList( table.partitionProperty.partitionIds.get( 0 ) ) ); + // Initialize statements for the reading and inserting Statement sourceStatement = transaction.createStatement(); Statement targetStatement = transaction.createStatement(); - - Map> subDistribution = new HashMap<>( sourceColumnPlacements ); - subDistribution.keySet().retainAll( Arrays.asList( table.partitionProperty.partitionIds.get( 0 ) ) ); - AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution ); AlgRoot targetAlg = 
buildUpdateStatement( targetStatement, Collections.singletonList( targetColumnPlacement ), table.partitionProperty.partitionIds.get( 0 ) ); diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java index a67c976aa9..d1baa9e612 100644 --- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java +++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableMergeColumns.java @@ -55,6 +55,7 @@ public class SqlAlterTableMergeColumns extends SqlAlterTable { private final boolean nullable; private final SqlNode defaultValue; + public SqlAlterTableMergeColumns( ParserPos pos, SqlIdentifier table, @@ -63,7 +64,7 @@ public SqlAlterTableMergeColumns( SqlNode joinString, SqlDataTypeSpec type, boolean nullable, - SqlNode defaultValue) { + SqlNode defaultValue ) { super( pos ); this.table = Objects.requireNonNull( table ); this.columnsToMerge = columnsToMerge; @@ -88,7 +89,7 @@ public List getSqlOperandList() { @Override - public void unparse(SqlWriter writer, int leftPrec, int rightPrec ) { + public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) { writer.keyword( "ALTER" ); writer.keyword( "TABLE" ); table.unparse( writer, leftPrec, rightPrec ); @@ -127,13 +128,13 @@ public void execute( Context context, Statement statement, QueryParameters param try { DdlManager.getInstance().mergeColumns( catalogTable, - columnsToMerge.getList().stream().map( Node::toString ).collect( Collectors.toList()), + columnsToMerge.getList().stream().map( Node::toString ).collect( Collectors.toList() ), targetColumnName.getSimple(), joinString, ColumnTypeInformation.fromDataTypeSpec( type ), nullable, defaultValue, - statement); + statement ); } catch ( UnknownColumnException e ) { throw CoreUtil.newContextException( columnsToMerge.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); } catch ( ColumnAlreadyExistsException e ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 71ad0c297f..dc7a12feb8 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1745,6 +1745,7 @@ void addColumn( final Context ctx ) { ctx.json( result ); } + /** * Delete a column of a table */ @@ -1773,8 +1774,9 @@ void dropColumn( final Context ctx ) { ctx.json( result ); } + /** - * Add a column to an existing table + * Merge multiple columns of table */ void mergeColumns( final Context ctx ) { MergeColumnsRequest request = ctx.bodyAsClass( MergeColumnsRequest.class ); @@ -1785,7 +1787,7 @@ void mergeColumns( final Context ctx ) { boolean nullable = Arrays.stream( request.sourceColumns ).allMatch( c -> c.nullable ); Integer precison = Arrays.stream( request.sourceColumns ).mapToInt( c -> c.precision ).sum(); - DbColumn newColumn = new DbColumn(request.targetColumnName, "varchar", nullable, precison, null, null); + DbColumn newColumn = new DbColumn( request.targetColumnName, "varchar", nullable, precison, null, null ); newColumn.collectionsType = ""; String as = ""; @@ -1798,14 +1800,14 @@ void mergeColumns( final Context ctx ) { String listOfColumnsToMerge = Arrays.stream( request.sourceColumns ) - .map( s -> "\"" + s.name + "\"") - .collect( Collectors.joining(", ")); + .map( s -> "\"" + s.name + "\"" ) + .collect( Collectors.joining( ", " ) ); 
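// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch (not part of the patch) of the statement
// assembled by the String.format call that follows; the table, column, and
// type names are made up, and the empty "as" slot mirrors the default value
// in the surrounding code.
class MergeColumnsQuerySketch {
    public static void main( String[] args ) {
        String query = String.format(
                "ALTER TABLE %s MERGE COLUMNS (%s) INTO \"%s\" WITH '%s' %s %s",
                "\"public\".\"person\"", "\"first_name\", \"last_name\"", "full_name", " ", "", "VARCHAR" );
        System.out.println( query );
        // ALTER TABLE "public"."person" MERGE COLUMNS ("first_name", "last_name") INTO "full_name" WITH ' '  VARCHAR
    }
}
// ---------------------------------------------------------------------------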
String query = String.format( "ALTER TABLE %s MERGE COLUMNS (%s) INTO \"%s\" WITH '%s' %s %s", tableId, listOfColumnsToMerge, newColumn.name, request.joinString, as, dataType ); //we don't want precision, scale etc. for source columns if ( newColumn.as == null ) { - if (newColumn.precision != null ) { + if ( newColumn.precision != null ) { query = query + "(" + newColumn.precision; if ( newColumn.scale != null ) { query = query + "," + newColumn.scale; @@ -1826,8 +1828,8 @@ void mergeColumns( final Context ctx ) { String defaultValue = Arrays .stream( request.sourceColumns ) .map( c -> c.defaultValue ) - .filter(s -> s != null && !s.isEmpty()) - .collect( Collectors.joining(request.joinString)); + .filter( s -> s != null && !s.isEmpty() ) + .collect( Collectors.joining( request.joinString ) ); if ( defaultValue != null && !defaultValue.equals( "" ) ) { query = query + " DEFAULT " + String.format( "'%s'", defaultValue ); @@ -1850,6 +1852,7 @@ void mergeColumns( final Context ctx ) { ctx.json( result ); } + /** * Get artificially generated index/foreign key/constraint names for placeholders in the UI */ From baa2856386a877782c23cffc4c58a33ce154ab0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Wed, 14 Dec 2022 20:37:03 +0100 Subject: [PATCH 23/26] A bigger amount of refactors --- .../org/polypheny/db/catalog/CatalogImpl.java | 40 ++++++---- .../org/polypheny/db/catalog/Catalog.java | 9 ++- .../java/org/polypheny/db/ddl/DdlManager.java | 9 +++ .../polypheny/db/processing/DataMigrator.java | 16 +++- .../org/polypheny/db/catalog/MockCatalog.java | 3 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 75 +++++++++++-------- .../db/processing/DataMigratorImpl.java | 53 ++++++++----- .../SqlAlterSchemaTransferTable.java | 24 +++--- 8 files changed, 152 insertions(+), 77 deletions(-) diff --git a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index b094c63947..91bbc89309 100644 --- a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -1893,34 +1893,44 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent return id; } - @Override - public long relocateTable(CatalogTable sourceTable, long targetNamespaceId ) { + /** + * {@inheritDoc} + */ + @Override + public long relocateTable( CatalogTable sourceTable, long targetNamespaceId ) { + // Clone the source table by changing the ID of the parent namespace CatalogTable targetTable = transferCatalogTable( sourceTable, targetNamespaceId ); synchronized ( this ) { + // Build the new immutable list for the source namespace by removing the table to transfer ImmutableList reducedSourceSchemaChildren = ImmutableList .copyOf( Collections2.filter( schemaChildren.get( sourceTable.namespaceId ), Predicates.not( Predicates.equalTo( sourceTable.id ) ) ) ); + // Build the new immutable list for the target namespace by adding the table to transfer ImmutableList extendedTargetSchemaChildren = new ImmutableList.Builder() - .addAll( schemaChildren.get(targetNamespaceId ) ) + .addAll( schemaChildren.get( targetNamespaceId ) ) .add( targetTable.id ) .build(); + // Replace the immutable list for both the source and target namespaces schemaChildren.replace( sourceTable.namespaceId, reducedSourceSchemaChildren ); schemaChildren.replace( targetNamespaceId, extendedTargetSchemaChildren ); + // Replace the tables' trees with the cloned table tables.replace( 
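// ---------------------------------------------------------------------------
// Editor's note: self-contained sketch (not part of the patch) of the
// ImmutableList rebuild pattern relocateTable uses above: filter the table
// out of the source namespace's children and append it to the target's
// children. Guava's ImmutableList is assumed, as in the patch itself.
import com.google.common.collect.ImmutableList;

class RelocateChildrenSketch {
    public static void main( String[] args ) {
        long tableId = 42L;
        ImmutableList<Long> sourceChildren = ImmutableList.of( 41L, 42L, 43L );
        ImmutableList<Long> targetChildren = ImmutableList.of( 7L );

        ImmutableList<Long> reducedSource = sourceChildren.stream()
                .filter( id -> id != tableId )
                .collect( ImmutableList.toImmutableList() );
        ImmutableList<Long> extendedTarget = new ImmutableList.Builder<Long>()
                .addAll( targetChildren )
                .add( tableId )
                .build();
        System.out.println( reducedSource + " / " + extendedTarget ); // [41, 43] / [7, 42]
    }
}
// ---------------------------------------------------------------------------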
sourceTable.id, targetTable ); tableNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.name } ); tableNames.put( new Object[]{ targetTable.databaseId, targetNamespaceId, targetTable.name }, targetTable ); - for( Long fieldId: sourceTable.fieldIds ) { + // Replace the trees of the tables' columns with cloned columns + for ( Long fieldId : sourceTable.fieldIds ) { CatalogColumn targetCatalogColumn = transferCatalogColumn( targetNamespaceId, columns.get( fieldId ) ); columns.replace( fieldId, targetCatalogColumn ); - columnNames.remove( new Object[]{sourceTable.databaseId, sourceTable.namespaceId, sourceTable.id, targetCatalogColumn.name } ); - columnNames.put( new Object[]{sourceTable.databaseId, targetNamespaceId, sourceTable.id, targetCatalogColumn.name }, targetCatalogColumn ); + columnNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.id, targetCatalogColumn.name } ); + columnNames.put( new Object[]{ sourceTable.databaseId, targetNamespaceId, sourceTable.id, targetCatalogColumn.name }, targetCatalogColumn ); } - if( getSchema(sourceTable.namespaceId).namespaceType == NamespaceType.DOCUMENT ) { + // When transferring between document-based namespaces, also replace the collection trees. + if ( getSchema( sourceTable.namespaceId ).namespaceType == NamespaceType.DOCUMENT ) { CatalogCollection targetCollection = transferCatalogCollection( collections.get( sourceTable.id ), targetNamespaceId ); collections.replace( sourceTable.id, targetCollection ); collectionNames.remove( new Object[]{ sourceTable.databaseId, sourceTable.namespaceId, sourceTable.name } ); @@ -1932,6 +1942,7 @@ public long relocateTable(CatalogTable sourceTable, long targetNamespaceId ) { return sourceTable.id; } + /** * {@inheritDoc} */ @@ -5511,7 +5522,8 @@ private CatalogKey getKey( long keyId ) { } } - private static CatalogColumn transferCatalogColumn(long targetNamespaceId, CatalogColumn sourceCatalogColumn) { + + private static CatalogColumn transferCatalogColumn( long targetNamespaceId, CatalogColumn sourceCatalogColumn ) { CatalogColumn targetCatalogColumn = new CatalogColumn( sourceCatalogColumn.id, sourceCatalogColumn.name, @@ -5527,11 +5539,12 @@ private static CatalogColumn transferCatalogColumn(long targetNamespaceId, Catal sourceCatalogColumn.cardinality, sourceCatalogColumn.nullable, sourceCatalogColumn.collation, - sourceCatalogColumn.defaultValue); + sourceCatalogColumn.defaultValue ); return targetCatalogColumn; } - private CatalogTable transferCatalogTable(CatalogTable sourceTable, long targetNamespaceId) { + + private CatalogTable transferCatalogTable( CatalogTable sourceTable, long targetNamespaceId ) { return new CatalogTable( sourceTable.id, sourceTable.name, @@ -5544,10 +5557,11 @@ private CatalogTable transferCatalogTable(CatalogTable sourceTable, long targetN sourceTable.dataPlacements, sourceTable.modifiable, sourceTable.partitionProperty, - sourceTable.connectedViews); + sourceTable.connectedViews ); } - private CatalogCollection transferCatalogCollection(CatalogCollection sourceCollection, long targetNamespaceId) { + + private CatalogCollection transferCatalogCollection( CatalogCollection sourceCollection, long targetNamespaceId ) { return new CatalogCollection( sourceCollection.databaseId, targetNamespaceId, @@ -5555,7 +5569,7 @@ private CatalogCollection transferCatalogCollection(CatalogCollection sourceColl sourceCollection.name, sourceCollection.placements, sourceCollection.entityType, - 
sourceCollection.physicalName);
+                sourceCollection.physicalName );
     }

diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index 26a5acd6c6..bbda006347 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -489,7 +489,14 @@ protected final boolean isValidIdentifier( final String str ) {
      */
     public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable );

-    public abstract long relocateTable(CatalogTable table, long targetNamespaceId );
+    /**
+     * Relocate a table from one namespace to another if the model of both namespaces is the same.
+     *
+     * @param table The table to relocate
+     * @param targetNamespaceId The id of the target namespace
+     * @return The id of the table (the ID of the target table remains the same as that of the source table)
+     */
+    public abstract long relocateTable( CatalogTable table, long targetNamespaceId );

     /**
diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index 8832707b2e..39c0471eec 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -451,6 +451,15 @@ public static DdlManager getInstance() {
      */
     public abstract void createTable( long schemaId, String tableName, List<FieldInformation> columns, List<ConstraintInformation> constraints, boolean ifNotExists, List<DataStore> stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException;

+    /**
+     * Transfer a table between two namespaces.
+     * Currently, the transfer works between namespaces of the same model as well as between relational and document-based namespaces and vice versa.
+     *
+     * @param table the table about to be transferred
+     * @param targetSchemaId the id of the target namespace
+     * @param statement the used statement
+     * @param primaryKeyColumnNames the names of the primary key columns of the target table
+     */
     public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement, List<String> primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException;

     /**
diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
index dd320af9dc..782376e488 100644
--- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
+++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java
@@ -26,6 +26,7 @@ import org.polypheny.db.catalog.entity.CatalogGraphDatabase;
 import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
+import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.transaction.Transaction;

@@ -86,11 +87,24 @@ void copyPartitionData(
     AlgRoot getSourceIterator( Statement statement, Map<Long, List<CatalogColumnPlacement>> placementDistribution );

-    void copyGraphData( CatalogGraphDatabase graph, Transaction transaction, Integer existingAdapterId, CatalogAdapter adapter );
+    /**
+     * Does migration when transferring between a relational and a document-based namespace.
+ * + * @param transaction Transactional scope + * @param sourceTable Source Table from where data is queried + * @param targetSchemaId ID of the target namespace + */ void copyRelationalDataToDocumentData(Transaction transaction , CatalogTable sourceTable, long targetSchemaId); + /** + * Does migration when transferring between a document-based and a relational namespace. + * + * @param transaction Transactional scope + * @param jsonObjects List of the JSON-objects of the source collection + * @param table Target table created in the {@link DdlManager} + */ void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, CatalogTable table ) throws UnknownColumnException; } diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index 287a93f283..1961a7b565 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -351,8 +351,9 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent throw new NotImplementedException(); } + @Override - public long relocateTable(CatalogTable sourceTable, long targetNamespaceId ) { + public long relocateTable( CatalogTable sourceTable, long targetNamespaceId ) { throw new NotImplementedException(); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 537ea2e6ce..34fa4f3b11 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -2261,87 +2261,99 @@ public void createTable( long schemaId, String name, List fiel } } + + /** + * {@inheritDoc} + */ @Override public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement, List primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException { // Check if there is already an entity with this name if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { return; } - CatalogSchema sourceNamespace = catalog.getSchema(sourceTable.namespaceId); - CatalogSchema targetNamespace = catalog.getSchema(targetSchemaId); - if ( sourceNamespace.getNamespaceType() == targetNamespace.getNamespaceType() ) { - catalog.relocateTable(sourceTable, targetSchemaId); - } + // Retrieve the catalog schema objects for later use + CatalogSchema sourceNamespace = catalog.getSchema( sourceTable.namespaceId ); + CatalogSchema targetNamespace = catalog.getSchema( targetSchemaId ); - if ( sourceNamespace.getNamespaceType() == NamespaceType.RELATIONAL && targetNamespace.getNamespaceType() == NamespaceType.DOCUMENT ) { + if ( sourceNamespace.getNamespaceType() == targetNamespace.getNamespaceType() ) { + // If the source and target namespaces are from the same model, it is sufficient to just move them in the catalog + catalog.relocateTable( sourceTable, targetSchemaId ); + } else if ( sourceNamespace.getNamespaceType() == NamespaceType.RELATIONAL && targetNamespace.getNamespaceType() == NamespaceType.DOCUMENT ) { + // If the source namespace is relational and the target is document-based, the migration has to be called + // Create the new collection in the same datastore List stores = sourceTable.dataPlacements .stream() - .map(id -> (DataStore) AdapterManager.getInstance().getAdapter(id)) - .collect(Collectors.toList()); - PlacementType placementType = 
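// ---------------------------------------------------------------------------
// Editor's note: distilled, self-contained sketch (not part of the patch) of
// the three transferTable branches above and below; the strings merely label
// what the real implementation does in each case.
class TransferFlowSketch {
    enum NamespaceType { RELATIONAL, DOCUMENT }

    static String transfer( NamespaceType source, NamespaceType target ) {
        if ( source == target ) {
            return "relocate in the catalog only, no data movement";
        } else if ( source == NamespaceType.RELATIONAL ) {
            return "create collection, migrate rows to documents, drop table";
        } else {
            return "create table(s), migrate documents to rows, drop collection";
        }
    }

    public static void main( String[] args ) {
        System.out.println( transfer( NamespaceType.RELATIONAL, NamespaceType.DOCUMENT ) );
    }
}
// ---------------------------------------------------------------------------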
catalog.getDataPlacement(sourceTable.dataPlacements.get(0), sourceTable.id).placementType; + .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) ) + .collect( Collectors.toList() ); + PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType; createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement ); - // Migrator + // Call the migrator DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyRelationalDataToDocumentData( statement.getTransaction(), sourceTable, targetSchemaId ); + // Drop the source table dropTable( sourceTable, statement ); - statement.getQueryProcessor().resetCaches(); - } - - if ( sourceNamespace.getNamespaceType() == NamespaceType.DOCUMENT && targetNamespace.getNamespaceType() == NamespaceType.RELATIONAL ) { + } else if ( sourceNamespace.getNamespaceType() == NamespaceType.DOCUMENT && targetNamespace.getNamespaceType() == NamespaceType.RELATIONAL ) { + // If the source namespace is document-based and the target is relational, the migration has to be called + // Retrieve the data placements of the source catalog CatalogCollection sourceCollection = catalog.getCollection( sourceTable.id ); List stores = sourceTable.dataPlacements .stream() - .map(id -> (DataStore) AdapterManager.getInstance().getAdapter(id)) - .collect(Collectors.toList()); - PlacementType placementType = catalog.getDataPlacement(sourceTable.dataPlacements.get(0), sourceTable.id).placementType; - + .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) ) + .collect( Collectors.toList() ); + PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType; + // Get all documents of the source collection. 
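// ---------------------------------------------------------------------------
// Editor's note: the source collection is read back with a single generated
// MQL query, built exactly like the String.format call below; the collection
// name used here is made up.
class FindQuerySketch {
    public static void main( String[] args ) {
        String sourceTableName = "person";
        System.out.println( String.format( "db.%s.find({})", sourceTableName ) ); // db.person.find({})
    }
}
// ---------------------------------------------------------------------------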
Here it is necessary to create the target table with its columns String query = String.format( "db.%s.find({})", sourceTable.name ); - QueryParameters parameters = new MqlQueryParameters( query, sourceNamespace.name, NamespaceType.DOCUMENT ); + QueryParameters parameters = new MqlQueryParameters( query, sourceNamespace.name, NamespaceType.DOCUMENT ); AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) statement.getTransaction().getProcessor( QueryLanguage.MONGO_QL ); MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 ); AlgRoot logicalRoot = mqlProcessor.translate( statement, parsed, parameters ); PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( logicalRoot, true ); - Result result = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false ); + + // Create a list of the JsonObjects skipping the _id column which is only needed for the documents but not for the table List fieldNames = new ArrayList(); List jsonObjects = new ArrayList(); - for ( String[] documents : result.getData()) { - for ( String document : documents) { + for ( String[] documents : result.getData() ) { + for ( String document : documents ) { JsonObject jsonObject = JsonParser.parseString( document ).getAsJsonObject(); - List fieldsInDocument = new ArrayList<>( jsonObject.keySet()); + List fieldsInDocument = new ArrayList<>( jsonObject.keySet() ); fieldsInDocument.removeAll( fieldNames ); - fieldsInDocument.remove( "_id"); + fieldsInDocument.remove( "_id" ); fieldNames.addAll( fieldsInDocument ); jsonObjects.add( jsonObject ); } } - ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 24, null, null, null, false ); - + // Create the target table + // Only VARCHAR(32) columns are added in the current version + ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 32, null, null, null, false ); List fieldInformations = fieldNames .stream() .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, fieldNames.indexOf( fieldName ) + 1 ) ) - .collect( Collectors.toList()); + .collect( Collectors.toList() ); + // Set the PKs selected by the user List constraintInformations = Collections.singletonList( new ConstraintInformation( "primary", ConstraintType.PRIMARY, primaryKeyColumnNames ) ); - createTable( targetSchemaId, sourceTable.name, fieldInformations, constraintInformations, false, stores, placementType, statement); + createTable( targetSchemaId, sourceTable.name, fieldInformations, constraintInformations, false, stores, placementType, statement ); - // Migrator + // Call the DataMigrator DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, catalog.getTable( targetSchemaId, sourceTable.name ) ); + // Remove the source collection dropCollection( sourceCollection, statement ); statement.getQueryProcessor().resetCaches(); } } + @NotNull - public static Result getResult(QueryLanguage language, Statement statement, String query, PolyImplementation result, Transaction transaction, final boolean noLimit ) { + public static Result getResult( QueryLanguage language, Statement statement, String query, PolyImplementation result, Transaction transaction, final boolean noLimit ) { Catalog catalog = Catalog.getInstance(); List> rows = result.getRows( statement, noLimit ? 
-1 : language == QueryLanguage.CYPHER ? RuntimeConfig.UI_NODE_AMOUNT.getInteger() : RuntimeConfig.UI_PAGE_SIZE.getInteger() ); @@ -2350,12 +2362,10 @@ public static Result getResult(QueryLanguage language, Statement statement, Stri CatalogTable catalogTable = null; - ArrayList header = new ArrayList<>(); for ( AlgDataTypeField metaData : result.rowType.getFieldList() ) { String columnName = metaData.getName(); - DbColumn dbCol = new DbColumn( metaData.getName(), metaData.getType().getFullTypeString(), @@ -3285,6 +3295,7 @@ private void prepareMonitoring( Statement statement, Kind kind, CatalogTable cat } } + @Override public void dropFunction() { throw new RuntimeException( "Not supported yet" ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 2cb24f9230..e1ea2ffadb 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -150,25 +150,30 @@ public void copyGraphData( CatalogGraphDatabase target, Transaction transaction, } + /** + * {@inheritDoc} + */ @Override public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTable sourceTable, long targetSchemaId ) { try { Catalog catalog = Catalog.getInstance(); + + // Collect the columns of the source table List sourceColumns = new ArrayList<>(); for ( String columnName : sourceTable.getColumnNames() ) { sourceColumns.add( catalog.getColumn( sourceTable.id, columnName ) ); } + // Retrieve the placements of the source table Map> sourceColumnPlacements = new HashMap<>(); sourceColumnPlacements.put( sourceTable.partitionProperty.partitionIds.get( 0 ), selectSourcePlacements( sourceTable, sourceColumns, -1 ) ); - - Statement sourceStatement = transaction.createStatement(); - Map> subDistribution = new HashMap<>( sourceColumnPlacements ); subDistribution.keySet().retainAll( Arrays.asList( sourceTable.partitionProperty.partitionIds.get( 0 ) ) ); + // Initialize the source statement to read all values from the source table + Statement sourceStatement = transaction.createStatement(); AlgRoot sourceAlg = getSourceIterator( sourceStatement, subDistribution ); PolyImplementation result = sourceStatement.getQueryProcessor().prepareQuery( sourceAlg, @@ -177,6 +182,7 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa false, false ); + // Build the data structure to map the columns to the physical placements Map sourceColMapping = new LinkedHashMap<>(); for ( CatalogColumn catalogColumn : sourceColumns ) { int i = 0; @@ -192,6 +198,7 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa final Enumerable enumerable = result.enumerable( sourceStatement.getDataContext() ); Iterator sourceIterator = enumerable.iterator(); while ( sourceIterator.hasNext() ) { + // Build a data structure for all values of the source table for the insert query List> rows = MetaImpl.collect( result.getCursorFactory(), LimitIterator.of( sourceIterator, batchSize ), new ArrayList<>() ); List> values = new ArrayList<>(); for ( List list : rows ) { @@ -200,6 +207,7 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa values.add( currentRowValues ); } + // Create the insert query for all documents in the collection boolean firstRow = true; StringBuffer bf = new StringBuffer(); bf.append( "db." 
+ sourceTable.name + ".insertMany([" ); @@ -212,7 +220,7 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa } boolean firstColumn = true; for ( Map.Entry entry : row.entrySet() ) { - if (entry.getValue() != null ) { + if ( entry.getValue() != null ) { if ( firstColumn == true ) { firstColumn = false; } else { @@ -225,14 +233,16 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa } bf.append( "])" ); - String query = bf.toString(); - + // Insert als documents into the newlz created collection Statement targetStatement = transaction.createStatement(); + String query = bf.toString(); AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) transaction.getProcessor( Catalog.QueryLanguage.MONGO_QL ); QueryParameters parameters = new MqlQueryParameters( query, catalog.getSchema( targetSchemaId ).name, Catalog.NamespaceType.DOCUMENT ); MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 ); AlgRoot logicalRoot = mqlProcessor.translate( targetStatement, parsed, parameters ); PolyImplementation polyImplementation = targetStatement.getQueryProcessor().prepareQuery( logicalRoot, true ); + + // TODO: something is wrong with the transactions. Try to get rid of this. Result updateRresult = getResult( Catalog.QueryLanguage.MONGO_QL, targetStatement, query, polyImplementation, transaction, false ); } } catch ( Throwable t ) { @@ -241,39 +251,48 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa } + /** + * {@inheritDoc} + */ @Override public void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, CatalogTable targetTable ) throws UnknownColumnException { - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); Catalog catalog = Catalog.getInstance(); + + // Get the values in all documents of the collection + // TODO: A data structure is needed to represent also 1:N relations of multiple tables Map> columnValues = new HashMap<>(); - for ( JsonObject jsonObject : jsonObjects) { + for ( JsonObject jsonObject : jsonObjects ) { for ( String columnName : targetTable.getColumnNames() ) { CatalogColumn column = catalog.getColumn( targetTable.id, columnName ); if ( !columnValues.containsKey( column ) ) { columnValues.put( column, new LinkedList<>() ); } JsonElement jsonElement = jsonObject.get( columnName ); - if (jsonElement != null) { - columnValues.get( column ).add( jsonElement.getAsString() ); + if ( jsonElement != null ) { + columnValues.get( column ).add( jsonElement.getAsString() ); } else { columnValues.get( column ).add( null ); } } } - List targetColumnPlacements = new LinkedList<>(); Statement targetStatement = transaction.createStatement(); - AlgRoot targetAlg; + final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + List targetColumnPlacements = new LinkedList<>(); for ( Entry> entry : columnValues.entrySet() ) { + // Add the values to the column to the statement CatalogColumn targetColumn = catalog.getColumn( targetTable.id, entry.getKey().name ); - targetStatement.getDataContext().addParameterValues(targetColumn.id, targetColumn.getAlgDataType( typeFactory ) , entry.getValue() ); - List stores = RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( targetColumn ); - for ( DataStore store : stores ) { - targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.getAdapterId(), targetColumn.id ) ); + 
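// ---------------------------------------------------------------------------
// Editor's note: self-contained sketch (not part of the patch) of how the
// batch of rows is rendered into the single insertMany statement above; the
// collection name and row data are made up.
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class InsertManySketch {
    public static void main( String[] args ) {
        Map<String, Object> row = new LinkedHashMap<>();
        row.put( "name", "John" );
        row.put( "age", 29 );
        List<Map<String, Object>> rows = List.of( row );

        StringBuilder bf = new StringBuilder( "db.person.insertMany([" );
        boolean firstRow = true;
        for ( Map<String, Object> r : rows ) {
            bf.append( firstRow ? "{" : ",{" );
            firstRow = false;
            boolean firstColumn = true;
            for ( Map.Entry<String, Object> entry : r.entrySet() ) {
                if ( entry.getValue() != null ) {
                    bf.append( firstColumn ? "" : "," )
                            .append( "\"" ).append( entry.getKey() ).append( "\":\"" ).append( entry.getValue() ).append( "\"" );
                    firstColumn = false;
                }
            }
            bf.append( "}" );
        }
        bf.append( "])" );
        System.out.println( bf ); // db.person.insertMany([{"name":"John","age":"29"}])
    }
}
// ---------------------------------------------------------------------------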
targetStatement.getDataContext().addParameterValues( targetColumn.id, targetColumn.getAlgDataType( typeFactory ), entry.getValue() );
+
+            // Add all placements of the column to the targetColumnPlacements list
+            for ( DataStore store : RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( targetColumn ) ) {
+                CatalogColumnPlacement columnPlacement = Catalog.getInstance().getColumnPlacement( store.getAdapterId(), targetColumn.id );
+                targetColumnPlacements.add( columnPlacement );
             }
         }

-        targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetTable.partitionProperty.partitionIds.get( 0 ) );
+        // Prepare the insert query
+        AlgRoot targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetTable.partitionProperty.partitionIds.get( 0 ) );
         Iterator<?> iterator = targetStatement.getQueryProcessor()
                 .prepareQuery( targetAlg, targetAlg.validatedRowType, true, false, false )
                 .enumerable( targetStatement.getDataContext() )
diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
index df5ce37752..88082462b6 100644
--- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
+++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
@@ -48,7 +48,7 @@

 /**
- * Parse tree for {@code ALTER SCHEMA name OWNER TO} statement.
+ * Parse tree for {@code ALTER SCHEMA name TRANSFER table TO namespace} statement.
  */
 public class SqlAlterSchemaTransferTable extends SqlAlterSchema {

@@ -60,23 +60,23 @@ public class SqlAlterSchemaTransferTable extends SqlAlterSchema {
     /**
      * Creates a SqlAlterSchemaOwner.
      */
-    public SqlAlterSchemaTransferTable(ParserPos pos, SqlIdentifier table, SqlIdentifier targetSchema, SqlNodeList primaryKeyColumns ) {
+    public SqlAlterSchemaTransferTable( ParserPos pos, SqlIdentifier table, SqlIdentifier targetSchema, SqlNodeList primaryKeyColumns ) {
         super( pos );
-        this.table = Objects.requireNonNull(table);
-        this.targetSchema = Objects.requireNonNull(targetSchema);
+        this.table = Objects.requireNonNull( table );
+        this.targetSchema = Objects.requireNonNull( targetSchema );
         this.primaryKeyColumns = primaryKeyColumns;
     }


     @Override
     public List<Node> getOperandList() {
-        return ImmutableNullableList.of(table, targetSchema);
+        return ImmutableNullableList.of( table, targetSchema );
     }


     @Override
     public List<SqlNode> getSqlOperandList() {
-        return ImmutableNullableList.of(table, targetSchema);
+        return ImmutableNullableList.of( table, targetSchema );
     }


@@ -95,9 +95,9 @@ public void unparse( SqlWriter writer, int leftPrec, int rightPrec ) {
     public void execute( Context context, Statement statement, QueryParameters parameters ) {
         try {
             Catalog catalog = Catalog.getInstance();
-            CatalogTable catalogTable = getCatalogTable( context, table);
+            CatalogTable catalogTable = getCatalogTable( context, table );

-            long targetSchemaId = catalog.getSchema( context.getDatabaseId(), targetSchema.getNames().get(0) ).id;
+            long targetSchemaId = catalog.getSchema( context.getDatabaseId(), targetSchema.getNames().get( 0 ) ).id;

             List<String> primaryKeyColumnNames = (primaryKeyColumns != null) ?
primaryKeyColumns.getList().stream().map( Node::toString ).collect( Collectors.toList() ) @@ -107,14 +107,14 @@ public void execute( Context context, Statement statement, QueryParameters param } catch ( UnknownSchemaException e ) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.schemaNotFound( table.getSimple() ) ); - } catch (EntityAlreadyExistsException e) { - throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableExists( table.names.get( 1 ) ) ); + } catch ( EntityAlreadyExistsException e ) { + throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableExists( table.names.get( 1 ) ) ); } catch ( DdlOnSourceException e ) { throw CoreUtil.newContextException( table.getPos(), RESOURCE.ddlOnSourceTable() ); } catch ( UnknownTableException e ) { - throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableNotFound( e.getTableName()) ); + throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableNotFound( e.getTableName() ) ); } catch ( UnknownColumnException e ) { - throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFound( e.getColumnName() )); + throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) ); } } From 61a3359fcfc3331bc56a07ae13af1291579854ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Attila=20S=C3=BCle?= Date: Tue, 20 Dec 2022 20:32:47 +0100 Subject: [PATCH 24/26] document -> relational is working also with embedded documents --- .../org/polypheny/db/catalog/CatalogImpl.java | 1 - .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../polypheny/db/processing/DataMigrator.java | 5 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 108 ++++++++++++---- .../db/processing/DataMigratorImpl.java | 122 +++++++++++++----- sql-language/src/main/codegen/config.fmpp | 1 + .../src/main/codegen/includes/parserImpls.ftl | 2 +- .../src/main/codegen/templates/Parser.jj | 1 + .../SqlAlterSchemaTransferTable.java | 33 +++-- .../java/org/polypheny/db/webui/Crud.java | 2 +- .../polypheny/db/webui/crud/LanguageCrud.java | 1 + 11 files changed, 208 insertions(+), 70 deletions(-) diff --git a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 91bbc89309..cda0145dda 100644 --- a/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -32,7 +32,6 @@ import java.util.stream.Stream; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import org.jetbrains.annotations.NotNull; import org.mapdb.BTreeMap; import org.mapdb.DB; import org.mapdb.DBException.SerializationError; diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index f290bc2130..c70dc34e33 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -474,7 +474,7 @@ public static DdlManager getInstance() { * @param statement the used statement * @param statement the used statement */ - public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement, List primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException; + public abstract void transferTable( CatalogTable table, long targetSchemaId, Statement statement, Map> primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, 
UnknownColumnException, GenericCatalogException; /** * Create a new view diff --git a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java index 36f624c366..752efe8e70 100644 --- a/core/src/main/java/org/polypheny/db/processing/DataMigrator.java +++ b/core/src/main/java/org/polypheny/db/processing/DataMigrator.java @@ -26,6 +26,7 @@ import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; @@ -118,8 +119,8 @@ void copyPartitionData( * * @param transaction Transactional scope * @param jsonObjects List of the JSON-objects of the source collection - * @param table Target table created in the {@link DdlManager} + * @param table Target tables created in the {@link DdlManager} */ - void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, CatalogTable table ) throws UnknownColumnException; + void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, List table ) throws UnknownColumnException, UnknownTableException; } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 10d43ad019..ec5599b015 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -18,7 +18,7 @@ import com.google.common.collect.ImmutableList; - +import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import java.sql.ResultSetMetaData; @@ -27,14 +27,19 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.SortedSet; import java.util.stream.Collectors; import java.util.stream.IntStream; +import java.util.stream.Stream; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.map.LinkedMap; import org.apache.commons.lang3.StringUtils; import org.jetbrains.annotations.NotNull; import org.polypheny.db.PolyImplementation; @@ -137,7 +142,6 @@ import org.polypheny.db.partition.properties.TemperaturePartitionProperty; import org.polypheny.db.partition.properties.TemperaturePartitionProperty.PartitionCostIndication; import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation; -import org.polypheny.db.prepare.Context; import org.polypheny.db.processing.AutomaticDdlProcessor; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.routing.RoutingManager; @@ -145,20 +149,16 @@ import org.polypheny.db.runtime.PolyphenyDbException; import org.polypheny.db.schema.LogicalTable; import org.polypheny.db.schema.PolySchemaBuilder; -import org.polypheny.db.sql.language.SqlIdentifier; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.type.ArrayType; import org.polypheny.db.type.PolyType; -import org.polypheny.db.util.CoreUtil; import org.polypheny.db.view.MaterializedViewManager; 
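// ---------------------------------------------------------------------------
// Editor's note: self-contained sketch (not part of the patch) of the idea
// behind buildDocumentHierarchy further below: every nested JSON object
// becomes its own table, keyed by its path, and scalar fields (except _id)
// become that table's columns. Gson is assumed, as in the patch itself.
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

class HierarchySketch {
    static void collect( String path, JsonObject obj, Map<String, Set<String>> tables ) {
        Set<String> columns = tables.computeIfAbsent( path, p -> new LinkedHashSet<>() );
        for ( String field : obj.keySet() ) {
            JsonElement e = obj.get( field );
            if ( e.isJsonObject() ) {
                collect( path + "." + field, e.getAsJsonObject(), tables ); // child table
            } else if ( !field.equals( "_id" ) ) {
                columns.add( field );
            }
        }
    }

    public static void main( String[] args ) {
        JsonObject doc = JsonParser.parseString(
                "{\"name\":\"John\",\"address\":{\"city\":\"Basel\",\"zip\":\"4051\"}}" ).getAsJsonObject();
        Map<String, Set<String>> tables = new LinkedHashMap<>();
        collect( "person", doc, tables );
        System.out.println( tables ); // {person=[name], person.address=[city, zip]}
    }
}
// ---------------------------------------------------------------------------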
import org.polypheny.db.webui.Crud; import org.polypheny.db.webui.models.DbColumn; import org.polypheny.db.webui.models.Result; -import static org.polypheny.db.util.Static.RESOURCE; - @Slf4j public class DdlManagerImpl extends DdlManager { @@ -697,6 +697,7 @@ public void mergeColumns( CatalogTable catalogTable, List sourceColumnNa statement.getQueryProcessor().resetCaches(); } + @Override public void addIndex( CatalogTable catalogTable, String indexMethodName, List columnNames, String indexName, boolean isUnique, DataStore location, Statement statement ) throws UnknownColumnException, UnknownIndexMethodException, GenericCatalogException, UnknownTableException, UnknownUserException, UnknownSchemaException, UnknownKeyException, UnknownDatabaseException, TransactionException, AlterSourceException, IndexExistsException, MissingColumnPlacementException { List columnIds = new LinkedList<>(); @@ -2346,7 +2347,7 @@ public void createTable( long schemaId, String name, List fiel * {@inheritDoc} */ @Override - public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement, List primaryKeyColumnNames ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException { + public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statement statement, Map> pkColumnNamesOfTables ) throws EntityAlreadyExistsException, DdlOnSourceException, UnknownTableException, UnknownColumnException, GenericCatalogException { // Check if there is already an entity with this name if ( assertEntityExists( targetSchemaId, sourceTable.name, true ) ) { return; @@ -2396,34 +2397,63 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem Result result = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false ); // Create a list of the JsonObjects skipping the _id column which is only needed for the documents but not for the table - List fieldNames = new ArrayList(); List jsonObjects = new ArrayList(); + LinkedList currentPosition = new LinkedList<>( Arrays.asList( sourceTable.name ) ); + LinkedMap documentHierarchy = new LinkedMap<>( Map.of( currentPosition, new LinkedHashSet() ) ); for ( String[] documents : result.getData() ) { for ( String document : documents ) { JsonObject jsonObject = JsonParser.parseString( document ).getAsJsonObject(); - List fieldsInDocument = new ArrayList<>( jsonObject.keySet() ); - fieldsInDocument.removeAll( fieldNames ); - fieldsInDocument.remove( "_id" ); - fieldNames.addAll( fieldsInDocument ); + buildDocumentHierarchy( jsonObject, documentHierarchy, currentPosition, pkColumnNamesOfTables ); jsonObjects.add( jsonObject ); } } // Create the target table // Only VARCHAR(32) columns are added in the current version - ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 32, null, null, null, false ); - List fieldInformations = fieldNames - .stream() - .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, fieldNames.indexOf( fieldName ) + 1 ) ) - .collect( Collectors.toList() ); + ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 64, null, null, null, false ); + List addedTables = new ArrayList(); + for ( Entry hierarchyEntry : documentHierarchy.entrySet() ) { + LinkedList tablePath = hierarchyEntry.getKey(); + LinkedHashSet tableChildren = hierarchyEntry.getValue(); + String 
tableName = tablePath.getLast(); + + // If the table has already added. E.g. when it has multiple parents + if ( !addedTables.stream().anyMatch( table -> tableName.equals( table.name ) ) ) { + // If the table has also another parent, merge the tableChildren + List sameTableWithOtherParents = documentHierarchy.keySet() + .stream() + .filter( k -> !k.equals( hierarchyEntry.getKey() ) && k.getLast().equals( hierarchyEntry.getKey().getLast() ) ) + .collect( Collectors.toList() ); + for ( LinkedList sameTableWithOtherParent : sameTableWithOtherParents ) { + tableChildren.addAll( documentHierarchy.get( sameTableWithOtherParent ) ); + } - // Set the PKs selected by the user - List constraintInformations = Collections.singletonList( new ConstraintInformation( "primary", ConstraintType.PRIMARY, primaryKeyColumnNames ) ); - createTable( targetSchemaId, sourceTable.name, fieldInformations, constraintInformations, false, stores, placementType, statement ); + // Create the list of the PKs for the current table + List constraintInformations = + List.of( new ConstraintInformation( "primary", ConstraintType.PRIMARY, pkColumnNamesOfTables.get( tableName ) ) ); + List fieldInformations = tableChildren + .stream() + .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, new ArrayList( tableChildren ).indexOf( fieldName ) + 1 ) ) + .collect( Collectors.toList() ); + createTable( targetSchemaId, tableName, fieldInformations, constraintInformations, false, stores, placementType, statement ); + addedTables.add( catalog.getTable( targetSchemaId, tableName ) ); + } + // Add FK if it's a child table + CatalogTable table = catalog.getTable( targetSchemaId, tableName ); + if ( tablePath.size() > 1 ) { + CatalogTable refTable = catalog.getTable( targetSchemaId, tablePath.get( tablePath.size() - 2 ) ); + List refColumnNames = pkColumnNamesOfTables.get( refTable.name ); + List columnNames = refColumnNames + .stream() + .map( columnName -> "fk_" + refTable.name + "_" + columnName ) + .collect( Collectors.toList() ); + addForeignKey( table, refTable, columnNames, refColumnNames, "fk_from_" + table.name + "_to_" + refTable.name, ForeignKeyOption.NONE, ForeignKeyOption.NONE ); + } + } - // Call the DataMigrator + //Call the DataMigrator DataMigrator dataMigrator = statement.getTransaction().getDataMigrator(); - dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, catalog.getTable( targetSchemaId, sourceTable.name ) ); + dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, addedTables ); // Remove the source collection dropCollection( sourceCollection, statement ); @@ -2432,6 +2462,40 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem } + private static void buildDocumentHierarchy( JsonElement jsonObject, Map documentHierarchy, LinkedList currentPath, + Map> pkColumnNamesOfTables ) { + Set currentTableColumnNames = documentHierarchy.get( currentPath ); + for ( String fieldName : ((JsonObject) jsonObject).keySet() ) { + JsonElement jsonElement = ((JsonObject) jsonObject).get( fieldName ); + if ( jsonElement instanceof JsonObject ) { + LinkedList childTablePath = (LinkedList) currentPath.clone(); + childTablePath.add( fieldName ); + if ( !documentHierarchy.containsKey( childTablePath ) ) { + documentHierarchy.put( childTablePath, new LinkedHashSet() ); + } + buildDocumentHierarchy( jsonElement, documentHierarchy, childTablePath, pkColumnNamesOfTables ); + } else if ( 
!currentTableColumnNames.contains( fieldName ) && !fieldName.equals( "_id" ) ) { + currentTableColumnNames.add( fieldName ); + } + } + // if no PK column was given, select the _id column. + // if the _id column not exist add it to the columns of the table + if ( !pkColumnNamesOfTables.containsKey( currentPath.getLast() ) ) { + pkColumnNamesOfTables.put( currentPath.getLast(), List.of( "_id" ) ); + currentTableColumnNames.add( "_id" ); + } + + // Add the PKs of the parent table to te current table as FKs, if it's a child table + if ( currentPath.size() > 1 ) { + String parentTableName = currentPath.get( currentPath.size() - 2 ); + pkColumnNamesOfTables.getOrDefault( parentTableName, List.of( "_id" ) ) + .stream() + .map( parentPkColumnName -> "fk_" + parentTableName + "_" + parentPkColumnName ) + .forEach( currentTableColumnNames::add ); + } + } + + @NotNull public static Result getResult( QueryLanguage language, Statement statement, String query, PolyImplementation result, Transaction transaction, final boolean noLimit ) { Catalog catalog = Catalog.getInstance(); diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index b816afc7bc..b9766a1ce1 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -22,12 +22,12 @@ import com.google.gson.JsonObject; import java.util.*; -import java.util.*; import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.avatica.MetaImpl; import org.apache.calcite.linq4j.Enumerable; +import org.apache.commons.lang.StringUtils; import org.jetbrains.annotations.NotNull; import org.polypheny.db.PolyImplementation; import org.polypheny.db.adapter.DataStore; @@ -46,10 +46,12 @@ import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogForeignKey; import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogPrimaryKey; import org.polypheny.db.catalog.entity.CatalogTable; import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.QueryParameters; import org.polypheny.db.languages.mql.MqlNode; @@ -72,6 +74,7 @@ import org.polypheny.db.type.PolyType; import org.polypheny.db.type.PolyTypeFactoryImpl; import org.polypheny.db.util.LimitIterator; +import org.polypheny.db.webui.models.ForeignKey; import org.polypheny.db.webui.models.Result; import static org.polypheny.db.ddl.DdlManagerImpl.getResult; @@ -253,54 +256,107 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa * {@inheritDoc} */ @Override - public void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, CatalogTable targetTable ) throws UnknownColumnException { + public void copyDocumentDataToRelationalData( Transaction transaction, List jsonObjects, List targetTables ) throws UnknownColumnException, UnknownTableException { Catalog catalog = Catalog.getInstance(); // Get the values in all documents of the collection - // TODO: A data structure is needed to represent also 1:N relations of multiple tables Map> columnValues = new HashMap<>(); 
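// ---------------------------------------------------------------------------
// Editor's note: sketch (not part of the patch) of the foreign-key naming
// scheme used above: each PK column of the parent table reappears in the
// child table as fk_<parent>_<pkColumn>.
import java.util.List;
import java.util.stream.Collectors;

class FkNamingSketch {
    public static void main( String[] args ) {
        String parentTable = "person";
        List<String> parentPkColumns = List.of( "_id" );
        List<String> fkColumns = parentPkColumns.stream()
                .map( pk -> "fk_" + parentTable + "_" + pk )
                .collect( Collectors.toList() );
        System.out.println( fkColumns ); // [fk_person__id]
    }
}
// ---------------------------------------------------------------------------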
for ( JsonObject jsonObject : jsonObjects ) { - for ( String columnName : targetTable.getColumnNames() ) { - CatalogColumn column = catalog.getColumn( targetTable.id, columnName ); + getColumnValuesForTable( catalog, targetTables.get( 0 ), columnValues, jsonObject, Collections.emptyMap() ); + } + + for ( CatalogTable targetTable : targetTables ) { + Statement targetStatement = transaction.createStatement(); + final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + List targetColumnPlacements = new LinkedList<>(); + for ( CatalogColumn targetColumn : catalog.getColumns( targetTable.id ) ) { + // Add the values to the column to the statement + targetStatement.getDataContext().addParameterValues( targetColumn.id, targetColumn.getAlgDataType( typeFactory ), columnValues.get( targetColumn ) ); + + // Add all placements of the column to the targetColumnPlacements list + for ( DataStore store : RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( targetColumn ) ) { + CatalogColumnPlacement columnPlacement = Catalog.getInstance().getColumnPlacement( store.getAdapterId(), targetColumn.id ); + targetColumnPlacements.add( columnPlacement ); + } + } + + // Prepare the insert query + AlgRoot targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetTable.partitionProperty.partitionIds.get( 0 ) ); + Iterator iterator = targetStatement.getQueryProcessor() + .prepareQuery( targetAlg, targetAlg.validatedRowType, true, false, false ) + .enumerable( targetStatement.getDataContext() ) + .iterator(); + //noinspection WhileLoopReplaceableByForEach + while ( iterator.hasNext() ) { + iterator.next(); + } + targetStatement.getDataContext().resetParameterValues(); + } + } + + + private static void getColumnValuesForTable( Catalog catalog, CatalogTable table, Map> columnValues, JsonObject jsonObject, Map parentPkValues ) throws UnknownColumnException, UnknownTableException { + Map pkValues = new HashMap(); + + // For the columns are not in the current document + for ( String columnName : table.getColumnNames() ) { + if ( !jsonObject.keySet().contains( columnName ) ) { + CatalogColumn column = catalog.getColumn( table.id, columnName ); if ( !columnValues.containsKey( column ) ) { - columnValues.put( column, new LinkedList<>() ); + columnValues.put( column, new ArrayList<>() ); } - JsonElement jsonElement = jsonObject.get( columnName ); - if ( jsonElement != null ) { - columnValues.get( column ).add( jsonElement.getAsString() ); - } else { + if ( catalog.getPrimaryKey( table.primaryKey ).columnIds.contains( column.id ) ) { + // Generate _id if it's not in the document. + String generatedValue = UUID.randomUUID().toString(); + columnValues.get( column ).add( generatedValue ); + pkValues.put( String.join( ".", table.name, column.name ), generatedValue ); + } else if ( !catalog.getForeignKeys( table.id ).stream().anyMatch( fk -> fk.getColumnNames().contains( columnName ) ) ) { + // It's not a FK in the table just simply add null. 
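// ---------------------------------------------------------------------------
// Editor's note: sketch (not part of the patch) of the key bookkeeping above:
// a missing _id is generated with a UUID and remembered under a
// "<table>.<column>" key so that child rows can later resolve their FK values.
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

class PkValueSketch {
    public static void main( String[] args ) {
        Map<String, String> pkValues = new HashMap<>();
        String generated = UUID.randomUUID().toString();
        pkValues.put( String.join( ".", "person", "_id" ), generated );
        // A child table's FK value is looked up with the same composite key:
        System.out.println( pkValues.get( "person._id" ) );
    }
}
// ---------------------------------------------------------------------------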
columnValues.get( column ).add( null ); } } } - Statement targetStatement = transaction.createStatement(); - final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); - List targetColumnPlacements = new LinkedList<>(); - for ( Entry> entry : columnValues.entrySet() ) { - // Add the values to the column to the statement - CatalogColumn targetColumn = catalog.getColumn( targetTable.id, entry.getKey().name ); - targetStatement.getDataContext().addParameterValues( targetColumn.id, targetColumn.getAlgDataType( typeFactory ), entry.getValue() ); - - // Add all placements of the column to the targetColumnPlacements list - for ( DataStore store : RoutingManager.getInstance().getCreatePlacementStrategy().getDataStoresForNewColumn( targetColumn ) ) { - CatalogColumnPlacement columnPlacement = Catalog.getInstance().getColumnPlacement( store.getAdapterId(), targetColumn.id ); - targetColumnPlacements.add( columnPlacement ); + for ( String fieldName : jsonObject.keySet() ) { + // Skip the _id field if the target table is not intended to contain the _id column + if ( fieldName.equals( "_id" ) && !table.getColumnNames().contains( "_id" ) ) { + continue; } - } - // Prepare the insert query - AlgRoot targetAlg = buildInsertStatement( targetStatement, targetColumnPlacements, targetTable.partitionProperty.partitionIds.get( 0 ) ); - Iterator iterator = targetStatement.getQueryProcessor() - .prepareQuery( targetAlg, targetAlg.validatedRowType, true, false, false ) - .enumerable( targetStatement.getDataContext() ) - .iterator(); - //noinspection WhileLoopReplaceableByForEach - while ( iterator.hasNext() ) { - iterator.next(); + JsonElement jsonElement = jsonObject.get( fieldName ); + // If it's a parent element + if ( jsonElement instanceof JsonObject ) { + // Add PKs from the document + for ( long pkColumnId : catalog.getPrimaryKey( table.primaryKey ).columnIds ) { + String pkColumnName = catalog.getColumn( pkColumnId ).name; + if ( jsonObject.has( pkColumnName ) ) { + pkValues.put( String.join( ".", table.name, pkColumnName ), jsonObject.get( pkColumnName ).getAsString() ); + } + } + + CatalogTable childTable = catalog.getTable( table.namespaceId, fieldName ); + getColumnValuesForTable( catalog, childTable, columnValues, (JsonObject) jsonElement, pkValues ); + } else { + CatalogColumn column = catalog.getColumn( table.id, fieldName ); + if ( !columnValues.containsKey( column ) ) { + columnValues.put( column, new LinkedList<>() ); + } + columnValues.get( column ).add( jsonObject.get( fieldName ).getAsString() ); + } } - targetStatement.getDataContext().resetParameterValues(); + // Add the FK columns + for ( CatalogForeignKey fk : catalog.getForeignKeys( table.id ) ) { + int i = 0; + for ( String refColumnName : fk.getReferencedKeyColumnNames() ) { + CatalogColumn fkColumn = catalog.getColumn( table.id, fk.getColumnNames().get( i++ ) ); + if ( !columnValues.containsKey( fkColumn ) ) { + columnValues.put( fkColumn, new ArrayList<>() ); + } + String refColumnValue = parentPkValues.get( String.join( ".", fk.getReferencedKeyTableName(), refColumnName ) ); + columnValues.get( fkColumn ).add( refColumnValue ); + } + } } diff --git a/sql-language/src/main/codegen/config.fmpp b/sql-language/src/main/codegen/config.fmpp index 1ed035e0e6..989396c6f5 100644 --- a/sql-language/src/main/codegen/config.fmpp +++ b/sql-language/src/main/codegen/config.fmpp @@ -254,6 +254,7 @@ data: { "JSON" "K" "KEY" + "KEYS" "KEY_MEMBER" "KEY_TYPE" "LABEL" diff --git 
a/sql-language/src/main/codegen/includes/parserImpls.ftl b/sql-language/src/main/codegen/includes/parserImpls.ftl
index 3bbf7fc124..60ee53557d 100644
--- a/sql-language/src/main/codegen/includes/parserImpls.ftl
+++ b/sql-language/src/main/codegen/includes/parserImpls.ftl
@@ -52,7 +52,7 @@ SqlAlterSchema SqlAlterSchema(Span s) :
     name = CompoundIdentifier()
     (
-
+        columnList = ParenthesizedSimpleIdentifierList()
        |
        { columnList = null; }
diff --git a/sql-language/src/main/codegen/templates/Parser.jj b/sql-language/src/main/codegen/templates/Parser.jj
index b968828780..2e97de81a6 100644
--- a/sql-language/src/main/codegen/templates/Parser.jj
+++ b/sql-language/src/main/codegen/templates/Parser.jj
@@ -6311,6 +6311,7 @@ SqlPostfixOperator PostfixRowOperator() :
 | < JSON_QUERY: "JSON_QUERY" >
 | < K: "K" >
 | < KEY: "KEY" >
+| < KEYS: "KEYS" >
 | < KEY_MEMBER: "KEY_MEMBER" >
 | < KEY_TYPE: "KEY_TYPE" >
 | < DISTANCE: "DISTANCE" >
diff --git a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
index 88082462b6..6a70c5358c 100644
--- a/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
+++ b/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/alterschema/SqlAlterSchemaTransferTable.java
@@ -17,15 +17,17 @@
 package org.polypheny.db.sql.language.ddl.alterschema;

-import java.util.Optional;
-import java.util.stream.Collectors;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.commons.lang.StringUtils;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogTable;
 import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException;
+import org.polypheny.db.catalog.exceptions.GenericCatalogException;
 import org.polypheny.db.catalog.exceptions.UnknownColumnException;
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownTableException;
-import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.ddl.DdlManager;
 import org.polypheny.db.ddl.exception.DdlOnSourceException;
 import org.polypheny.db.languages.ParserPos;
@@ -98,12 +100,7 @@ public void execute( Context context, Statement statement, QueryParameters param
             CatalogTable catalogTable = getCatalogTable( context, table );
             long targetSchemaId = catalog.getSchema( context.getDatabaseId(), targetSchema.getNames().get( 0 ) ).id;
-
-            List<String> primaryKeyColumnNames = (primaryKeyColumns != null)
-                    ? primaryKeyColumns.getList().stream().map( Node::toString ).collect( Collectors.toList() )
-                    : null;
-
-            DdlManager.getInstance().transferTable( catalogTable, targetSchemaId, statement, primaryKeyColumnNames );
+            DdlManager.getInstance().transferTable( catalogTable, targetSchemaId, statement, buildPkColumnNamesOfTables() );
         } catch ( UnknownSchemaException e ) {
             throw CoreUtil.newContextException( table.getPos(), RESOURCE.schemaNotFound( table.getSimple() ) );
@@ -115,7 +112,25 @@ public void execute( Context context, Statement statement, QueryParameters param
             throw CoreUtil.newContextException( table.getPos(), RESOURCE.tableNotFound( e.getTableName() ) );
         } catch ( UnknownColumnException e ) {
             throw CoreUtil.newContextException( table.getPos(), RESOURCE.columnNotFound( e.getColumnName() ) );
+        } catch ( GenericCatalogException e ) {
+            throw new RuntimeException( e );
+        }
+    }
+
+
+    private Map<String, List<String>> buildPkColumnNamesOfTables() {
+        Map<String, List<String>> pkColumnNamesOfTables = new HashMap<>();
+        if ( primaryKeyColumns != null ) {
+            for ( Node pkNode : primaryKeyColumns.getList() ) {
+                String tableName = StringUtils.substringBefore( pkNode.toString(), "." );
+                String columnName = StringUtils.substringAfter( pkNode.toString(), "." );
+                if ( !pkColumnNamesOfTables.containsKey( tableName ) ) {
+                    pkColumnNamesOfTables.put( tableName, new ArrayList<>() );
+                }
+                pkColumnNamesOfTables.get( tableName ).add( columnName );
+            }
         }
+        return pkColumnNamesOfTables;
+    }
 }
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 7b2ea01a89..f6096513b5 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -636,7 +636,7 @@ void transferTable( final Context ctx ) {
                 .append( tableId );
         if ( request.primaryKeyNames != null && !request.primaryKeyNames.isBlank() ) {
             query
-                    .append( " ADD PRIMARY KEY " )
+                    .append( " SET PRIMARY KEYS " )
                     .append( request.primaryKeyNames );
         }
         Result result;
diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
index 9461674ca3..3bf8d860ba 100644
--- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
@@ -16,6 +16,7 @@
 package org.polypheny.db.webui.crud;

+import com.google.gson.JsonObject;
 import io.javalin.http.Context;
 import java.sql.ResultSetMetaData;
 import java.util.ArrayList;
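With the WebUI change above, the generated transfer statement presumably ends in something like SET PRIMARY KEYS (users.id, orders.id), the exact surrounding syntax depending on the parser rule shown earlier; the table and column names here are hypothetical. The sketch below models how buildPkColumnNamesOfTables() groups such table-qualified identifiers per table, using the same commons-lang helpers:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;

public class PkGroupingSketch {

    public static void main( String[] args ) {
        // Hypothetical inputs, standing in for the parsed identifier list
        List<String> pkIdentifiers = List.of( "users.id", "orders.id", "orders.customer_id" );
        Map<String, List<String>> pkColumnNamesOfTables = new HashMap<>();
        for ( String pk : pkIdentifiers ) {
            String tableName = StringUtils.substringBefore( pk, "." );
            String columnName = StringUtils.substringAfter( pk, "." );
            pkColumnNamesOfTables.computeIfAbsent( tableName, t -> new ArrayList<>() ).add( columnName );
        }
        // Prints something like: {orders=[id, customer_id], users=[id]}
        System.out.println( pkColumnNamesOfTables );
    }
}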
From 9dcc5f53b2fd64e2a6452724a57f9b0b9ddfceee Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Attila=20S=C3=BCle?=
Date: Wed, 21 Dec 2022 11:40:54 +0100
Subject: [PATCH 25/26] some small refactoring of transferTable

---
 .../org/polypheny/db/ddl/DdlManagerImpl.java  | 207 ++++++++++--------
 .../db/processing/DataMigratorImpl.java       |  30 ++-
 2 files changed, 131 insertions(+), 106 deletions(-)

diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index ec5599b015..a5b0618a73 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -2356,115 +2356,134 @@ public void transferTable( CatalogTable sourceTable, long targetSchemaId, Statem
         // Retrieve the catalog schema objects for later use
         CatalogSchema sourceNamespace = catalog.getSchema( sourceTable.namespaceId );
         CatalogSchema targetNamespace = catalog.getSchema( targetSchemaId );
-
         if ( sourceNamespace.getNamespaceType() == targetNamespace.getNamespaceType() ) {
             // If the source and target namespaces are from the same model, it is sufficient to just move them in the catalog
             catalog.relocateTable( sourceTable, targetSchemaId );
         } else if ( sourceNamespace.getNamespaceType() == NamespaceType.RELATIONAL && targetNamespace.getNamespaceType() == NamespaceType.DOCUMENT ) {
-            // If the source namespace is relational and the target is document-based, the migration has to be called
-            // Create the new collection in the same datastore
-            List<DataStore> stores = sourceTable.dataPlacements
-                    .stream()
-                    .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) )
-                    .collect( Collectors.toList() );
-            PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType;
-            createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement );
+            // If the source namespace is relational and the target is document-based, the DataMigrator has to be called as well
+            transferRelationalToDocument( sourceTable, targetSchemaId, statement );
+        } else if ( sourceNamespace.getNamespaceType() == NamespaceType.DOCUMENT && targetNamespace.getNamespaceType() == NamespaceType.RELATIONAL ) {
+            // If the source namespace is document-based and the target is relational, the DataMigrator has to be called as well
+            transferDocumentToRelational( sourceTable, targetSchemaId, pkColumnNamesOfTables, statement );
+        }
+    }
-
-            // Call the migrator
-            DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
-            dataMigrator.copyRelationalDataToDocumentData( statement.getTransaction(), sourceTable, targetSchemaId );
-
-            // Drop the source table
-            dropTable( sourceTable, statement );
-            statement.getQueryProcessor().resetCaches();
-        } else if ( sourceNamespace.getNamespaceType() == NamespaceType.DOCUMENT && targetNamespace.getNamespaceType() == NamespaceType.RELATIONAL ) {
-            // If the source namespace is document-based and the target is relational, the migration has to be called
-            // Retrieve the data placements of the source catalog
-            CatalogCollection sourceCollection = catalog.getCollection( sourceTable.id );
-            List<DataStore> stores = sourceTable.dataPlacements
-                    .stream()
-                    .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) )
-                    .collect( Collectors.toList() );
-            PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType;
-
-            // Get all documents of the source collection. Here it is necessary to create the target table with its columns
-            String query = String.format( "db.%s.find({})", sourceTable.name );
-            QueryParameters parameters = new MqlQueryParameters( query, sourceNamespace.name, NamespaceType.DOCUMENT );
-            AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) statement.getTransaction().getProcessor( QueryLanguage.MONGO_QL );
-            MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 );
-            AlgRoot logicalRoot = mqlProcessor.translate( statement, parsed, parameters );
-            PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( logicalRoot, true );
-            Result result = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false );
-
-            // Create a list of the JsonObjects skipping the _id column which is only needed for the documents but not for the table
-            List<JsonObject> jsonObjects = new ArrayList<>();
-            LinkedList<String> currentPosition = new LinkedList<>( Arrays.asList( sourceTable.name ) );
-            LinkedMap<LinkedList<String>, LinkedHashSet<String>> documentHierarchy = new LinkedMap<>( Map.of( currentPosition, new LinkedHashSet<>() ) );
-            for ( String[] documents : result.getData() ) {
-                for ( String document : documents ) {
-                    JsonObject jsonObject = JsonParser.parseString( document ).getAsJsonObject();
-                    buildDocumentHierarchy( jsonObject, documentHierarchy, currentPosition, pkColumnNamesOfTables );
-                    jsonObjects.add( jsonObject );
-                }
-            }
-
-            // Create the target table
-            // Only VARCHAR(32) columns are added in the current version
-            ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 64, null, null, null, false );
-            List<CatalogTable> addedTables = new ArrayList<>();
-            for ( Entry<LinkedList<String>, LinkedHashSet<String>> hierarchyEntry : documentHierarchy.entrySet() ) {
-                LinkedList<String> tablePath = hierarchyEntry.getKey();
-                LinkedHashSet<String> tableChildren = hierarchyEntry.getValue();
-                String tableName = tablePath.getLast();
-
-                // If the table has already added. E.g. when it has multiple parents
-                if ( !addedTables.stream().anyMatch( table -> tableName.equals( table.name ) ) ) {
-                    // If the table has also another parent, merge the tableChildren
-                    List<LinkedList<String>> sameTableWithOtherParents = documentHierarchy.keySet()
-                            .stream()
-                            .filter( k -> !k.equals( hierarchyEntry.getKey() ) && k.getLast().equals( hierarchyEntry.getKey().getLast() ) )
-                            .collect( Collectors.toList() );
-                    for ( LinkedList<String> sameTableWithOtherParent : sameTableWithOtherParents ) {
-                        tableChildren.addAll( documentHierarchy.get( sameTableWithOtherParent ) );
-                    }
-
-                    // Create the list of the PKs for the current table
-                    List<ConstraintInformation> constraintInformations =
-                            List.of( new ConstraintInformation( "primary", ConstraintType.PRIMARY, pkColumnNamesOfTables.get( tableName ) ) );
-                    List<FieldInformation> fieldInformations = tableChildren
-                            .stream()
-                            .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, new ArrayList<>( tableChildren ).indexOf( fieldName ) + 1 ) )
-                            .collect( Collectors.toList() );
-                    createTable( targetSchemaId, tableName, fieldInformations, constraintInformations, false, stores, placementType, statement );
-                    addedTables.add( catalog.getTable( targetSchemaId, tableName ) );
-                }
-                // Add FK if it's a child table
-                CatalogTable table = catalog.getTable( targetSchemaId, tableName );
-                if ( tablePath.size() > 1 ) {
-                    CatalogTable refTable = catalog.getTable( targetSchemaId, tablePath.get( tablePath.size() - 2 ) );
-                    List<String> refColumnNames = pkColumnNamesOfTables.get( refTable.name );
-                    List<String> columnNames = refColumnNames
-                            .stream()
-                            .map( columnName -> "fk_" + refTable.name + "_" + columnName )
-                            .collect( Collectors.toList() );
-                    addForeignKey( table, refTable, columnNames, refColumnNames, "fk_from_" + table.name + "_to_" + refTable.name, ForeignKeyOption.NONE, ForeignKeyOption.NONE );
-                }
-            }
-
-            //Call the DataMigrator
-            DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
-            dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, addedTables );
-
-            // Remove the source collection
-            dropCollection( sourceCollection, statement );
-            statement.getQueryProcessor().resetCaches();
-        }
-    }
+
+
+    private void transferRelationalToDocument( CatalogTable sourceTable, long targetSchemaId, Statement statement ) throws EntityAlreadyExistsException, DdlOnSourceException {
+        // Create the new collection in the same datastore
+        List<DataStore> stores = sourceTable.dataPlacements
+                .stream()
+                .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) )
+                .collect( Collectors.toList() );
+        PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType;
+        createCollection( targetSchemaId, sourceTable.name, false, stores, placementType, statement );
+
+        // Call the migrator
+        DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
+        dataMigrator.copyRelationalDataToDocumentData( statement.getTransaction(), sourceTable, targetSchemaId );
+
+        // Drop the source table
+        dropTable( sourceTable, statement );
+        statement.getQueryProcessor().resetCaches();
+    }
+
+
+    private void transferDocumentToRelational( CatalogTable sourceTable, long targetSchemaId, Map<String, List<String>> pkColumnNamesOfTables, Statement statement ) throws EntityAlreadyExistsException, UnknownTableException, UnknownColumnException, GenericCatalogException {
+        // Retrieve the data placements of the source catalog
+        CatalogCollection sourceCollection = catalog.getCollection( sourceTable.id );
+        List<DataStore> stores = sourceTable.dataPlacements
+                .stream()
+                .map( id -> (DataStore) AdapterManager.getInstance().getAdapter( id ) )
+                .collect( Collectors.toList() );
+        PlacementType placementType = catalog.getDataPlacement( sourceTable.dataPlacements.get( 0 ), sourceTable.id ).placementType;
+
+        // Get all documents of the source collection; these are needed to create the target tables with their columns
+        String query = String.format( "db.%s.find({})", sourceTable.name );
+
+        QueryParameters parameters = new MqlQueryParameters( query, catalog.getSchema( sourceTable.namespaceId ).name, NamespaceType.DOCUMENT );
+        AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) statement.getTransaction().getProcessor( QueryLanguage.MONGO_QL );
+        MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 );
+        AlgRoot logicalRoot = mqlProcessor.translate( statement, parsed, parameters );
+        PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( logicalRoot, true );
+        Result result = getResult( QueryLanguage.MONGO_QL, statement, query, polyImplementation, statement.getTransaction(), false );
+
+        // Create a list of the JsonObjects, skipping the _id column which is only needed for the documents but not for the table
+        List<JsonObject> jsonObjects = new ArrayList<>();
+        LinkedList<String> currentPosition = new LinkedList<>( Arrays.asList( sourceTable.name ) );
+        LinkedMap<LinkedList<String>, LinkedHashSet<String>> documentHierarchy = new LinkedMap<>( Map.of( currentPosition, new LinkedHashSet<>() ) );
+
+        for ( String document : result.getData()[0] ) {
+            JsonObject jsonObject = JsonParser.parseString( document ).getAsJsonObject();
+            buildDocumentHierarchy( jsonObject, documentHierarchy, currentPosition, pkColumnNamesOfTables );
+            jsonObjects.add( jsonObject );
+        }
+
+        // Create the target table
+        // Only VARCHAR(64) columns are added in the current version
+        ColumnTypeInformation typeInformation = new ColumnTypeInformation( PolyType.VARCHAR, PolyType.VARCHAR, 64, null, null, null, false );
+        List<CatalogTable> addedTables = new ArrayList<>();
+        for ( Entry<LinkedList<String>, LinkedHashSet<String>> hierarchyEntry : documentHierarchy.entrySet() ) {
+            LinkedList<String> tablePath = hierarchyEntry.getKey();
+            LinkedHashSet<String> tableChildren = hierarchyEntry.getValue();
+            String tableName = tablePath.getLast();
+
+            // If the table has already been added, e.g. when it has multiple parents
+            if ( !addedTables.stream().anyMatch( table -> tableName.equals( table.name ) ) ) {
+                // If the table also has another parent, merge the tableChildren
+                List<LinkedList<String>> sameTableWithOtherParents = documentHierarchy.keySet()
+                        .stream()
+                        .filter( k -> !k.equals( hierarchyEntry.getKey() ) && k.getLast().equals( hierarchyEntry.getKey().getLast() ) )
+                        .collect( Collectors.toList() );
+                for ( LinkedList<String> sameTableWithOtherParent : sameTableWithOtherParents ) {
+                    tableChildren.addAll( documentHierarchy.get( sameTableWithOtherParent ) );
+                }

+                // Create the list of the PKs for the current table
+                List<ConstraintInformation> constraintInformations =
+                        List.of( new ConstraintInformation( "primary", ConstraintType.PRIMARY, pkColumnNamesOfTables.get( tableName ) ) );

+                List<FieldInformation> fieldInformations = tableChildren
+                        .stream()
+                        .map( fieldName -> new FieldInformation( fieldName, typeInformation, Collation.getDefaultCollation(), null, new ArrayList<>( tableChildren ).indexOf( fieldName ) + 1 ) )
+                        .collect( Collectors.toList() );

+                createTable( targetSchemaId, tableName, fieldInformations, constraintInformations, false, stores, placementType, statement );
+                addedTables.add( catalog.getTable( targetSchemaId, tableName ) );
+            }
+            // Add FK if it's a child table
+            CatalogTable table = catalog.getTable( targetSchemaId, tableName );
+            if ( tablePath.size() > 1 ) {
+                CatalogTable refTable = catalog.getTable( targetSchemaId, tablePath.get( tablePath.size() - 2 ) );
+                List<String> refColumnNames = pkColumnNamesOfTables.get( refTable.name );
+                List<String> columnNames = refColumnNames
+                        .stream()
+                        .map( columnName -> "fk_" + refTable.name + "_" + columnName )
+                        .collect( Collectors.toList() );
+                addForeignKey( table, refTable, columnNames, refColumnNames, "fk_from_" + table.name + "_to_" + refTable.name, ForeignKeyOption.NONE, ForeignKeyOption.NONE );
+            }
+        }

+        // Call the DataMigrator
+        DataMigrator dataMigrator = statement.getTransaction().getDataMigrator();
+        dataMigrator.copyDocumentDataToRelationalData( statement.getTransaction(), jsonObjects, addedTables );

+        // Remove the source collection
+        dropCollection( sourceCollection, statement );
+        statement.getQueryProcessor().resetCaches();
+    }


 private static void buildDocumentHierarchy( JsonElement jsonObject, Map<LinkedList<String>, LinkedHashSet<String>> documentHierarchy, LinkedList<String> currentPath, Map<String, List<String>> pkColumnNamesOfTables ) {
     Set<String> currentTableColumnNames = documentHierarchy.get( currentPath );
+
+    // If no PK column was given, use the _id column.
+    // If the _id column does not exist, add it to the columns of the table.
+    if ( !pkColumnNamesOfTables.containsKey( currentPath.getLast() ) ) {
+        pkColumnNamesOfTables.put( currentPath.getLast(), List.of( "_id" ) );
+        currentTableColumnNames.add( "_id" );
+    }
+
     for ( String fieldName : ((JsonObject) jsonObject).keySet() ) {
         JsonElement jsonElement = ((JsonObject) jsonObject).get( fieldName );
         if ( jsonElement instanceof JsonObject ) {
@@ -2478,12 +2497,6 @@ private static void buildDocumentHierarchy( JsonElement jsonObject, Map
 1 ) {
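A compact model of what buildDocumentHierarchy() produces: a map from a path of table names to the scalar field names found at that level. This sketch is simplified; the document and table names are hypothetical, and the real method additionally seeds missing _id PK columns as shown in the hunk above.

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class HierarchySketch {

    // Record the scalar fields per table path; nested objects become child paths.
    static void build( JsonObject doc, LinkedList<String> path, Map<LinkedList<String>, Set<String>> hierarchy ) {
        Set<String> columns = hierarchy.computeIfAbsent( path, p -> new LinkedHashSet<>() );
        for ( String fieldName : doc.keySet() ) {
            JsonElement element = doc.get( fieldName );
            if ( element instanceof JsonObject ) {
                LinkedList<String> childPath = new LinkedList<>( path );
                childPath.add( fieldName );
                build( (JsonObject) element, childPath, hierarchy );
            } else {
                columns.add( fieldName );
            }
        }
    }

    public static void main( String[] args ) {
        JsonObject doc = JsonParser.parseString(
                "{\"id\":\"1\",\"name\":\"Alice\",\"orders\":{\"total\":\"42\"}}" ).getAsJsonObject();
        Map<LinkedList<String>, Set<String>> hierarchy = new LinkedHashMap<>();
        build( doc, new LinkedList<>( List.of( "customers" ) ), hierarchy );
        // Prints something like: {[customers]=[id, name], [customers, orders]=[total]}
        System.out.println( hierarchy );
    }
}

Each entry of this map then becomes one target table in transferDocumentToRelational(), with the path tail providing the parent for FK creation.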
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
index b9766a1ce1..0c05df3f96 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
@@ -16,18 +16,28 @@
 package org.polypheny.db.processing;

-import com.google.common.collect.ImmutableList;
+import static org.polypheny.db.ddl.DdlManagerImpl.getResult;

+import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-import java.util.*;
-
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.NoSuchElementException;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.calcite.avatica.MetaImpl;
 import org.apache.calcite.linq4j.Enumerable;
-import org.apache.commons.lang.StringUtils;
+import org.bson.types.ObjectId;
 import org.jetbrains.annotations.NotNull;
 import org.polypheny.db.PolyImplementation;
 import org.polypheny.db.adapter.DataStore;
@@ -41,7 +51,12 @@
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan;
 import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues;
 import org.polypheny.db.algebra.logical.relational.LogicalValues;
-import org.polypheny.db.algebra.type.*;
+import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.algebra.type.AlgDataTypeFactory;
+import org.polypheny.db.algebra.type.AlgDataTypeField;
+import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl;
+import org.polypheny.db.algebra.type.AlgDataTypeSystem;
+import org.polypheny.db.algebra.type.AlgRecordType;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.entity.CatalogAdapter;
 import org.polypheny.db.catalog.entity.CatalogColumn;
@@ -74,11 +89,8 @@
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.type.PolyTypeFactoryImpl;
 import org.polypheny.db.util.LimitIterator;
-import org.polypheny.db.webui.models.ForeignKey;
 import org.polypheny.db.webui.models.Result;
-
-import static org.polypheny.db.ddl.DdlManagerImpl.getResult;

 @Slf4j
 public class DataMigratorImpl implements DataMigrator {
@@ -307,7 +319,7 @@ private static void getColumnValuesForTable( Catalog catalog, CatalogTable table
         }
         if ( catalog.getPrimaryKey( table.primaryKey ).columnIds.contains( column.id ) ) {
             // Generate the _id if it is not in the document.
-            String generatedValue = UUID.randomUUID().toString();
+            String generatedValue = new ObjectId().toString();
             columnValues.get( column ).add( generatedValue );
             pkValues.put( String.join( ".", table.name, column.name ), generatedValue );
         } else if ( !catalog.getForeignKeys( table.id ).stream().anyMatch( fk -> fk.getColumnNames().contains( columnName ) ) ) {
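The switch from UUID.randomUUID() to new ObjectId() for generated _id values presumably matters because MongoDB-style document ids are 12-byte ObjectIds with a 24-character hex representation, while a 36-character UUID string is not a valid ObjectId. A minimal self-contained illustration, using the org.bson dependency already imported above:

import java.util.UUID;
import org.bson.types.ObjectId;

public class IdFormatSketch {

    public static void main( String[] args ) {
        // 24 hex characters, e.g. 63a9f0ea7bb98050796b649e
        System.out.println( new ObjectId().toString() );
        // 36 characters with dashes, e.g. 123e4567-e89b-12d3-a456-426614174000
        System.out.println( UUID.randomUUID().toString() );
        // ObjectId strings round-trip as valid ids; UUID strings do not
        System.out.println( ObjectId.isValid( new ObjectId().toString() ) );   // true
        System.out.println( ObjectId.isValid( UUID.randomUUID().toString() ) ); // false
    }
}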
From 475f1389b1fbdb855f4aea7aba1f0fa822c543ab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Attila=20S=C3=BCle?=
Date: Wed, 28 Dec 2022 16:04:46 +0100
Subject: [PATCH 26/26] fixed the relational -> document-based transfer

---
 .../org/polypheny/db/ddl/DdlManagerImpl.java   |  2 --
 .../db/processing/DataMigratorImpl.java        | 18 +++++++++++-------
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index a5b0618a73..492de3bb3c 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -33,10 +33,8 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.SortedSet;
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
-import java.util.stream.Stream;
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections4.map.LinkedMap;
diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
index 0c05df3f96..a177c65a93 100644
--- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java
@@ -16,8 +16,6 @@
 package org.polypheny.db.processing;

-import static org.polypheny.db.ddl.DdlManagerImpl.getResult;
-
 import com.google.common.collect.ImmutableList;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
@@ -89,7 +87,6 @@
 import org.polypheny.db.type.PolyType;
 import org.polypheny.db.type.PolyTypeFactoryImpl;
 import org.polypheny.db.util.LimitIterator;
-import org.polypheny.db.webui.models.Result;

 @Slf4j
@@ -246,17 +243,24 @@ public void copyRelationalDataToDocumentData( Transaction transaction, CatalogTa
                 }
                 bf.append( "])" );

-                // Insert als documents into the newlz created collection
+                // Insert all documents into the newly created collection
                 Statement targetStatement = transaction.createStatement();
                 String query = bf.toString();
                 AutomaticDdlProcessor mqlProcessor = (AutomaticDdlProcessor) transaction.getProcessor( Catalog.QueryLanguage.MONGO_QL );
                 QueryParameters parameters = new MqlQueryParameters( query, catalog.getSchema( targetSchemaId ).name, Catalog.NamespaceType.DOCUMENT );
                 MqlNode parsed = (MqlNode) mqlProcessor.parse( query ).get( 0 );
                 AlgRoot logicalRoot = mqlProcessor.translate( targetStatement, parsed, parameters );
-                PolyImplementation polyImplementation = targetStatement.getQueryProcessor().prepareQuery( logicalRoot, true );
-                // TODO: something is wrong with the transactions. Try to get rid of this.
-                Result updateRresult = getResult( Catalog.QueryLanguage.MONGO_QL, targetStatement, query, polyImplementation, transaction, false );
+
+                // Prepare the insert query
+                Iterator<?> iterator = targetStatement.getQueryProcessor()
+                        .prepareQuery( logicalRoot, true )
+                        .enumerable( targetStatement.getDataContext() )
+                        .iterator();
+                //noinspection WhileLoopReplaceableByForEach
+                while ( iterator.hasNext() ) {
+                    iterator.next();
+                }
+                targetStatement.getDataContext().resetParameterValues();
             }
         } catch ( Throwable t ) {
             throw new RuntimeException( t );
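The replacement of getResult(...) with an explicit iterator mirrors the pattern used elsewhere in this series: prepareQuery() builds a lazy enumerable, and the insert's side effects only happen while that enumerable is consumed, so draining the iterator is what actually executes the statement. A self-contained analogy with a lazy Java stream (the printed "side effect" stands in for the rows being written):

import java.util.Iterator;
import java.util.List;

public class DrainSketch {

    public static void main( String[] args ) {
        Iterator<Integer> iterator = List.of( 1, 2, 3 ).stream()
                .map( i -> {
                    System.out.println( "side effect for row " + i );
                    return i;
                } )
                .iterator();
        // Nothing has executed yet; the mapping runs lazily while we drain:
        while ( iterator.hasNext() ) {
            iterator.next();
        }
    }
}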