diff --git a/.github/workflows/matrix.yml b/.github/workflows/matrix.yml index 1330307909..ab93c2fa7d 100644 --- a/.github/workflows/matrix.yml +++ b/.github/workflows/matrix.yml @@ -37,4 +37,4 @@ jobs: run: ./gradlew assemblePlugins -PwithoutpullingUi=true - name: Execute tests timeout-minutes: 30 - run: ./gradlew check -PwithoutpullingUi=true + run: ./gradlew platformTests -PwithoutpullingUi=true diff --git a/build.gradle b/build.gradle index 3c18278242..ea11fbad98 100644 --- a/build.gradle +++ b/build.gradle @@ -112,6 +112,17 @@ allprojects { } integrationTests.dependsOn(testClasses) + // matrix tests exclude tests which rely on docker during testing due to different platform requirements + tasks.register('platformTests', Test) { + description = 'Runs platform tests.' + group = 'verification' + useJUnitPlatform { + excludeTags("docker") + } + shouldRunAfter(tasks.named('test')) + } + platformTests.dependsOn(testClasses) + testlogger { theme 'standard' showExceptions true diff --git a/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java b/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java index 9b7d6403c7..890283e3e0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java +++ b/core/src/main/java/org/polypheny/db/algebra/constant/Kind.java @@ -1322,6 +1322,19 @@ public enum Kind { * Document model {@code $exists} operator */ MQL_EXISTS, + + MQL_GEO_INTERSECTS, + + MQL_GEO_WITHIN, + + MQL_GEO_DISTANCE, + + MQL_NEAR, + + MQL_NEAR_SPHERE, + + MQL_GEO_NEAR, + /* * Deserialize operator */ diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Join.java b/core/src/main/java/org/polypheny/db/algebra/core/Join.java index a20a7f3401..29e74d1135 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Join.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Join.java @@ -277,5 +277,4 @@ public PolyAlgArgs bindArguments() { return args; } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Sort.java b/core/src/main/java/org/polypheny/db/algebra/core/Sort.java index dc4c4c9944..576bbea90f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Sort.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Sort.java @@ -82,6 +82,8 @@ public abstract class Sort extends SingleAlg { */ @Getter public final AlgCollation collation; + + @Getter protected final ImmutableList fieldExps; public final RexNode offset; public final RexNode fetch; diff --git a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentProject.java b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentProject.java index 2f0fde7cc5..30136407ca 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentProject.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/document/DocumentProject.java @@ -18,6 +18,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -47,17 +48,22 @@ public abstract class DocumentProject extends SingleAlg implements DocumentAlg { public Map includes; public List excludes; + public Map adds; /** * Creates a {@link DocumentProject}. * {@link ModelTrait#DOCUMENT} native node of a project. 
*/ - protected DocumentProject( AlgCluster cluster, AlgTraitSet traits, AlgNode input, @NotNull Map includes, @NotNull List excludes ) { + protected DocumentProject( AlgCluster cluster, AlgTraitSet traits, AlgNode input, @NotNull Map includes, @NotNull List excludes, @NotNull Map adds ) { super( cluster, traits, input ); this.includes = includes; + this.adds = adds; this.excludes = excludes; - this.rowType = DocumentType.ofIncludes( includes ).ofExcludes( excludes ); + Map map = new HashMap<>(); + map.putAll( adds ); + map.putAll( includes ); + this.rowType = DocumentType.ofIncludes( map ).ofExcludes( excludes ); } @@ -97,6 +103,20 @@ public RexNode asSingleProject() { builder.getTypeFactory().createArrayType( builder.getTypeFactory().createPolyType( PolyType.CHAR, 255 ), -1 ), PolyType.ARRAY ) ); nodes.addAll( includes.entrySet().stream().filter( o -> Objects.nonNull( o.getKey() ) ).map( Entry::getValue ).toList() ); + if ( !adds.isEmpty() ) { + nodes.clear(); + nodes.add( doc ); + nodes.add( + builder.makeLiteral( + PolyList.copyOf( adds.keySet().stream().filter( Objects::nonNull ).map( v -> PolyList.copyOf( Arrays.stream( v.split( "\\." ) ).map( PolyString::of ).toList() ) ) + .toList() ), + builder.getTypeFactory().createArrayType( builder.getTypeFactory().createPolyType( PolyType.CHAR, 255 ), -1 ), PolyType.ARRAY ) ); + nodes.addAll( adds.entrySet().stream().filter( o -> Objects.nonNull( o.getKey() ) ).map( Entry::getValue ).toList() ); + + doc = builder.makeCall( getTupleType(), OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_MERGE_ADD ), nodes ); + return doc; + } + if ( !includes.isEmpty() ) { doc = builder.makeCall( getTupleType(), OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_MERGE ), nodes ); diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java index 13ddc04d83..f373b1c4c6 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java @@ -231,6 +231,5 @@ public PolyAlgArgs bindArguments() { .put( "flattened", new BooleanArg( isFlattened() ) ); } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSortRule.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSortRule.java index bf46231536..b55433e9eb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSortRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSortRule.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java index 1c40720801..85e7095d1b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java @@ -476,6 +476,11 @@ private void defineCypherMethods() { defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_SET_LABELS ), BuiltInMethod.CYPHER_SET_LABELS.method, NullPolicy.NONE ); defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_REMOVE_LABELS ), BuiltInMethod.CYPHER_REMOVE_LABELS.method, NullPolicy.NONE ); defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_REMOVE_PROPERTY ), BuiltInMethod.CYPHER_REMOVE_PROPERTY.method, NullPolicy.NONE ); + defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_POINT ), BuiltInMethod.CYPHER_POINT.method, NullPolicy.NONE ); + defineMethod( OperatorRegistry.get( cypher, OperatorName.DISTANCE ), BuiltInMethod.CYPHER_DISTANCE.method, NullPolicy.NONE ); + defineMethod( OperatorRegistry.get( cypher, OperatorName.DISTANCE_NEO4J ), BuiltInMethod.CYPHER_DISTANCE_NEO4J.method, NullPolicy.NONE ); + defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_WITHIN_BBOX ), BuiltInMethod.CYPHER_WITHIN_BBOX.method, NullPolicy.NONE ); + defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_WITHIN_GEOMETRY ), BuiltInMethod.CYPHER_WITHIN_GEOMETRY.method, NullPolicy.NONE ); } @@ -503,10 +508,13 @@ private void defineMongoMethods() { defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_REMOVE ), BuiltInMethod.MQL_REMOVE.method, NullPolicy.STRICT ); defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_EXISTS ), BuiltInMethod.MQL_EXISTS.method, NullPolicy.STRICT ); defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_MERGE ), BuiltInMethod.MQL_MERGE.method, NullPolicy.STRICT ); + defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_MERGE_ADD ), BuiltInMethod.MQL_MERGE_ADD.method, NullPolicy.STRICT ); defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_PROJECT_INCLUDES ), BuiltInMethod.MQL_PROJECT_INCLUDES.method, NullPolicy.STRICT ); defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_REPLACE_ROOT ), BuiltInMethod.MQL_REPLACE_ROOT.method, NullPolicy.STRICT ); defineMethod( OperatorRegistry.get( mongo, OperatorName.MQL_NOT_UNSET ), BuiltInMethod.MQL_NOT_UNSET.method, NullPolicy.STRICT ); - + defineImplementor( OperatorRegistry.get( mongo, OperatorName.MQL_GEO_INTERSECTS ), NullPolicy.NONE, new MethodImplementor( BuiltInMethod.MQL_GEO_INTERSECTS.method ), false ); + defineImplementor( OperatorRegistry.get( mongo, OperatorName.MQL_GEO_WITHIN ), NullPolicy.NONE, new MethodImplementor( BuiltInMethod.MQL_GEO_WITHIN.method ), false ); + defineImplementor( OperatorRegistry.get( mongo, OperatorName.MQL_GEO_DISTANCE ), NullPolicy.NONE, new MethodImplementor( BuiltInMethod.MQL_GEO_DISTANCE.method ), false ); defineMqlMethod( OperatorName.PLUS, "plus", NullPolicy.STRICT ); defineMqlMethod( OperatorName.MINUS, "minus", NullPolicy.STRICT ); defineMqlMethod( OperatorName.MULTIPLY, "multiply", NullPolicy.STRICT ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentAggregateToAggregateRule.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentAggregateToAggregateRule.java index 5ea1f776f1..02e6c84697 100644 --- 
a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentAggregateToAggregateRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentAggregateToAggregateRule.java @@ -127,7 +127,7 @@ public void onMatch( AlgOptRuleCall call ) { // RexNode doc = builder.getRexBuilder().makeCall( DocumentType.ofId(), OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_MERGE ), nodes ); - call.transformTo( LogicalDocumentProject.create( enumerableAggregate, docs, List.of() ) ); + call.transformTo( LogicalDocumentProject.create( enumerableAggregate, docs, List.of(), Map.of() ) ); // call.transformTo( LogicalAggregate.create( alg.getInput(), alg.groupSet, alg.groupSets, alg.aggCalls ) );*/ } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentFilterToCalcRule.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentFilterToCalcRule.java index cc81808259..8fdf4047df 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentFilterToCalcRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentFilterToCalcRule.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,10 +22,12 @@ import org.polypheny.db.algebra.core.document.DocumentFilter; import org.polypheny.db.algebra.enumerable.EnumerableCalc; import org.polypheny.db.algebra.enumerable.EnumerableConvention; +import org.polypheny.db.algebra.enumerable.document.DocumentProjectToCalcRule.NearDetector; import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.plan.AlgCluster; @@ -53,13 +55,20 @@ public DocumentFilterToCalcRule() { @Override public AlgNode convert( AlgNode alg ) { final LogicalDocumentFilter filter = (LogicalDocumentFilter) alg; + + NearDetector nearDetector = new NearDetector(); + filter.accept( nearDetector ); + if ( nearDetector.containsNear ) { + return null; + } + final AlgNode input = filter.getInput(); // Create a program containing a filter. 
final RexBuilder rexBuilder = filter.getCluster().getRexBuilder(); final AlgDataType inputRowType = input.getTupleType(); final RexProgramBuilder programBuilder = new RexProgramBuilder( inputRowType, rexBuilder ); - NameRefReplacer replacer = new NameRefReplacer( filter.getCluster(), false ); + NameRefReplacer replacer = new NameRefReplacer( filter.getCluster(), false, alg.getInput( 0 ) ); programBuilder.addIdentity(); programBuilder.addCondition( filter.condition.accept( replacer ) ); final RexProgram program = programBuilder.getProgram(); @@ -74,26 +83,32 @@ public AlgNode convert( AlgNode alg ) { public static class NameRefReplacer extends RexShuttle { private final AlgCluster cluster; + private final AlgNode input; boolean inplace; - public NameRefReplacer( AlgCluster cluster, boolean inplace ) { + public NameRefReplacer( AlgCluster cluster, boolean inplace, AlgNode input ) { this.cluster = cluster; this.inplace = inplace; + this.input = input; } @Override public RexNode visitNameRef( RexNameRef nameRef ) { + int index = 0; + if ( input.getModel() == DataModel.RELATIONAL ) { + // within document model we just access the main field, if already mapped we use the data field + index = input.getTupleType().getFields().stream().filter( f -> f.getName().equals( "_data" ) ).map( AlgDataTypeField::getIndex ).findAny().orElse( 0 ); + } + return new RexCall( nameRef.getType(), OperatorRegistry.get( QueryLanguage.from( "mql" ), OperatorName.MQL_QUERY_VALUE ), - RexIndexRef.of( 0, DocumentType.ofDoc() ), + RexIndexRef.of( index, input.getTupleType() ), DocumentUtil.getStringArray( nameRef.names, cluster ) ); } - } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentGeoNearUnwrap.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentGeoNearUnwrap.java new file mode 100644 index 0000000000..82673ae281 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentGeoNearUnwrap.java @@ -0,0 +1,235 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.polypheny.db.algebra.enumerable.document;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.polypheny.db.algebra.AlgCollations;
+import org.polypheny.db.algebra.AlgFieldCollation;
+import org.polypheny.db.algebra.AlgFieldCollation.Direction;
+import org.polypheny.db.algebra.AlgNode;
+import org.polypheny.db.algebra.constant.Kind;
+import org.polypheny.db.algebra.convert.ConverterRule;
+import org.polypheny.db.algebra.core.AlgFactories;
+import org.polypheny.db.algebra.core.document.DocumentFilter;
+import org.polypheny.db.algebra.enumerable.document.DocumentProjectToCalcRule.NearDetector;
+import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter;
+import org.polypheny.db.algebra.logical.document.LogicalDocumentProject;
+import org.polypheny.db.algebra.logical.document.LogicalDocumentSort;
+import org.polypheny.db.algebra.operators.OperatorName;
+import org.polypheny.db.algebra.type.DocumentType;
+import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
+import org.polypheny.db.languages.OperatorRegistry;
+import org.polypheny.db.languages.QueryLanguage;
+import org.polypheny.db.nodes.Operator;
+import org.polypheny.db.plan.AlgCluster;
+import org.polypheny.db.plan.Convention;
+import org.polypheny.db.rex.RexCall;
+import org.polypheny.db.rex.RexLiteral;
+import org.polypheny.db.rex.RexNameRef;
+import org.polypheny.db.rex.RexNode;
+import org.polypheny.db.type.PolyType;
+import org.polypheny.db.type.entity.PolyValue;
+import org.polypheny.db.util.Pair;
+
+/**
+ * The $geoNear operation cannot be executed as a single function, which is why this conversion
+ * rule replaces it with multiple operations:
+ *
+ * 1. Filter: Apply the optional query to the input documents
+ * 2. Projection: Add the computed distance field (distanceField)
+ * 3. Filter: Filter out documents based on minDistance / maxDistance
+ * 4. Sort: Sort the resulting documents in ascending order by the computed distance.
+ *
+ * This conversion is done in the planner instead of in the MqlToAlgConverter, so
+ * that it is only applied if the operation is executed internally in Polypheny. This way,
+ * it is easier to translate the function when offloading to MongoDB.
+ *
+ * Other rules will be blocked by the {@link NearDetector} to force this plan.
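+ *
+ * For illustration only (the collection fields and values below are invented), a stage of roughly this shape
+ *
+ *   { $geoNear: { near: { type: "Point", coordinates: [ 7.58, 47.56 ] }, distanceField: "dist",
+ *                 key: "location", minDistance: 10, maxDistance: 500, query: { type: "cafe" } } }
+ *
+ * would be rewritten into a plan along the lines of
+ *
+ *   LogicalDocumentSort( dist ASC )
+ *     LogicalDocumentFilter( dist >= 10 AND dist <= 500 )
+ *       LogicalDocumentProject( adds: dist = MQL_GEO_DISTANCE( location, near, distanceMultiplier ) )
+ *         LogicalDocumentFilter( type = "cafe" )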
+ */ +public class DocumentGeoNearUnwrap extends ConverterRule { + + public static final DocumentGeoNearUnwrap INSTANCE = new DocumentGeoNearUnwrap(); + + + public DocumentGeoNearUnwrap() { + super( DocumentFilter.class, DocumentGeoNearUnwrap::supports, Convention.NONE, Convention.NONE, AlgFactories.LOGICAL_BUILDER, DocumentGeoNearUnwrap.class.getSimpleName() ); + } + + + private AlgCluster cluster; + + + public static boolean supports( DocumentFilter filter ) { + Kind nearKind = filter.getCondition().getKind(); + return nearKind == Kind.MQL_GEO_NEAR; + } + + + @Override + public AlgNode convert( AlgNode alg ) { + if ( !(alg instanceof DocumentFilter filter) ) { + throw new GenericRuntimeException( "todo" ); + } + cluster = alg.getCluster(); + RexCall geoNearCall = (RexCall) filter.getCondition(); + assert geoNearCall.operands.size() == 8; + RexLiteral nearGeometry = (RexLiteral) geoNearCall.operands.get( 0 ); + RexNameRef distanceField = (RexNameRef) geoNearCall.operands.get( 1 ); + RexLiteral distanceMultiplier = (RexLiteral) geoNearCall.operands.get( 2 ); + RexNameRef includeLocs = (RexNameRef) geoNearCall.operands.get( 3 ); + RexLiteral key = (RexLiteral) geoNearCall.operands.get( 4 ); + RexLiteral maxDistance = (RexLiteral) geoNearCall.operands.get( 5 ); + RexLiteral minDistance = (RexLiteral) geoNearCall.operands.get( 6 ); + RexNode query = geoNearCall.operands.get( 7 ); + + AlgNode replacementNode = filter.getInput(); + + if ( key.value.toString().isEmpty() ) { + // Necessary, because otherwise we do not know which field contains the spatial information. + throw new GenericRuntimeException( "The key option is required when executing $geoNear internally. This is because Polypheny currently does not support any indexes." ); + } + + // + // Step 1: + // Apply filter for $geoNear query + if ( query instanceof RexCall ) { + RexNode filterCondition = getFixedCall( + List.of( query ), + OperatorRegistry.get( OperatorName.AND ), + PolyType.BOOLEAN + ); + replacementNode = LogicalDocumentFilter.create( filter.getInput(), filterCondition ); + replacementNode.getTupleType(); + } + + // + // Step 2: + // Add distanceField using a project + Map adds = new HashMap<>(); + adds.put( distanceField.name, getFixedCall( List.of( + new RexNameRef( List.of( key.value.toString() ), null, DocumentType.ofDoc() ), + nearGeometry, + distanceMultiplier + ), OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_GEO_DISTANCE ), PolyType.ANY ) ); + + if ( !includeLocs.name.isEmpty() ) { + // Include the location field that is used for calculating the distance under the includeLocs field. 
+ adds.put( + includeLocs.name, new RexNameRef( key.value.toString(), null, DocumentType.ofDoc() ) // addLocation + ); + } + replacementNode = LogicalDocumentProject.create( replacementNode, Map.of(), List.of(), adds ); + replacementNode.getTupleType(); + + // + // Step 3: + // Filter by minDistance, maxDistance + List filterNodes = new ArrayList<>(); + if ( minDistance.getValue().asNumber().doubleValue() != -1.0 ) { + filterNodes.add( + getFixedCall( + List.of( + distanceField, + convertLiteral( minDistance.getValue() ) ), + OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_GTE ), + PolyType.BOOLEAN ) ); + } + if ( maxDistance.getValue().asNumber().doubleValue() != -1.0 ) { + filterNodes.add( + getFixedCall( + List.of( + distanceField, + convertLiteral( maxDistance.getValue() ) ), + OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_LTE ), + PolyType.BOOLEAN ) ); + } + if ( !filterNodes.isEmpty() ) { + RexNode filterCondition = getFixedCall( + filterNodes, + OperatorRegistry.get( OperatorName.AND ), + PolyType.BOOLEAN + ); + replacementNode = LogicalDocumentFilter.create( replacementNode, filterCondition ); + replacementNode.getTupleType(); + } + + // + // Step 4: + // Sort by distance ascending + List names = List.of( distanceField.name ); + replacementNode = LogicalDocumentSort.create( + replacementNode, + AlgCollations.of( generateCollation( List.of( Direction.ASCENDING ), names, names ) ), + List.of( distanceField ), + null, + null ); + replacementNode.getTupleType(); + + return replacementNode; + } + + + private List generateCollation( List dirs, List names, List rowNames ) { + List collations = new ArrayList<>(); + int pos = 0; + int index; + for ( String name : names ) { + index = rowNames.indexOf( name ); + collations.add( new AlgFieldCollation( index, dirs.get( pos ) ) ); + pos++; + } + return collations; + } + + + private RexNode convertLiteral( PolyValue polyValue ) { + Pair valuePair = RexLiteral.convertType( polyValue, new DocumentType() ); + return new RexLiteral( valuePair.left, new DocumentType(), valuePair.right ); + } + + + private RexNode getFixedCall( List operands, Operator op, PolyType polyType ) { + if ( operands.size() == 1 ) { + if ( op.getKind() == Kind.NOT && operands.get( 0 ) instanceof RexCall && ((RexCall) operands.get( 0 )).op.getKind() == Kind.NOT ) { + // we have a nested NOT, which can be removed + return ((RexCall) operands.get( 0 )).operands.get( 0 ); + } + + return operands.get( 0 ); + } else { + List toRemove = new ArrayList<>(); + List toAdd = new ArrayList<>(); + // maybe we have to fix nested AND or OR combinations + for ( RexNode operand : operands ) { + if ( operand instanceof RexCall && ((RexCall) operand).op.getName().equals( op.getName() ) ) { // TODO DL maybe remove if not longer same type + toAdd.addAll( ((RexCall) operand).operands ); + toRemove.add( operand ); + } + } + if ( !toAdd.isEmpty() ) { + operands.addAll( toAdd ); + operands.removeAll( toRemove ); + } + + return new RexCall( cluster.getTypeFactory().createTypeWithNullability( cluster.getTypeFactory().createPolyType( polyType ), true ), op, operands ); + } + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentNearUnwrap.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentNearUnwrap.java new file mode 100644 index 0000000000..ea295c9a22 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentNearUnwrap.java @@ -0,0 +1,214 @@ +/* + * Copyright 
2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.algebra.enumerable.document;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import org.polypheny.db.algebra.AlgCollations;
+import org.polypheny.db.algebra.AlgFieldCollation;
+import org.polypheny.db.algebra.AlgFieldCollation.Direction;
+import org.polypheny.db.algebra.AlgNode;
+import org.polypheny.db.algebra.constant.Kind;
+import org.polypheny.db.algebra.convert.ConverterRule;
+import org.polypheny.db.algebra.core.AlgFactories;
+import org.polypheny.db.algebra.core.document.DocumentFilter;
+import org.polypheny.db.algebra.enumerable.document.DocumentProjectToCalcRule.NearDetector;
+import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter;
+import org.polypheny.db.algebra.logical.document.LogicalDocumentProject;
+import org.polypheny.db.algebra.logical.document.LogicalDocumentSort;
+import org.polypheny.db.algebra.operators.OperatorName;
+import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.algebra.type.DocumentType;
+import org.polypheny.db.languages.OperatorRegistry;
+import org.polypheny.db.languages.QueryLanguage;
+import org.polypheny.db.nodes.Operator;
+import org.polypheny.db.plan.AlgCluster;
+import org.polypheny.db.plan.Convention;
+import org.polypheny.db.rex.RexCall;
+import org.polypheny.db.rex.RexLiteral;
+import org.polypheny.db.rex.RexNameRef;
+import org.polypheny.db.rex.RexNode;
+import org.polypheny.db.type.PolyType;
+import org.polypheny.db.type.entity.PolyValue;
+import org.polypheny.db.type.entity.numerical.PolyInteger;
+import org.polypheny.db.util.Pair;
+
+/**
+ * The $near function cannot be executed as a single function, which is why this conversion
+ * rule replaces the operation with multiple operations:
+ *
+ * 1. Projection: Add a temporary computed distance field
+ * 2. Filter: Filter out documents based on minDistance / maxDistance
+ * 3. Sort: Sort the resulting documents in ascending order by the computed distance.
+ * 4. Projection: Remove the computed distance field again.
+ *
+ * This conversion is done in the planner instead of in the MqlToAlgConverter, so
+ * that it is only applied if the operation is executed internally in Polypheny. This way,
+ * it is easier to translate the function when offloading to MongoDB.
+ *
+ * Other rules will be blocked by the {@link NearDetector} to force this plan.
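+ *
+ * For illustration only (the field name and values below are invented), a filter of roughly this shape
+ *
+ *   { location: { $near: { $geometry: { type: "Point", coordinates: [ 7.58, 47.56 ] },
+ *                          $minDistance: 10, $maxDistance: 500 } } }
+ *
+ * would be rewritten into a plan along the lines of (dist stands for the generated temporary __temp_ field)
+ *
+ *   LogicalDocumentProject( excludes: dist )
+ *     LogicalDocumentSort( dist ASC )
+ *       LogicalDocumentFilter( dist >= 10 AND dist <= 500 )
+ *         LogicalDocumentProject( adds: dist = MQL_GEO_DISTANCE( location, $geometry, 1 ) )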
+ */ +public class DocumentNearUnwrap extends ConverterRule { + + public static final DocumentNearUnwrap INSTANCE = new DocumentNearUnwrap(); + + + public DocumentNearUnwrap() { + super( DocumentFilter.class, DocumentNearUnwrap::supports, Convention.NONE, Convention.NONE, AlgFactories.LOGICAL_BUILDER, DocumentNearUnwrap.class.getSimpleName() ); + } + + + private AlgCluster cluster; + + + public static boolean supports( DocumentFilter filter ) { + Kind nearKind = filter.getCondition().getKind(); + return nearKind == Kind.MQL_NEAR || nearKind == Kind.MQL_NEAR_SPHERE; + } + + + @Override + public AlgNode convert( AlgNode alg ) { + if ( !(alg instanceof DocumentFilter filter) ) { + return null; + } + cluster = alg.getCluster(); + RexCall nearCall = (RexCall) filter.getCondition(); + AlgDataType rowType = alg.getTupleType(); + assert nearCall.operands.size() == 4; + RexNameRef input = (RexNameRef) nearCall.operands.get( 0 ); + RexLiteral geometry = (RexLiteral) nearCall.operands.get( 1 ); + RexLiteral minDistance = (RexLiteral) nearCall.operands.get( 2 ); + RexLiteral maxDistance = (RexLiteral) nearCall.operands.get( 3 ); + + // + // Step 1: + // Add temporary distanceField using a project + final String distanceField = "__temp_%s".formatted( UUID.randomUUID().toString() ); + Map adds = new HashMap<>(); + adds.put( distanceField, getFixedCall( List.of( + input, + geometry, + convertLiteral( new PolyInteger( 1 ) ) + ), OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_GEO_DISTANCE ), PolyType.ANY ) ); + AlgNode replacementNode = LogicalDocumentProject.create( filter.getInput(), Map.of(), List.of(), adds ); + replacementNode.getTupleType(); + + // + // Step 2: + // Filter by minDistance, maxDistance + List filterNodes = new ArrayList<>(); + if ( minDistance.getValue().asNumber().intValue() != -1 ) { + filterNodes.add( + getFixedCall( + List.of( + new RexNameRef( List.of( distanceField ), null, DocumentType.ofDoc() ), + convertLiteral( minDistance.getValue() ) ), + OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_GTE ), + PolyType.BOOLEAN ) ); + } + if ( maxDistance.getValue().asNumber().intValue() != -1 ) { + filterNodes.add( + getFixedCall( + List.of( + new RexNameRef( List.of( distanceField ), null, DocumentType.ofDoc() ), + convertLiteral( maxDistance.getValue() ) ), + OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_LTE ), + PolyType.BOOLEAN ) ); + } + if ( !filterNodes.isEmpty() ) { + RexNode filterCondition = getFixedCall( + filterNodes, + OperatorRegistry.get( OperatorName.AND ), + PolyType.BOOLEAN + ); + replacementNode = LogicalDocumentFilter.create( replacementNode, filterCondition ); + replacementNode.getTupleType(); + } + + // + // Step 3: + // Sort by distance ascending + List names = List.of( distanceField ); + replacementNode = LogicalDocumentSort.create( + replacementNode, + AlgCollations.of( generateCollation( List.of( Direction.ASCENDING ), names, names ) ), + List.of( new RexNameRef( List.of( distanceField ), null, DocumentType.ofDoc() ) ), + null, + null ); + replacementNode.getTupleType(); + + // + // Step 4: + // Projection to remove field distance + replacementNode = LogicalDocumentProject.create( replacementNode, Map.of(), List.of( distanceField ), Map.of() ); + replacementNode.getTupleType(); + + return replacementNode; + } + + + private List generateCollation( List dirs, List names, List rowNames ) { + List collations = new ArrayList<>(); + int pos = 0; + int index; + for ( String name : names ) { + index = 
rowNames.indexOf( name ); + collations.add( new AlgFieldCollation( index, dirs.get( pos ) ) ); + pos++; + } + return collations; + } + + + private RexNode convertLiteral( PolyValue polyValue ) { + Pair valuePair = RexLiteral.convertType( polyValue, new DocumentType() ); + return new RexLiteral( valuePair.left, new DocumentType(), valuePair.right ); + } + + + private RexNode getFixedCall( List operands, Operator op, PolyType polyType ) { + if ( operands.size() == 1 ) { + if ( op.getKind() == Kind.NOT && operands.get( 0 ) instanceof RexCall && ((RexCall) operands.get( 0 )).op.getKind() == Kind.NOT ) { + // we have a nested NOT, which can be removed + return ((RexCall) operands.get( 0 )).operands.get( 0 ); + } + + return operands.get( 0 ); + } else { + List toRemove = new ArrayList<>(); + List toAdd = new ArrayList<>(); + // maybe we have to fix nested AND or OR combinations + for ( RexNode operand : operands ) { + if ( operand instanceof RexCall && ((RexCall) operand).op.getName().equals( op.getName() ) ) { // TODO DL maybe remove if not longer same type + toAdd.addAll( ((RexCall) operand).operands ); + toRemove.add( operand ); + } + } + if ( !toAdd.isEmpty() ) { + operands.addAll( toAdd ); + operands.removeAll( toRemove ); + } + + return new RexCall( cluster.getTypeFactory().createTypeWithNullability( cluster.getTypeFactory().createPolyType( polyType ), true ), op, operands ); + } + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentProjectToCalcRule.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentProjectToCalcRule.java index 8329dac629..fa8f60ab6a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentProjectToCalcRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentProjectToCalcRule.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,10 +25,13 @@ import org.polypheny.db.algebra.enumerable.EnumerableConvention; import org.polypheny.db.algebra.enumerable.document.DocumentFilterToCalcRule.NameRefReplacer; import org.polypheny.db.algebra.logical.document.LogicalDocumentProject; +import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.plan.Convention; +import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexProgram; +import org.polypheny.db.rex.RexShuttle; public class DocumentProjectToCalcRule extends ConverterRule { @@ -44,10 +47,42 @@ public DocumentProjectToCalcRule() { public AlgNode convert( AlgNode alg ) { final LogicalDocumentProject project = (LogicalDocumentProject) alg; final AlgNode input = project.getInput(); - NameRefReplacer replacer = new NameRefReplacer( project.getCluster(), false ); + NameRefReplacer replacer = new NameRefReplacer( project.getCluster(), false, alg.getInput( 0 ) ); + + NearDetector nearDetector = new NearDetector(); + project.accept( nearDetector ); + if ( nearDetector.containsNear ) { + return null; + } + List adjustedProjects = List.of( project.asSingleProject().accept( replacer ) ); final RexProgram program = RexProgram.create( input.getTupleType(), adjustedProjects, null, DocumentType.ofId(), project.getCluster().getRexBuilder() ); return EnumerableCalc.create( convert( input, input.getTraitSet().replace( EnumerableConvention.INSTANCE ) ), program ); } + + /** + * See {@link DocumentNearUnwrap} to see why we need to do this. + */ + public static class NearDetector extends RexShuttle { + + public boolean containsNear; + + + NearDetector() { + } + + + @Override + public RexNode visitCall( RexCall call ) { + if ( call.getOperator().getOperatorName() == OperatorName.MQL_NEAR || + call.getOperator().getOperatorName() == OperatorName.MQL_NEAR_SPHERE || + call.getOperator().getOperatorName() == OperatorName.MQL_GEO_NEAR ) { + containsNear = true; + } + return super.visitCall( call ); + } + + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentSortToSortRule.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentSortToSortRule.java index 8a3f90d934..9f0c720b05 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentSortToSortRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/document/DocumentSortToSortRule.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,6 +19,9 @@ import java.util.List; import java.util.stream.IntStream; import java.util.stream.Stream; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.enumerable.document.DocumentFilterToCalcRule.NameRefReplacer; import org.polypheny.db.algebra.logical.document.LogicalDocumentSort; import org.polypheny.db.algebra.logical.relational.LogicalRelProject; @@ -47,14 +50,23 @@ public void onMatch( AlgOptRuleCall call ) { AlgBuilder builder = call.builder(); builder.push( sort.getInput() ); + List fieldExps = sort.fieldExps; + AlgCollation collation = sort.collation; if ( !sort.fieldExps.isEmpty() ) { // we have to project the target keys out to use it in the sort builder.transform( ModelTrait.RELATIONAL, DocumentType.ofRelational(), false, null ); - NameRefReplacer visitor = new NameRefReplacer( sort.getCluster(), false ); - List inputs = Stream.concat( Stream.of( RexIndexRef.of( 0, DocumentType.ofId().asRelational().getFields() ), RexIndexRef.of( 1, DocumentType.ofId().asRelational().getFields() ) ), sort.fieldExps.stream().map( f -> f.accept( visitor ) ) ).toList(); + NameRefReplacer visitor = new NameRefReplacer( sort.getCluster(), false, builder.peek() ); + List inputs = Stream.concat( + Stream.of( + RexIndexRef.of( 0, DocumentType.ofId().asRelational().getFields() ), + RexIndexRef.of( 1, DocumentType.ofId().asRelational().getFields() ) ), + sort.fieldExps.stream().map( f -> f.accept( visitor ) ) ).toList(); builder.push( LogicalRelProject.create( builder.build(), inputs, IntStream.range( 0, inputs.size() ).mapToObj( i -> "in" + i ).toList() ) ); + fieldExps = List.of( RexIndexRef.of( 2, DocumentType.ofId() ) ); + + collation = AlgCollations.of( new AlgFieldCollation( 2 ) ); } - builder.push( LogicalRelSort.create( builder.build(), sort.fieldExps, sort.collation, sort.offset, sort.fetch ) ); + builder.push( LogicalRelSort.create( builder.build(), fieldExps, collation, sort.offset, sort.fetch ) ); if ( !sort.fieldExps.isEmpty() ) { // we have to restore the initial structure diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java index 478e1c8b3d..d6b27552d2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.jetbrains.annotations.NotNull; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentProject; @@ -37,18 +38,18 @@ public class LogicalDocumentProject extends DocumentProject { /** * Subclass of {@link DocumentProject} not targeted at any particular engine or calling convention. 
*/ - public LogicalDocumentProject( AlgCluster cluster, AlgTraitSet traits, AlgNode input, Map includes, List excludes ) { - super( cluster, traits, input, includes, excludes ); + public LogicalDocumentProject( AlgCluster cluster, AlgTraitSet traits, AlgNode input, Map includes, List excludes, @NotNull Map adds ) { + super( cluster, traits, input, includes, excludes, adds ); } - public static LogicalDocumentProject create( AlgNode node, Map includes, List excludes ) { - return new LogicalDocumentProject( node.getCluster(), node.getTraitSet(), node, includes, excludes ); + public static LogicalDocumentProject create( AlgNode node, Map includes, List excludes, Map adds ) { + return new LogicalDocumentProject( node.getCluster(), node.getTraitSet(), node, includes, excludes, adds ); } public static LogicalDocumentProject create( AlgNode node, List includes, List includesName, List excludes ) { - return create( node, Pair.zip( includesName, includes ).stream().collect( Collectors.toMap( e -> e.left, e -> e.right ) ), excludes ); + return create( node, Pair.zip( includesName, includes ).stream().collect( Collectors.toMap( e -> e.left, e -> e.right ) ), excludes, Map.of() ); } @@ -69,7 +70,7 @@ public static LogicalDocumentProject create( PolyAlgArgs args, List chi @Override public LogicalDocumentProject copy( AlgTraitSet traitSet, List inputs ) { - return new LogicalDocumentProject( inputs.get( 0 ).getCluster(), traitSet, inputs.get( 0 ), includes, excludes ); + return new LogicalDocumentProject( inputs.get( 0 ).getCluster(), traitSet, inputs.get( 0 ), includes, excludes, adds ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index 7c98d9c8cb..c0b2b4e570 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -180,7 +180,7 @@ private ImmutableList> getNodePropertyValues( Immutabl ImmutableList.Builder row = ImmutableList.builder(); row.add( id ); row.add( getStringLiteral( entry.getKey().value, LABEL_TYPE ) ); - row.add( getStringLiteral( entry.getValue().toString(), VALUE_TYPE ) ); + row.add( getStringLiteral( entry.getValue().toJson(), VALUE_TYPE ) ); rows.add( row.build() ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java b/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java index 188325ad64..24e7ab90ca 100644 --- a/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java +++ b/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java @@ -1629,12 +1629,26 @@ public enum OperatorName { MQL_MERGE( LangFunctionOperator.class ), + MQL_MERGE_ADD( LangFunctionOperator.class ), + MQL_PROJECT_INCLUDES( LangFunctionOperator.class ), MQL_REPLACE_ROOT( LangFunctionOperator.class ), MQL_NOT_UNSET( LangFunctionOperator.class ), + MQL_GEO_INTERSECTS( LangFunctionOperator.class ), + + MQL_GEO_WITHIN( LangFunctionOperator.class ), + + MQL_GEO_DISTANCE( LangFunctionOperator.class ), + + MQL_NEAR( LangFunctionOperator.class ), + + MQL_NEAR_SPHERE( LangFunctionOperator.class ), + + MQL_GEO_NEAR( LangFunctionOperator.class ), + //------------------------------------------------------------- // OPENCYPHER OPERATORS //------------------------------------------------------------- @@ -1708,6 +1722,14 @@ public enum OperatorName { CYPHER_GEO_WITHIN( LangFunctionOperator.class ), + 
CYPHER_POINT( LangFunctionOperator.class ), + + CYPHER_WITHIN_BBOX( LangFunctionOperator.class ), + + CYPHER_WITHIN_GEOMETRY( LangFunctionOperator.class ), + + DISTANCE_NEO4J( LangFunctionOperator.class ), + // CROSS MODEL FUNCTION CROSS_MODEL_ITEM( LangFunctionOperator.class ), diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java index c85b7c4088..70f531ded4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java @@ -34,6 +34,7 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.logical.relational.LogicalRelProject; +import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats; import org.polypheny.db.algebra.polyalg.arguments.ListArg; @@ -456,6 +457,12 @@ private void appendOperands( RexCall call, StringBuilder sb ) { includeType = RexDigestIncludeType.NO_TYPE; } } + // Argument of CYPHER_POINT not used (=unknown). We can safely skip it. + if ( call.getOperator().getOperatorName() == OperatorName.CYPHER_POINT ) { + if ( ((RexLiteral) operand).value == null ) { + continue; + } + } sb.append( visitLiteral( (RexLiteral) operand, includeType ) ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/CalcMergeRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/CalcMergeRule.java index e787366552..2cd5cdcf09 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/CalcMergeRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/CalcMergeRule.java @@ -38,6 +38,7 @@ import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Calc; import org.polypheny.db.algebra.logical.relational.LogicalCalc; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgOptRuleCall; import org.polypheny.db.rex.RexOver; @@ -72,6 +73,11 @@ public void onMatch( AlgOptRuleCall call ) { final Calc topCalc = call.alg( 0 ); final Calc bottomCalc = call.alg( 1 ); + // RB: I had problems with merging programs, because the row type did not match. + if ( topCalc.getModel() == DataModel.DOCUMENT && bottomCalc.getModel() == DataModel.DOCUMENT ) { + return; + } + // Don't merge a calc which contains windowed aggregates onto a calc. That would effectively be pushing a windowed aggregate down through a filter. 
RexProgram topProgram = topCalc.getProgram(); if ( RexOver.containsOver( topProgram ) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java index 1228fb8b17..5287275c49 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/ScanRule.java @@ -53,7 +53,10 @@ public void onMatch( AlgOptRuleCall call ) { if ( oldAlg.getEntity().unwrap( TranslatableEntity.class ).isEmpty() ) { return; } - AlgNode newAlg = oldAlg.getEntity().unwrap( TranslatableEntity.class ).get().toAlg( oldAlg.getCluster(), oldAlg.getTraitSet() ); + TranslatableEntity one = oldAlg.getEntity().unwrap( TranslatableEntity.class ).orElseThrow(); + AlgCluster oldCluster = oldAlg.getCluster(); + AlgTraitSet newTraitSet = oldAlg.getTraitSet(); + AlgNode newAlg = one.toAlg( oldCluster, newTraitSet ); call.transformTo( newAlg ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/SortProjectTransposeRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/SortProjectTransposeRule.java index cf06cc684a..3bb041db38 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/SortProjectTransposeRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/SortProjectTransposeRule.java @@ -121,7 +121,7 @@ public void onMatch( AlgOptRuleCall call ) { sort.getTraitSet().replace( newCollation ), project.getInput(), newCollation, - null, + sort.getFieldExps(), sort.offset, sort.fetch ); AlgNode newProject = project.copy( sort.getTraitSet(), ImmutableList.of( newSort ) ); diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 3d39662cab..2814ac9cf5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -26,7 +26,6 @@ import java.util.function.Function; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; -import org.jetbrains.annotations.NotNull; import org.pf4j.ExtensionPoint; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.java.AdapterTemplate; diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java index f7b8143bca..8ca6c304b3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java @@ -26,7 +26,6 @@ import org.polypheny.db.catalog.entity.PolyObject; import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.catalog.logistic.EntityType; -import org.polypheny.db.transaction.locking.Lockable; @EqualsAndHashCode(callSuper = true) @Value diff --git a/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java b/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java index 5390f5159c..c862796033 100644 --- a/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/impl/PolyCatalog.java @@ -379,6 +379,7 @@ public void renameNamespace( long id, String name ) { @Override public void dropNamespace( long id ) { logicalCatalogs.remove( id ); + allocationCatalogs.remove( id ); change(); } diff --git a/core/src/main/java/org/polypheny/db/catalog/impl/logical/RelationalCatalog.java 
b/core/src/main/java/org/polypheny/db/catalog/impl/logical/RelationalCatalog.java index 8640125ee8..86aa1e52c3 100644 --- a/core/src/main/java/org/polypheny/db/catalog/impl/logical/RelationalCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/impl/logical/RelationalCatalog.java @@ -27,8 +27,8 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.Optional; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import lombok.Value; import lombok.experimental.SuperBuilder; @@ -255,10 +255,11 @@ public LogicalIndex addIndex( long tableId, List columnIds, boolean unique private long getOrAddKey( long tableId, List columnIds, EnforcementTime enforcementTime ) { - return getKey(tableId, columnIds, enforcementTime) + return getKey( tableId, columnIds, enforcementTime ) .orElse( addKey( tableId, columnIds, enforcementTime ) ); } - + + private Optional getKey( long tableId, List columnIds, EnforcementTime enforcementTime ) { return Catalog.snapshot() .rel() @@ -499,11 +500,11 @@ public void addForeignKey( long tableId, List columnIds, long referencesTa @Override public void addUniqueConstraint( long tableId, String constraintName, List columnIds, Statement statement ) { Optional keyId = getKey( tableId, columnIds, EnforcementTime.ON_QUERY ); - if (keyId.isPresent()) { + if ( keyId.isPresent() ) { // Check if there is already a unique constraint List logicalConstraints = constraints.values().stream() - .filter( c -> c.keyId == keyId.get() && c.type == ConstraintType.UNIQUE ) - .toList(); + .filter( c -> c.keyId == keyId.get() && c.type == ConstraintType.UNIQUE ) + .toList(); if ( !logicalConstraints.isEmpty() ) { throw new GenericRuntimeException( "There is already a unique constraint!" ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java index e6e990ac85..85924e87cb 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalRelSnapshotImpl.java @@ -422,7 +422,7 @@ public boolean isIndex( long keyId ) { @Override public boolean isConstraint( long keyId ) { - return constraints.entrySet().stream().anyMatch( c -> c.getValue().getKeyId() == keyId ); + return constraints.entrySet().stream().anyMatch( c -> c.getValue().getKeyId() == keyId ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java index f5a8e7fd12..80b9f9d2c5 100644 --- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java +++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/SnapshotImpl.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -145,7 +145,7 @@ public List getAdapters() { @Override public @NotNull Optional getAdapter( String uniqueName ) { - return Optional.ofNullable( adapterNames.get( uniqueName ) ); + return Optional.ofNullable( adapterNames.get( uniqueName.toLowerCase() ) ); } diff --git a/core/src/main/java/org/polypheny/db/functions/CypherFunctions.java b/core/src/main/java/org/polypheny/db/functions/CypherFunctions.java index aad3fe2921..5d4944e77a 100644 --- a/core/src/main/java/org/polypheny/db/functions/CypherFunctions.java +++ b/core/src/main/java/org/polypheny/db/functions/CypherFunctions.java @@ -16,16 +16,26 @@ package org.polypheny.db.functions; +import static java.lang.Math.toRadians; +import static org.polypheny.db.functions.spatial.GeoDistanceFunctions.EARTH_RADIUS_M; +import static org.polypheny.db.type.entity.spatial.PolyGeometry.WGS_84; +import static org.polypheny.db.type.entity.spatial.PolyGeometry.WGS_84_3D; + import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Linq4j; import org.apache.calcite.linq4j.function.Deterministic; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.GeometryFactory; +import org.locationtech.jts.geom.PrecisionModel; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.type.entity.PolyBoolean; import org.polypheny.db.type.entity.PolyList; import org.polypheny.db.type.entity.PolyString; @@ -38,7 +48,11 @@ import org.polypheny.db.type.entity.graph.PolyGraph; import org.polypheny.db.type.entity.graph.PolyNode; import org.polypheny.db.type.entity.graph.PolyPath; +import org.polypheny.db.type.entity.numerical.PolyDouble; import org.polypheny.db.type.entity.relational.PolyMap; +import org.polypheny.db.type.entity.spatial.GeometryTopologicalException; +import org.polypheny.db.type.entity.spatial.PolyGeometry; +import org.polypheny.db.type.entity.spatial.PolyPoint; @Deterministic @@ -422,4 +436,272 @@ public static PolyBoolean like( PolyValue b0, PolyValue b1 ) { } + /** + * 5 possible argument names with values: + * - 3 coordinates + * - 2 options + */ + @SuppressWarnings("unused") + public static PolyGeometry point( + PolyValue argName1, + PolyValue argValue1, + PolyValue argName2, + PolyValue argValue2, + PolyValue argName3, + PolyValue argValue3, + PolyValue argName4, + PolyValue argValue4, + PolyValue argName5, + PolyValue argValue5 ) { + Map map = new HashMap<>(); + if ( argName1 != null && argValue1 != null ) { + map.put( argName1, argValue1 ); + } + if ( argName2 != null && argValue2 != null ) { + map.put( argName2, argValue2 ); + } + if ( argName3 != null && argValue3 != null ) { + map.put( argName3, argValue3 ); + } + if ( argName4 != null && argValue4 != null ) { + map.put( argName4, argValue4 ); + } + if ( argName5 != null && argValue5 != null ) { + map.put( argName5, argValue5 ); + } + return point( PolyMap.of( map ) ); + } + + + @SuppressWarnings("unused") + public static PolyGeometry point( PolyValue map ) { + if ( !map.isMap() ) { + throw new GenericRuntimeException( "point() expects a map." 
); + } + + PolyMap polyMap = map.asMap(); + PolyString x = new PolyString( "x" ); + PolyString y = new PolyString( "y" ); + PolyString z = new PolyString( "z" ); + PolyString longitude = new PolyString( "longitude" ); + PolyString latitude = new PolyString( "latitude" ); + PolyString height = new PolyString( "height" ); + PolyString srid = new PolyString( "srid" ); + PolyString crs = new PolyString( "crs" ); + + // In Cypher, it is possible to define the following four SRIDs + // 7203: cartesian with 2 coordinates (no relation to ESPG:7203) -> 0 + // 9157: cartesian with 3 coordinates (no relation to ESPG:9157) -> 0 + // 4326: spherical with 2 coordinates -> 4326 + // 4979: spherical with 3 coordinates -> 4979 + int SRID = 0; + Coordinate coordinate = new Coordinate(); + + if ( polyMap.containsKey( x ) && polyMap.containsKey( y ) ) { + if ( polyMap.get( x ).isNull() ) { + return null; + } + coordinate.setX( convertPolyValueToDouble( polyMap.get( x ) ) ); + if ( polyMap.get( y ).isNull() ) { + return null; + } + coordinate.setY( convertPolyValueToDouble( polyMap.get( y ) ) ); + if ( polyMap.containsKey( z ) ) { + if ( polyMap.get( z ).isNull() ) { + return null; + } + coordinate.setZ( convertPolyValueToDouble( polyMap.get( z ) ) ); + } + + if ( polyMap.containsKey( srid ) ) { + SRID = switch ( polyMap.get( srid ).asInteger().intValue() ) { + case WGS_84 -> WGS_84; + case WGS_84_3D -> WGS_84_3D; + default -> 0; + }; + } else if ( polyMap.containsKey( crs ) ) { + SRID = switch ( polyMap.get( crs ).asString().value ) { + case "WGS-84-2D" -> WGS_84; + case "WGS-84-3D" -> WGS_84_3D; + default -> 0; + }; + } + + } else if ( polyMap.containsKey( longitude ) && polyMap.containsKey( latitude ) ) { + if ( polyMap.get( longitude ).isNull() ) { + return null; + } + coordinate.setX( convertPolyValueToDouble( polyMap.get( longitude ) ) ); + if ( polyMap.get( latitude ).isNull() ) { + return null; + } + coordinate.setY( convertPolyValueToDouble( polyMap.get( latitude ) ) ); + if ( polyMap.containsKey( height ) ) { + if ( polyMap.get( height ).isNull() ) { + return null; + } + coordinate.setZ( convertPolyValueToDouble( polyMap.get( height ) ) ); + SRID = WGS_84_3D; + } else { + SRID = WGS_84; + } + } + GeometryFactory geometryFactory = new GeometryFactory( new PrecisionModel(), SRID ); + return PolyGeometry.of( geometryFactory.createPoint( coordinate ) ); + } + + + @SuppressWarnings("unused") + public static PolyDouble distance( PolyValue p1, PolyValue p2 ) { + PolyPoint g1 = p1.asGeometry().asPoint(); + PolyPoint g2 = p2.asGeometry().asPoint(); + + if ( !Objects.equals( g1.getSRID(), g2.getSRID() ) ) { + throw new GenericRuntimeException( "Cannot compute point.distance(%s, %s) because of different SRIDs.".formatted( g1, g2 ) ); + } + Integer srid = g1.getSRID(); + + try { + if ( g1.hasZ() && g2.hasZ() ) { + if ( srid == 0 ) { + return new PolyDouble( + Math.sqrt( Math.pow( g2.getX() - g1.getX(), 2 ) + Math.pow( g2.getY() - g1.getY(), 2 ) + Math.pow( g2.getZ() - g1.getZ(), 2 ) ) + ); + } else if ( srid == WGS_84_3D ) { + // See https://github.com/neo4j/neo4j/blob/5.20/community/values/src/main/java/org/neo4j/values/storable/CRSCalculator.java + double greatCircleDistance = getGreatCircleDistance( g1, g2 ); + double avgHeight = (g1.getZ() + g2.getZ()) / 2; + // Note: Neo4j uses a different earth radius of 6378140.0, which is why the same calculation + // in Neo4j does not yield (exactly) the same results. 
+ double distance2D = (EARTH_RADIUS_M + avgHeight) * greatCircleDistance; + double heightDifference = g1.getZ() - g2.getZ(); + return new PolyDouble( Math.sqrt( Math.pow( distance2D, 2 ) + Math.pow( heightDifference, 2 ) ) ); + } + } else { + return new PolyDouble( g1.distance( g2 ) ); + } + } catch ( GeometryTopologicalException e ) { + throw new GenericRuntimeException( e ); + } + + throw new GenericRuntimeException( "This should not be possible!" ); + } + + + @SuppressWarnings("unused") + public static PolyDouble distanceNeo4j( PolyValue p1, PolyValue p2 ) { + PolyPoint g1 = p1.asGeometry().asPoint(); + PolyPoint g2 = p2.asGeometry().asPoint(); + + if ( !Objects.equals( g1.getSRID(), g2.getSRID() ) ) { + throw new GenericRuntimeException( "Cannot compute point.distance(%s, %s) because of different SRIDs.".formatted( g1, g2 ) ); + } + Integer srid = g1.getSRID(); + + try { + if ( g1.hasZ() && g2.hasZ() ) { + if ( srid == 0 ) { + return distance( g1, g2 ); + } else if ( srid == WGS_84_3D ) { + // See https://github.com/neo4j/neo4j/blob/5.20/community/values/src/main/java/org/neo4j/values/storable/CRSCalculator.java + double greatCircleDistance = getGreatCircleDistance( g1, g2 ); + double avgHeight = (g1.getZ() + g2.getZ()) / 2; + final double EARTH_RADIUS_M_NEO4J = 6378140.0; + double distance2D = (EARTH_RADIUS_M_NEO4J + avgHeight) * greatCircleDistance; + double heightDifference = g1.getZ() - g2.getZ(); + return new PolyDouble( Math.sqrt( Math.pow( distance2D, 2 ) + Math.pow( heightDifference, 2 ) ) ); + } + } else { + return new PolyDouble( g1.distance( g2 ) ); + } + } catch ( GeometryTopologicalException e ) { + throw new GenericRuntimeException( e ); + } + + throw new GenericRuntimeException( "This should not be possible!" ); + } + + + /** + * Uses the same logic as Neo4j to calculate the spherical distance. + * See: https://github.com/neo4j/neo4j/blob/5.20/community/values/src/main/java/org/neo4j/values/storable/CRSCalculator.java + */ + private static double getGreatCircleDistance( PolyPoint g1, PolyPoint g2 ) { + double lat1 = toRadians( g1.getY() ); + double lat2 = toRadians( g2.getY() ); + double latDifference = lat2 - lat1; + double lonDifference = toRadians( g2.getX() - g1.getX() ); + double alpha = Math.pow( Math.sin( latDifference / 2 ), 2 ) + + Math.cos( lat1 ) * Math.cos( lat2 ) * Math.pow( Math.sin( lonDifference / 2 ), 2 ); + return 2.0 * Math.atan2( Math.sqrt( alpha ), Math.sqrt( 1 - alpha ) ); + } + + + @SuppressWarnings("unused") + public static PolyBoolean withinBBox( PolyValue point, PolyValue lowerLeft, PolyValue upperRight ) { + PolyPoint g = point.asGeometry().asPoint(); + PolyPoint lowerLeftGeometry = lowerLeft.asGeometry().asPoint(); + PolyPoint upperRightGeometry = upperRight.asGeometry().asPoint(); + + if ( !(g.getSRID().equals( lowerLeftGeometry.getSRID() ) && lowerLeftGeometry.getSRID().equals( upperRightGeometry.getSRID() )) ) { + // Return null if the CRS of all points are not the same. + //return PolyNull.NULL; + return null; + } + if ( !(g.hasZ() == lowerLeftGeometry.hasZ() && lowerLeftGeometry.hasZ() == upperRightGeometry.hasZ()) ) { + // Return null if not all points have the same number of dimensions.
+ //return PolyNull.NULL; + return null; + } + + PolyGeometry gBBox = createBbox( lowerLeftGeometry, upperRightGeometry ); + return new PolyBoolean( g.coveredBy( gBBox ) ); + } + + + @SuppressWarnings("unused") + public static PolyBoolean withinGeometry( PolyValue point, PolyValue geometry ) { + PolyPoint g = point.asGeometry().asPoint(); + PolyGeometry polyGeometry = geometry.asGeometry(); + + if ( !(g.getSRID().equals( polyGeometry.getSRID() )) ) { + // Return null if the CRS of all points are not the same. + return null; + } + + return new PolyBoolean( g.coveredBy( polyGeometry ) ); + } + + + private static PolyGeometry createBbox( PolyPoint lowerLeft, PolyPoint upperRight ) { + Coordinate bottomLeft = new Coordinate( lowerLeft.getX(), lowerLeft.getY() ); + Coordinate topRight = new Coordinate( upperRight.getX(), upperRight.getY() ); + Coordinate topLeft = new Coordinate( bottomLeft.x, topRight.y ); + Coordinate bottomRight = new Coordinate( topRight.x, bottomLeft.y ); + // Form a closed Ring, starting on the bottom left and going clockwise. + Coordinate[] linearRing = new Coordinate[]{ + bottomLeft, + topLeft, + topRight, + bottomRight, + bottomLeft + }; + GeometryFactory geoFactory = new GeometryFactory(); + return new PolyGeometry( geoFactory.createPolygon( linearRing ) ); + } + + + private static double convertPolyValueToDouble( PolyValue value ) { + // This should be sufficient, as all numerical values from Cypher are stored as BigDecimal. + if ( value.isString() ) { + return Double.parseDouble( value.toString() ); + } + if ( value.isDouble() ) { + return value.asDouble().doubleValue(); + } + + assert value.isBigDecimal() : "Extend method to handle other numerical data types."; + return Objects.requireNonNull( value.asBigDecimal().getValue() ).doubleValue(); + } + } diff --git a/core/src/main/java/org/polypheny/db/functions/MqlFunctions.java b/core/src/main/java/org/polypheny/db/functions/MqlFunctions.java index 92be76c405..aaf17e78b9 100644 --- a/core/src/main/java/org/polypheny/db/functions/MqlFunctions.java +++ b/core/src/main/java/org/polypheny/db/functions/MqlFunctions.java @@ -16,6 +16,8 @@ package org.polypheny.db.functions; +import static org.polypheny.db.functions.spatial.GeoDistanceFunctions.EARTH_RADIUS_M; + import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -23,6 +25,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.function.Function; import java.util.function.Supplier; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -30,8 +33,13 @@ import org.apache.commons.lang3.NotImplementedException; import org.bson.BsonDocument; import org.bson.BsonValue; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.GeometryFactory; +import org.locationtech.jts.geom.Point; +import org.locationtech.jts.geom.PrecisionModel; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.schema.document.DocumentUtil; +import org.polypheny.db.type.PolyType; import org.polypheny.db.type.entity.PolyBoolean; import org.polypheny.db.type.entity.PolyList; import org.polypheny.db.type.entity.PolyNull; @@ -39,7 +47,11 @@ import org.polypheny.db.type.entity.PolyValue; import org.polypheny.db.type.entity.category.PolyNumber; import org.polypheny.db.type.entity.document.PolyDocument; +import org.polypheny.db.type.entity.numerical.PolyFloat; import org.polypheny.db.type.entity.numerical.PolyInteger; +import 
org.polypheny.db.type.entity.spatial.GeometryTopologicalException; +import org.polypheny.db.type.entity.spatial.InvalidGeometryException; +import org.polypheny.db.type.entity.spatial.PolyGeometry; import org.polypheny.db.util.Pair; @@ -57,9 +69,16 @@ private MqlFunctions() { @SuppressWarnings("UnusedDeclaration") public static PolyValue docQueryValue( PolyValue input, List filters ) { - if ( input == null || !input.isDocument() ) { + if ( input == null ) { return null; } + if ( !input.isDocument() ) { + if ( input.isString() ) { + input = PolyDocument.fromTypedJson( input.asString().value, PolyDocument.class ); + } else { + return null; + } + } PolyValue temp = input; for ( PolyString filter : filters ) { if ( !temp.isDocument() ) { @@ -425,10 +444,26 @@ public static PolyValue docItem( PolyValue input, int index ) { public static PolyDocument mergeDocument( PolyValue value, List> names, PolyValue... documents ) { assert names.size() == documents.length; Map doc = new HashMap<>(); + addFieldsToDoc( doc, names, documents ); + return PolyDocument.ofDocument( doc ); + } + + + @SuppressWarnings("UnusedDeclaration") + public static PolyDocument mergeDocumentAdd( PolyValue value, List> names, PolyValue... documents ) { + assert names.size() == documents.length; + Map doc = new HashMap<>(); + if ( value.isDocument() ) { + doc.putAll( value.asDocument() ); + } + addFieldsToDoc( doc, names, documents ); + return PolyDocument.ofDocument( doc ); + } + + private static void addFieldsToDoc( Map doc, List> names, PolyValue... documents ) { Iterator iter; Map temp; - for ( int i = 0; i < documents.length; i++ ) { if ( documents[i].isDocument() && documents[i].asDocument().isUnset ) { continue; @@ -450,7 +485,6 @@ public static PolyDocument mergeDocument( PolyValue value, List 0 ) { + if ( geometryFilter.getSRID() != 0 ) { + // In the case of $centerSphere, we first have to convert radians to meters. + distanceValue = EARTH_RADIUS_M * distanceValue; + } + return inputGeometry.isWithinDistance( geometryFilter, distanceValue ) ? PolyBoolean.TRUE : PolyBoolean.FALSE; + } + // coveredBy also works if the input geometry lies along the edges of the filter geometry. + // For example: A point [0,0] is inside a box [0,0 to 1,1], because it lies on a corner / edge. + return inputGeometry.coveredBy( geometryFilter ) ? 
PolyBoolean.TRUE : PolyBoolean.FALSE; + } catch ( GeometryTopologicalException e ) { + throw new GenericRuntimeException( "$geometry could not be parsed as GeoJSON" ); + } + } + + + @SuppressWarnings("UnusedDeclaration") + public static PolyNumber docGeoDistance( PolyValue input, PolyValue geometry, PolyValue distanceMultiplier ) { + PolyGeometry geometryFilter = geometry.asGeometry(); + PolyGeometry inputGeometry = convertInputToPolyGeometry( input, geometryFilter.getSRID() ); + PolyNumber distance = GeoFunctions.stDistance( inputGeometry, geometryFilter ); + float distanceFloat = distance.asFloat().floatValue(); + + if ( distanceMultiplier.isInteger() ) { + int distanceMultiplierInt = distanceMultiplier.asInteger().intValue(); + return PolyFloat.of( distance.asFloat().floatValue() * distanceMultiplierInt ); + } else if ( distanceMultiplier.isFloat() ) { + float distanceMultiplierFloat = distanceMultiplier.asFloat().floatValue(); + return PolyFloat.of( distance.asFloat().floatValue() * distanceMultiplierFloat ); + } else if ( distanceMultiplier.isDouble() ) { + double distanceMultiplierDouble = distanceMultiplier.asDouble().doubleValue(); + return PolyFloat.of( distance.asFloat().floatValue() * distanceMultiplierDouble ); + } + // TODO: Is this exhaustive? Is there a better way? + throw new GenericRuntimeException( "Probably forgot to implement conversion for distanceMultiplier in docGeoDistance." ); + } + +// @SuppressWarnings("UnusedDeclaration") +// public static PolyBoolean docNear( PolyValue input, PolyValue geometry, PolyValue minDistance, PolyValue maxDistance ) { +// throw new GenericRuntimeException( " Something went wrong in the DocumentNearUnwrap conversion rule. This method should not be called. " ); +// } +// +// @SuppressWarnings("UnusedDeclaration") +// public static PolyBoolean docNearSphere( PolyValue input, PolyValue geometry, PolyValue minDistance, PolyValue maxDistance ) { +// throw new GenericRuntimeException( " Something went wrong in the DocumentNearUnwrap conversion rule. This method should not be called. " ); +// } + + + /** + * Converts a PolyValue into a PolyGeometry type. We support the following cases: + * 1. Legacy Coordinates: + * - Array: The input value is a list of 2 numbers (integer, double), which represent the + * x and y values. + * - Embedded Document: The input value is a document with two key-value pairs. The first + * will be the x and the second will be the y value. + * Points are created without an SRID, which means that calculations will be done in the cartesian coordinate system by default. + * 2. GeoJSON: + * - A document that adheres to the GeoJSON specification. + */ + public static PolyGeometry convertInputToPolyGeometry( PolyValue input, Integer srid ) { + if ( input.isGeometry() ) { + return input.asGeometry(); + } + + GeometryFactory geoFactory = new GeometryFactory( new PrecisionModel(), srid ); + + // Legacy Coordinates + if ( input.isList() ) { + PolyList inputList = input.asList(); + + Function getDouble = ( PolyValue polyValue ) -> { + Double result = null; + if ( polyValue.isDouble() ) { + result = polyValue.asDouble().value; + } + if ( polyValue.isInteger() ) { + Integer intValue = polyValue.asInteger().value; + if ( intValue != null ) { + result = (double) intValue; + } + } + if ( result == null ) { + throw new GenericRuntimeException( "Legacy Coordinates need to be of type INTEGER or DOUBLE."
); + } + return result; + }; + + if ( inputList.size() != 2 ) { + throw new GenericRuntimeException( "Legacy Coordinate Pairs stored as Array can only be of the form [x,y]" ); + } + + Double x = getDouble.apply( inputList.get( 0 ) ); + Double y = getDouble.apply( inputList.get( 1 ) ); + Coordinate coordinate = new Coordinate( x, y ); + Point point = geoFactory.createPoint( coordinate ); + return new PolyGeometry( point ); + } + + // Embedded Document + if ( input.isDocument() ) { + PolyDocument inputDocument = input.asDocument(); + + if ( inputDocument.keySet().isEmpty() ) { + // TODO: Should we detect this in each spatial function, and then just return false, if the document + // does not contain the field / document? + throw new GenericRuntimeException( "Field selected in query is empty or does not exist." ); + } + + try { + // In GeoJSON, WGS84 is assumed by default. This is also the case for MongoDB. + return PolyGeometry.fromGeoJson( inputDocument.toJson() ); + } catch ( InvalidGeometryException e ) { + + // If the documents contain two fields that are of numeric type, then the user wanted to + // use legacy coordinates embedded inside a document. This is currently not supported, because + // document fields are by definition unordered, and supporting this would require a lot of changes. + if ( inputDocument.size() == 2 && inputDocument.values().stream().allMatch( value -> PolyType.NUMERIC_TYPES.contains( value.getType() ) ) ) { + throw new GenericRuntimeException( "Legacy coordinates inside an embedded document is currently not supported, as document fields are not ordered.", e ); + } + throw new GenericRuntimeException( "$geometry operand of $geoIntersects could not be parsed as GeoJSON.", e ); + } + } + + throw new GenericRuntimeException( "Geometry type could not be determined." 
); + } + + /** * Transforms a provided Bson object into a primitive from * diff --git a/core/src/main/java/org/polypheny/db/functions/spatial/GeoDistanceFunctions.java b/core/src/main/java/org/polypheny/db/functions/spatial/GeoDistanceFunctions.java index 326667c594..dfcb89e562 100644 --- a/core/src/main/java/org/polypheny/db/functions/spatial/GeoDistanceFunctions.java +++ b/core/src/main/java/org/polypheny/db/functions/spatial/GeoDistanceFunctions.java @@ -28,7 +28,7 @@ public class GeoDistanceFunctions { // Define the radius of the Earth's sphere (in meters) - private static final double EARTH_RADIUS_M = 6371.0 * 1000; + public static final double EARTH_RADIUS_M = 6371.0 * 1000; private GeoDistanceFunctions() { diff --git a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java index 94ecbf1f97..af168391f8 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java +++ b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java @@ -46,6 +46,7 @@ import java.util.ArrayList; import java.util.Calendar; import java.util.Date; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -85,6 +86,7 @@ import org.polypheny.db.type.entity.category.PolyNumber; import org.polypheny.db.type.entity.numerical.PolyBigDecimal; import org.polypheny.db.type.entity.numerical.PolyDouble; +import org.polypheny.db.type.entity.relational.PolyMap; import org.polypheny.db.type.entity.temporal.PolyDate; import org.polypheny.db.type.entity.temporal.PolyTime; import org.polypheny.db.type.entity.temporal.PolyTimestamp; @@ -952,7 +954,6 @@ public RexNode ensureType( AlgDataType type, RexNode node, boolean matchNullabil // Special handling for arrays if ( node instanceof RexCall && ((RexCall) node).op.getKind() == Kind.ARRAY_VALUE_CONSTRUCTOR ) { ArrayType arrayType = (ArrayType) node.getType(); - log.warn( "why" ); return new RexLiteral( (PolyValue) List.of( ((RexCall) node).operands ), arrayType, arrayType.getPolyType() ); } else if ( !node.getType().equals( targetType ) ) { return makeCast( targetType, node ); @@ -1232,11 +1233,17 @@ private RexLiteral makeArray( List value, AlgDataType type, boolean a /** * Converts the type of value to comply with {@link RexLiteral#valueMatchesType}. 
*/ - private static PolyValue clean( Object o, AlgDataType type ) { + private PolyValue clean( Object o, AlgDataType type ) { if ( o == null ) { return null; } - switch ( type.getPolyType() ) { + + PolyType polyType = type.getPolyType(); + if ( polyType == PolyType.ANY ) { + polyType = guessType( o ).getPolyType(); + } + + switch ( polyType ) { case TINYINT: case SMALLINT: case INTEGER: @@ -1331,11 +1338,20 @@ private static PolyValue clean( Object o, AlgDataType type ) { } break; case ARRAY: - ArrayType arrayType = (ArrayType) type; + AlgDataType compType; + List unknownList = (List) o; + if ( type instanceof ArrayType arrayType ) { + compType = arrayType.getComponentType(); + } else if ( !unknownList.isEmpty() ) { + compType = guessType( unknownList.get( 0 ) ); + } else { + throw new AssertionError(); + } + List list = new ArrayList<>(); - for ( Object object : (List) o ) { - list.add( clean( object, arrayType.getComponentType() ) ); + for ( Object object : unknownList ) { + list.add( clean( object, compType ) ); } return PolyList.copyOf( list ); case BOOLEAN: @@ -1355,6 +1371,13 @@ private static PolyValue clean( Object o, AlgDataType type ) { return PolyBinary.of( new ByteString( bytes ) ); } break; + case MAP: + if ( o instanceof PolyMap map ) { + return map; + } else if ( o instanceof Map map ) { + return PolyMap.of( map ); + } + break; default: if ( o instanceof PolyValue ) { return (PolyValue) o; @@ -1378,11 +1401,20 @@ private AlgDataType guessType( Object value ) { if ( value instanceof Boolean ) { return typeFactory.createPolyType( PolyType.BOOLEAN ); } - if ( value instanceof String ) { - return typeFactory.createPolyType( PolyType.CHAR, ((String) value).length() ); + if ( value instanceof String string ) { + return typeFactory.createPolyType( PolyType.CHAR, string.length() ); + } + if ( value instanceof ByteString string ) { + return typeFactory.createPolyType( PolyType.BINARY, string.length() ); + } + if ( value instanceof PolyList list ) { + if ( list.isEmpty() ) { + throw new RuntimeException( "List is empty, cannot derive the type automatically!" 
); + } + return typeFactory.createArrayType( guessType( list.get( 0 ) ), list.size() ); } - if ( value instanceof ByteString ) { - return typeFactory.createPolyType( PolyType.BINARY, ((ByteString) value).length() ); + if ( value instanceof PolyBigDecimal ) { + return typeFactory.createPolyType( PolyType.BIGINT ); } throw new AssertionError( "unknown type " + value.getClass() ); } @@ -1411,7 +1443,12 @@ public RexLiteral makeArray( AlgDataType type, List operands ) { public RexLiteral makeMap( AlgDataType type, Map operands ) { - return new RexLiteral( null, type, type.getPolyType() ); // todo fix this + Map map = new HashMap<>(); + operands.forEach( ( key, value ) -> { + map.put( ((RexLiteral) key).value, ((RexLiteral) value).value ); + } ); + + return new RexLiteral( PolyMap.of( map ), type, type.getPolyType() ); // todo fix this } diff --git a/core/src/main/java/org/polypheny/db/rex/RexLiteral.java b/core/src/main/java/org/polypheny/db/rex/RexLiteral.java index 025a253114..f3e46196a6 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexLiteral.java +++ b/core/src/main/java/org/polypheny/db/rex/RexLiteral.java @@ -293,6 +293,7 @@ public static boolean valueMatchesType( PolyValue value, PolyType typeName, bool case PATH -> value.isPath(); case MAP -> value.isMap(); case DOCUMENT -> true; + case GEOMETRY -> value.isGeometry(); default -> throw Util.unexpected( typeName ); }; } @@ -515,6 +516,10 @@ private static void printAsJava( PolyValue value, PrintWriter pw, PolyType typeN pw.println( value ); } break; + case GEOMETRY: + assert value.isGeometry(); + pw.print( value.asGeometry().toWKT() ); + break; default: assert valueMatchesType( value, typeName, true ); throw Util.needToImplement( typeName ); diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 2678bea70e..1b480dab19 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -1385,7 +1385,7 @@ public AlgBuilder documentScan( Entity collection ) { public AlgBuilder documentProject( Map includes, List excludes ) { - stack.add( new Frame( LogicalDocumentProject.create( build(), includes, excludes ) ) ); + stack.add( new Frame( LogicalDocumentProject.create( build(), includes, excludes, Map.of() ) ) ); return this; } diff --git a/core/src/main/java/org/polypheny/db/tools/Programs.java b/core/src/main/java/org/polypheny/db/tools/Programs.java index 788b6891bd..687e4aacd8 100644 --- a/core/src/main/java/org/polypheny/db/tools/Programs.java +++ b/core/src/main/java/org/polypheny/db/tools/Programs.java @@ -49,6 +49,8 @@ import org.polypheny.db.algebra.enumerable.common.EnumerableModifyToStreamerRule; import org.polypheny.db.algebra.enumerable.document.DocumentAggregateToAggregateRule; import org.polypheny.db.algebra.enumerable.document.DocumentFilterToCalcRule; +import org.polypheny.db.algebra.enumerable.document.DocumentGeoNearUnwrap; +import org.polypheny.db.algebra.enumerable.document.DocumentNearUnwrap; import org.polypheny.db.algebra.enumerable.document.DocumentProjectToCalcRule; import org.polypheny.db.algebra.enumerable.document.DocumentSortToSortRule; import org.polypheny.db.algebra.metadata.AlgMetadataProvider; @@ -105,6 +107,8 @@ public class Programs { EnumerableRules.ENUMERABLE_PROJECT_TO_CALC_RULE, DocumentProjectToCalcRule.INSTANCE, DocumentFilterToCalcRule.INSTANCE, + DocumentNearUnwrap.INSTANCE, + DocumentGeoNearUnwrap.INSTANCE, 
DocumentAggregateToAggregateRule.INSTANCE, DocumentSortToSortRule.INSTANCE, CalcMergeRule.INSTANCE, @@ -161,6 +165,8 @@ public class Programs { EnumerableRules.ENUMERABLE_PROJECT_TO_CALC_RULE, DocumentProjectToCalcRule.INSTANCE, DocumentFilterToCalcRule.INSTANCE, + DocumentNearUnwrap.INSTANCE, + DocumentGeoNearUnwrap.INSTANCE, DocumentAggregateToAggregateRule.INSTANCE, DocumentSortToSortRule.INSTANCE, SemiJoinRules.PROJECT, diff --git a/core/src/main/java/org/polypheny/db/type/PolyTypeUtil.java b/core/src/main/java/org/polypheny/db/type/PolyTypeUtil.java index 538921a6f8..5843e01071 100644 --- a/core/src/main/java/org/polypheny/db/type/PolyTypeUtil.java +++ b/core/src/main/java/org/polypheny/db/type/PolyTypeUtil.java @@ -519,6 +519,12 @@ public static boolean canAssignFrom( AlgDataType toType, AlgDataType fromType ) return false; } + if ( toType.getPolyType().getFamily() == PolyTypeFamily.GEO ) { + if ( fromType.getPolyType() == PolyType.CHAR || fromType.getPolyType() == PolyType.VARCHAR ) { + return true; + } + } + return toType.getFamily() == fromType.getFamily(); } diff --git a/core/src/main/java/org/polypheny/db/type/entity/PolyValue.java b/core/src/main/java/org/polypheny/db/type/entity/PolyValue.java index 8aafe529da..6fbff0db74 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/PolyValue.java +++ b/core/src/main/java/org/polypheny/db/type/entity/PolyValue.java @@ -96,6 +96,7 @@ import org.polypheny.db.type.entity.numerical.PolyLong.PolyLongSerializerDef; import org.polypheny.db.type.entity.relational.PolyMap; import org.polypheny.db.type.entity.relational.PolyMap.PolyMapSerializerDef; +import org.polypheny.db.type.entity.spatial.InvalidGeometryException; import org.polypheny.db.type.entity.spatial.PolyGeometry; import org.polypheny.db.type.entity.spatial.PolyGeometry.PolyGeometryDeserializer; import org.polypheny.db.type.entity.spatial.PolyGeometry.PolyGeometrySerializer; @@ -572,8 +573,9 @@ public boolean isDocument() { public PolyDocument asDocument() { if ( isDocument() ) { return (PolyDocument) this; + } else { + return new PolyDocument( PolyString.of( "value" ), this ); } - throw cannotParse( this, PolyDocument.class ); } @@ -875,6 +877,30 @@ public boolean isGeometry() { @NotNull public PolyGeometry asGeometry() { + if ( this.type == PolyType.VARCHAR ) { + String value = this.asString().getValue(); + PolyGeometry geometry = PolyGeometry.of( value ); + if ( geometry == null ) { + if ( value.startsWith( "ST_GeomFromText" ) ) { + value = value.replace( "ST_GeomFromText", "" ); + value = value.trim(); + value = value.substring( 1, value.length() - 2 ); + String[] splits = value.split( "," ); + try { + if ( splits.length == 2 ) { + return PolyGeometry.fromWKT( splits[0].replace( "'", "" ), Integer.parseInt( splits[1].trim() ) ); + } + return PolyGeometry.fromWKT( value ); + } catch ( InvalidGeometryException e ) { + throw cannotParse( this, PolyGeometry.class ); + } + } + + throw cannotParse( this, PolyGeometry.class ); + } + return geometry; + } + if ( isGeometry() ) { return (PolyGeometry) this; } @@ -901,7 +927,7 @@ public static PolyValue convert( PolyValue value, PolyType type ) { switch ( type ) { case INTEGER: return PolyInteger.convert( value ); - case DOCUMENT: + case DOCUMENT, GEOMETRY: // docs accept all return value; case BIGINT: diff --git a/core/src/main/java/org/polypheny/db/type/entity/graph/PolyPath.java b/core/src/main/java/org/polypheny/db/type/entity/graph/PolyPath.java index ca303bffff..ef57589be3 100644 --- 
a/core/src/main/java/org/polypheny/db/type/entity/graph/PolyPath.java +++ b/core/src/main/java/org/polypheny/db/type/entity/graph/PolyPath.java @@ -189,6 +189,14 @@ public GraphObject get( PolyString name ) { } } + int i = 0; + for ( PolyString n : this.names ) { + if ( name.equals( n ) ) { + return path.get( i ); + } + i++; + } + return null; } diff --git a/core/src/main/java/org/polypheny/db/type/entity/spatial/PolyGeometry.java b/core/src/main/java/org/polypheny/db/type/entity/spatial/PolyGeometry.java index 38824509de..a02758b0d4 100644 --- a/core/src/main/java/org/polypheny/db/type/entity/spatial/PolyGeometry.java +++ b/core/src/main/java/org/polypheny/db/type/entity/spatial/PolyGeometry.java @@ -47,7 +47,9 @@ import org.apache.commons.lang3.NotImplementedException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; +import org.locationtech.jts.geom.Point; import org.locationtech.jts.geom.TopologyException; import org.locationtech.jts.io.ParseException; import org.locationtech.jts.io.WKTReader; @@ -55,6 +57,7 @@ import org.locationtech.jts.io.geojson.GeoJsonWriter; import org.locationtech.jts.io.twkb.TWKBReader; import org.locationtech.jts.io.twkb.TWKBWriter; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.functions.spatial.GeoDistanceFunctions; import org.polypheny.db.type.PolySerializable; import org.polypheny.db.type.PolyType; @@ -78,6 +81,8 @@ public class PolyGeometry extends PolyValue { public static final int NO_SRID = 0; // World Geodetic System 1984; default for GeoJSON public static final int WGS_84 = 4326; + // WGS84 with 3 dimensions (+height) + public static final int WGS_84_3D = 4979; /** * Wrap the JTS {@link Geometry} class. @@ -145,7 +150,7 @@ public PolyGeometry( String wkt, int srid ) throws InvalidGeometryException { * @param inputFormat describes the representation format of the geometry * @throws InvalidGeometryException if {@link PolyGeometry} is invalid or provided input is invalid. */ - private PolyGeometry( String input, int srid, GeometryInputFormat inputFormat ) throws InvalidGeometryException { + public PolyGeometry( String input, int srid, GeometryInputFormat inputFormat ) throws InvalidGeometryException { this( PolyType.GEOMETRY ); switch ( inputFormat ) { case WKT: @@ -197,6 +202,20 @@ public static PolyGeometry ofNullable( String wkt ) { } + + /** + * Used for generated code, so that we do not have to add handling for the InvalidGeometryException. + */ + @SuppressWarnings("UnusedDeclaration") + public static PolyGeometry ofOrThrow( String wkt ) { + try { + return new PolyGeometry( wkt ); + } catch ( InvalidGeometryException e ) { + // hack to deal with the fact that InvalidGeometryException is not caught in code generation + throw new GenericRuntimeException( e ); + } + } + + + public static PolyGeometry ofNullable( @Nullable PolyString wkt ) { return wkt == null ? null : of( wkt.value ); } @@ -881,8 +900,10 @@ public PolySerializable copy() { @Override public Expression asExpression() { - // this basically calls a constructor with WKT - return Expressions.new_( PolyGeometry.class, Expressions.constant( this.toString() ) ); + // During code generation, we cannot throw an InvalidGeometryException nor return null. This is why we use + // this method to create the PolyGeometry which throws a GenericRuntimeException if it fails (which should + // never be the case).
+ return Expressions.call( PolyGeometry.class, "ofOrThrow", Expressions.constant( this.toString() ) ); } @@ -918,10 +939,36 @@ public String toString() { public @NotNull String toWKT() { + if ( isPoint() + && jtsGeometry instanceof Point point + && point.getCoordinate() != null + && !Double.isNaN( point.getCoordinate().getZ() ) + && point.getCoordinate().getZ() != 0 ) { + Coordinate coordinate = point.getCoordinate(); + return String.format( + "SRID=%d; POINT Z (%s %s %s)", + SRID, + formatDouble( coordinate.x ), + formatDouble( coordinate.y ), + formatDouble( coordinate.z ) + ); + } + return String.format( "SRID=%d;%s", SRID, jtsGeometry.toString() ); } + /** + * Format whole numbers without a decimal point and trailing zeros. This is how + * doubles are formatted when converted to WKT by JTS Geometry. + */ + private static String formatDouble( double value ) { + return value % 1 == 0 + ? String.format( "%.0f", value ) + : String.format( "%s", value ); + } + + /** * Output the {@link PolyGeometry} in a GeoJson format with its SRID */ @@ -948,7 +995,7 @@ public String toBinary() { /** * Describe the input format of Geometry */ - enum GeometryInputFormat { + public enum GeometryInputFormat { WKT( "wkt" ), // Well-known Text TWKB( "twkb" ), // Tiny Well-known Binary diff --git a/core/src/main/java/org/polypheny/db/util/BsonUtil.java b/core/src/main/java/org/polypheny/db/util/BsonUtil.java index 50a7620253..e841d51684 100644 --- a/core/src/main/java/org/polypheny/db/util/BsonUtil.java +++ b/core/src/main/java/org/polypheny/db/util/BsonUtil.java @@ -201,6 +201,15 @@ public static BsonValue getAsBson( PolyValue obj, PolyType type, GridFSBucket bu case AUDIO, IMAGE, VIDEO, FILE -> handleMultimedia( bucket, obj ); case INTERVAL -> handleInterval( obj ); case JSON -> handleDocument( obj ); + case DOCUMENT -> { + if ( obj.getType() != PolyType.DOCUMENT ) { + // Fixes the case where the document contains only a single value of another type. This influences + // how a value is converted to JSON, when executing inside MongoDB, e.g. a number inside a + // document is converted to a string (see default case) without this check.
+ yield getAsBson( obj, obj.getType(), bucket ); + } + yield new BsonString( obj.toString() ); + } default -> new BsonString( obj.toString() ); }; } diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index c82e000baa..9358020b0b 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -508,8 +508,17 @@ public enum BuiltInMethod { MQL_GET_ARRAY( MqlFunctions.class, "docGetArray", PolyValue.class ), MQL_EXISTS( MqlFunctions.class, "docExists", PolyValue.class, PolyValue.class, List.class ), MQL_MERGE( MqlFunctions.class, "mergeDocument", PolyValue.class, PolyList.class, PolyValue[].class ), + MQL_MERGE_ADD( MqlFunctions.class, "mergeDocumentAdd", PolyValue.class, PolyList.class, PolyValue[].class ), MQL_NOT_UNSET( MqlFunctions.class, "notUnset", PolyValue.class ), + // GeoFunctions + MQL_GEO_INTERSECTS( MqlFunctions.class, "docGeoIntersects", PolyValue.class, PolyValue.class ), + MQL_GEO_WITHIN( MqlFunctions.class, "docGeoWithin", PolyValue.class, PolyValue.class, PolyValue.class ), + MQL_GEO_DISTANCE( MqlFunctions.class, "docGeoDistance", PolyValue.class, PolyValue.class, PolyValue.class ), +// MQL_NEAR( MqlFunctions.class, "docNear", PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class ), +// MQL_NEAR_SPHERE( MqlFunctions.class, "docNearSphere", PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class ), +// MQL_GEO_NEAR( MqlFunctions.class, "notUnset", PolyValue.class ), + MQL_PROJECT_INCLUDES( MqlFunctions.class, "projectIncludes", PolyValue.class, PolyList.class, PolyValue[].class ), MQL_REPLACE_ROOT( MqlFunctions.class, "replaceRoot", PolyValue.class ), CYPHER_LIKE( CypherFunctions.class, "like", PolyValue.class, PolyValue.class ), @@ -532,6 +541,11 @@ public enum BuiltInMethod { CYPHER_SET_LABELS( CypherFunctions.class, "setLabels", GraphPropertyHolder.class, List.class, PolyBoolean.class ), CYPHER_REMOVE_LABELS( CypherFunctions.class, "removeLabels", GraphPropertyHolder.class, List.class ), CYPHER_REMOVE_PROPERTY( CypherFunctions.class, "removeProperty", GraphPropertyHolder.class, String.class ), + CYPHER_POINT( CypherFunctions.class, "point", PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class, PolyValue.class ), + CYPHER_DISTANCE( CypherFunctions.class, "distance", PolyValue.class, PolyValue.class ), + CYPHER_DISTANCE_NEO4J( CypherFunctions.class, "distanceNeo4j", PolyValue.class, PolyValue.class ), + CYPHER_WITHIN_BBOX( CypherFunctions.class, "withinBBox", PolyValue.class, PolyValue.class, PolyValue.class ), + CYPHER_WITHIN_GEOMETRY( CypherFunctions.class, "withinGeometry", PolyValue.class, PolyValue.class ), TO_NODE( CypherFunctions.class, "toNode", Enumerable.class ), TO_EDGE( CypherFunctions.class, "toEdge", Enumerable.class ), TO_GRAPH( CypherFunctions.class, "toGraph", Enumerable.class, Enumerable.class ), diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java index 2182f39e5b..b481f1aed3 100644 --- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java +++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java @@ -21,7 +21,7 @@ import com.github.rvesse.airline.annotations.Command; import com.github.rvesse.airline.annotations.Option; import com.github.rvesse.airline.parser.errors.ParseException; -import java.awt.SystemTray; 
+import java.awt.*; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; @@ -126,7 +126,7 @@ public class PolyphenyDb { public boolean daemonMode = false; @Option(name = { "-defaultStore" }, description = "Type of default storeId") - public String defaultStoreName = "hsqldb"; + public static String defaultStoreName = "hsqldb"; @Option(name = { "-defaultSource" }, description = "Type of default source") public static String defaultSourceName = "csv"; @@ -188,6 +188,10 @@ public void runPolyphenyDb() { log.warn( "[-resetDocker] option is set, this option is only for development." ); } + // TODO: Remove after testing + String currentPath = System.getProperty( "user.home" ); + log.info( "user.home: {}", currentPath ); + // Configuration shall not be persisted ConfigManager.memoryMode = (mode == RunMode.TEST || memoryCatalog); ConfigManager.resetCatalogOnStartup = resetCatalog; diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 8fa94ce368..168f383a13 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -1933,7 +1933,13 @@ public long createGraphPlacement( long graphId, List> stores, State @Override public void dropGraphPlacement( long graphId, DataStore store, Statement statement ) { - AllocationPlacement placement = statement.getTransaction().getSnapshot().alloc().getPlacement( store.getAdapterId(), graphId ).orElseThrow(); + Optional optPlacement = statement.getTransaction().getSnapshot().alloc().getPlacement( store.getAdapterId(), graphId ); + + if ( optPlacement.isEmpty() ) { + log.warn( "Store: {} does not have a placement", store.adapterName ); + } + + AllocationPlacement placement = optPlacement.orElseThrow(); List partitions = statement.getTransaction().getSnapshot().alloc().getPartitionsFromLogical( graphId ); diff --git a/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java index 748cad1897..0ca75f6617 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/VolcanoQueryProcessor.java @@ -28,6 +28,8 @@ import org.polypheny.db.algebra.enumerable.common.EnumerableModifyToStreamerRule; import org.polypheny.db.algebra.enumerable.document.DocumentAggregateToAggregateRule; import org.polypheny.db.algebra.enumerable.document.DocumentFilterToCalcRule; +import org.polypheny.db.algebra.enumerable.document.DocumentGeoNearUnwrap; +import org.polypheny.db.algebra.enumerable.document.DocumentNearUnwrap; import org.polypheny.db.algebra.enumerable.document.DocumentProjectToCalcRule; import org.polypheny.db.algebra.enumerable.document.DocumentSortToSortRule; import org.polypheny.db.algebra.rules.AggregateExpandDistinctAggregatesRule; @@ -91,6 +93,8 @@ public class VolcanoQueryProcessor extends AbstractQueryProcessor { EnumerableRules.ENUMERABLE_PROJECT_TO_CALC_RULE, DocumentProjectToCalcRule.INSTANCE, DocumentFilterToCalcRule.INSTANCE, + DocumentNearUnwrap.INSTANCE, + DocumentGeoNearUnwrap.INSTANCE, DocumentAggregateToAggregateRule.INSTANCE, DocumentSortToSortRule.INSTANCE, EnumerableRules.ENUMERABLE_PROJECT_RULE, diff --git a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java index f48e26af18..1f3c3c24d9 
100644 --- a/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java +++ b/dbms/src/main/java/org/polypheny/db/processing/shuttles/QueryParameterizer.java @@ -79,7 +79,7 @@ public class QueryParameterizer extends AlgShuttleImpl implements RexVisitor>> docs; - private final List excluded = List.of( OperatorName.MQL_REGEX_MATCH, OperatorName.MQL_QUERY_VALUE ); + private final List excluded = List.of( OperatorName.MQL_REGEX_MATCH, OperatorName.MQL_QUERY_VALUE, OperatorName.MQL_GEO_WITHIN, OperatorName.MQL_GEO_INTERSECTS, OperatorName.MQL_GEO_DISTANCE, OperatorName.MQL_NEAR, OperatorName.MQL_NEAR_SPHERE, OperatorName.MQL_GEO_NEAR ); @Getter private final List types; @@ -184,7 +184,8 @@ public AlgNode visit( LogicalDocumentProject oProject ) { project.getTraitSet(), project.getInput(), Pair.zip( new ArrayList<>( oProject.includes.keySet() ), newProjects ).stream().collect( Collectors.toMap( e -> e.left, e -> e.right ) ), - project.excludes + project.excludes, + project.adds ); } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index ef1be6c5d4..7ef01e66e3 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -36,6 +36,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.common.Scan; import org.polypheny.db.algebra.core.document.DocumentScan; import org.polypheny.db.algebra.core.document.DocumentValues; import org.polypheny.db.algebra.core.lpg.LpgAlg; @@ -43,6 +44,7 @@ import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; import org.polypheny.db.algebra.logical.document.LogicalDocumentValues; import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelUnion; import org.polypheny.db.algebra.logical.relational.LogicalRelValues; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; @@ -196,10 +198,26 @@ protected List handleGeneric( AlgNode node, List builder.replaceTop( node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 1 ), builder.peek( 0 ) ) ), 2 ) + builder -> builder.replaceTop( + node.copy( node.getTraitSet(), ImmutableList.of( builder.peek( 1 ), builder.peek( 0 ) ) ) + , 2 ) ); break; default: + if ( node instanceof LogicalRelUnion l ) { + builders.forEach( + builder -> { + List inputs = new ArrayList<>(); + for ( int i = 0; i < node.getInputs().size(); i++ ) { + inputs.add( builder.peek( i ) ); + } + builder.replaceTop( + node.copy( node.getTraitSet(), inputs ) + , l.getInputs().size() ); + } + ); + break; + } throw new GenericRuntimeException( "Unexpected number of input elements: " + node.getInputs().size() ); } return builders; @@ -415,6 +433,25 @@ public AlgNode routeDocument( RoutedAlgBuilder builder, AlgNode alg, Statement s builder.push( handleDocScan( (DocumentScan) alg, statement, null ) ); return alg; } else if ( alg instanceof DocumentValues ) { + return alg; + } else if ( alg.getModel() != DataModel.DOCUMENT ) { + // cross model + if ( alg instanceof Scan scan ) { + switch ( alg.getModel() ) { + case RELATIONAL -> { + handleRelScan( builder, statement, scan.entity ); + } + case DOCUMENT -> { + throw new GenericRuntimeException( "Error while routing graph query." 
); + } + case GRAPH -> { + handleGraphScan( (LogicalLpgScan) alg, statement, (AllocationEntity) scan.entity, List.of() ); + } + } + } else { + this.handleGeneric( alg, builder ); + } + return alg; } throw new UnsupportedOperationException(); diff --git a/dbms/src/test/java/org/polypheny/db/TestHelper.java b/dbms/src/test/java/org/polypheny/db/TestHelper.java index 07399fc83e..19a3f6bd4f 100644 --- a/dbms/src/test/java/org/polypheny/db/TestHelper.java +++ b/dbms/src/test/java/org/polypheny/db/TestHelper.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -207,6 +207,12 @@ public static void addHsqldb( String name, Statement statement ) throws SQLExcep } + public static void addMongodb( String name, Statement statement ) throws SQLException { + executeSQL( statement, """ + ALTER ADAPTERS ADD "%s" USING 'mongodb' AS 'Store' WITH '{trxLifetimeLimit:"1209600",mode:docker,instanceId:"%s"}'""".formatted( name, 0 ) ); + } + + public static void addCsv( String name, Statement statement ) throws SQLException { executeSQL( statement, "ALTER ADAPTERS ADD \"" + name + "\" USING 'Csv' AS 'Store'" + " WITH '{}'" ); @@ -464,6 +470,14 @@ public boolean storeSupportsIndex() { public static abstract class HttpConnection { + static { + // TODO: remove this (is there a way to only disable the timeout, when I am actually debugging?) + Unirest.config() + .socketTimeout( 0 ) + .connectTimeout( 0 ); + } + + public static HttpRequest buildQuery( String route, String query, String database ) { JsonObject data = new JsonObject(); data.addProperty( "query", query ); diff --git a/dbms/src/test/java/org/polypheny/db/cypher/CypherGeoFunctionsTest.java b/dbms/src/test/java/org/polypheny/db/cypher/CypherGeoFunctionsTest.java new file mode 100644 index 0000000000..f28b0042c2 --- /dev/null +++ b/dbms/src/test/java/org/polypheny/db/cypher/CypherGeoFunctionsTest.java @@ -0,0 +1,495 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.cypher; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.polypheny.db.PolyphenyDb; +import org.polypheny.db.TestHelper.JdbcConnection; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.spatial.InvalidGeometryException; +import org.polypheny.db.type.entity.spatial.PolyGeometry; +import org.polypheny.db.type.entity.spatial.PolyGeometry.GeometryInputFormat; +import org.polypheny.db.webui.models.results.GraphResult; + +@SuppressWarnings("SqlNoDataSourceInspection") +@Tag("adapter") +@Tag("docker") +public class CypherGeoFunctionsTest extends CypherTestTemplate { + + final static String neo4jAdapterName = "neo4j"; + final static String neo4jDatabaseName = "neo4j_database"; + + + @BeforeAll + public static void init() { + try ( JdbcConnection polyphenyDbConnection = new JdbcConnection( true ) ) { + Connection connection = polyphenyDbConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + statement.execute( """ + ALTER ADAPTERS ADD "%s" USING 'Neo4j' AS 'Store' WITH '{mode:docker,instanceId:"%d"}'""".formatted( neo4jAdapterName, 0 ) ); + } + } catch ( SQLException e ) { + // If there is an error while adding the adapter, the most likely reason it does not work + // is that docker is not running! + throw new RuntimeException( e ); + } + } + + + @AfterAll + public static void close() { + tearDown(); + try ( JdbcConnection polyphenyDbConnection = new JdbcConnection( true ) ) { + + if ( Catalog.getInstance().getAdapters().values().stream().noneMatch( a -> a.uniqueName.equals( neo4jAdapterName ) ) ) { + System.out.println( "Already shutting down neo4j adapter" ); + return; + } + + Connection connection = polyphenyDbConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + statement.execute( "ALTER ADAPTERS DROP \"" + neo4jAdapterName + "\"" ); + } + AdapterManager adapterManager = AdapterManager.getInstance(); + if ( adapterManager.getAdapters().containsKey( neo4jAdapterName ) ) { + fail(); + } + } catch ( SQLException e ) { + // If there is an error while adding the adapter, the most likely reason it does not work + // is that docker is not running! + throw new RuntimeException( e ); + } + } + + + @BeforeEach + public void reset() { + tearDown(); + createGraph(); + } + + + /** + * Measures percentages change, relative to the larger number. 
+ */ + private static boolean isWithinPercentageChange( double a, double b, double percentage ) { + double diff = Math.abs( a - b ); + double maxAllowedDiff = (percentage / 100) * Math.max( a, b ); + return diff <= maxAllowedDiff; + } + + + private List runQueries( List queries ) { + return runQueries( queries, true, true ); + } + + + private List runQueries( List queries, boolean onHsqlDb, boolean onNeo4j ) { + try { + List results = new ArrayList<>(); + GraphResult finalResult = null; + + // 1. Run queries internally + if ( onHsqlDb ) { + tearDown(); + createGraph(); + for ( String query : queries ) { + finalResult = execute( query, GRAPH_NAME ); + } + results.add( finalResult ); + } + + // 2. Run queries in Docker + if ( onNeo4j ) { + deleteData( neo4jDatabaseName ); + createGraph( neo4jDatabaseName, neo4jAdapterName ); + for ( String query : queries ) { + finalResult = execute( query, neo4jDatabaseName ); + } + results.add( finalResult ); + } + + if ( onHsqlDb && onNeo4j ) { + assertEquals( 2, results.size() ); + } + return results; + } finally { + deleteData( neo4jDatabaseName ); + } + } + + + private Map assertResultsAreEqual( List results ) { + return assertResultsAreEqual( results.get( 0 ), results.get( 1 ) ); + } + + + private Map assertResultsAreEqual( GraphResult hsqlResult, GraphResult neo4jResult ) { + Map hsqlJson = Map.of(); + assertEquals( hsqlResult.data.length, neo4jResult.data.length ); + + for ( int i = 0; i < neo4jResult.data.length; i++ ) { + hsqlJson = convertResultToMap( hsqlResult ).get( 0 ); + Map neo4jJson = convertResultToMap( neo4jResult ).get( 0 ); + assertEquals( neo4jJson.keySet(), hsqlJson.keySet() ); + + for ( Entry entry : hsqlJson.entrySet() ) { + String key = entry.getKey(); + Object value = hsqlJson.get( key ); + Object neo4jValue = neo4jJson.get( key ); + assertEquals( neo4jValue, value ); + } + } + + return hsqlJson; + } + + + private List> convertResultToMap( GraphResult result ) { + List> results = new ArrayList<>(); + + ObjectMapper objectMapper = new ObjectMapper(); + try { + for ( int i = 0; i < result.data.length; i++ ) { + Map map = objectMapper.readValue( result.data[i][0], new TypeReference<>() { + } ); + results.add( map ); + } + } catch ( JsonProcessingException e ) { + throw new RuntimeException( e ); + } + + return results; + } + + + private Object extractValueAtPath( Map map, List path ) { + Object currentMap = map; + for ( String key : path ) { + if ( currentMap instanceof Map m ) { + currentMap = m.get( key ); + } else if ( currentMap instanceof ArrayList listOfLists ) { + for ( Object o : listOfLists ) { + if ( o instanceof ArrayList keyValueList ) { + // 0 -> Key + // 1 -> Value + if ( keyValueList.get( 0 ).equals( key ) ) { + currentMap = keyValueList.get( 1 ); + break; + } + } + + } + } + } + return currentMap; + } + + + @Test + public void distanceNeo4jTest() { + List queries = new ArrayList<>(); + queries.add( "CREATE (berlin:Location {name: \"Berlin\", lat: 52.5200, lon: 13.4050})" ); + queries.add( "CREATE (paris:Location {name: \"Paris\", lat: 48.8566, lon: 2.3522})" ); + + queries.add( """ + MATCH (a:Location {name: "Berlin"}), (b:Location {name: "Paris"}) + WITH + point({latitude: a.lat, longitude: a.lon}) AS pointBerlin, + point({latitude: b.lat, longitude: b.lon}) AS pointParis + RETURN point.distance(pointBerlin, pointParis, 'neo4j') AS distance_meters; + """ ); + List results = runQueries( queries ); + Map hsqldbResult = convertResultToMap( results.get( 0 ) ).get( 0 ); + Map neo4jResult = convertResultToMap( 
results.get( 1 ) ).get( 0 ); + + // Validate that the difference change between both numbers is smaller than a threshold, e.g. 0,2% ( + assert isWithinPercentageChange( ((Number) hsqldbResult.get( "value" )).doubleValue(), (Integer) neo4jResult.get( "value" ), 0.2 ); + } + + + @Test + public void createPointTest() { + List queries = new ArrayList<>(); + queries.add( "CREATE (bob:User)" ); + queries.add( "MATCH (n) RETURN point({longitude: 56.7, latitude: 12}) AS point" ); + + List results = runQueries( queries ); + Map res = assertResultsAreEqual( results ); + PolyGeometry geometry = PolyGeometry.ofOrThrow( (String) res.get( "wkt" ) ); + assert geometry.getSRID() == PolyGeometry.WGS_84; + assert geometry.asPoint().getX() == 56.7; + assert geometry.asPoint().getY() == 12.0; + + queries.remove( 1 ); + queries.add( "MATCH (n) RETURN point({x: 15, y: 5}) AS point" ); + results = runQueries( queries ); + res = assertResultsAreEqual( results ); + geometry = PolyGeometry.ofOrThrow( (String) res.get( "wkt" ) ); + assert geometry.getSRID() == 0; + assert geometry.asPoint().getX() == 15.0; + assert geometry.asPoint().getY() == 5.0; + + queries.remove( 1 ); + queries.add( "MATCH (n) RETURN point({x: 1, y: 2, z: 3}) AS point" ); + results = runQueries( queries ); + res = assertResultsAreEqual( results ); + geometry = PolyGeometry.ofOrThrow( (String) res.get( "wkt" ) ); + assert geometry.getSRID() == 0; + assert geometry.asPoint().getX() == 1.0; + assert geometry.asPoint().getY() == 2.0; + assert geometry.asPoint().getZ() == 3.0; + + queries.remove( 1 ); + queries.add( "MATCH (n) RETURN point({longitude: 55.5, latitude: 12.2, height: 100}) AS point" ); + results = runQueries( queries ); + res = assertResultsAreEqual( results ); + geometry = PolyGeometry.ofOrThrow( (String) res.get( "wkt" ) ); + assert geometry.getSRID() == PolyGeometry.WGS_84_3D; + assert geometry.asPoint().getX() == 55.5; + assert geometry.asPoint().getY() == 12.2; + assert geometry.asPoint().getZ() == 100.0; + } + + + @Test + @Tag("fileExcluded") + public void createNodeWithPointTest() throws InvalidGeometryException { + List results = runQueries( List.of( + "CREATE (z:Station {name: 'Zürich', location: point({latitude: 47.3769, longitude: 8.5417})})", + "MATCH (n) RETURN n;" + ) ); + assert results.size() == 2; + String name = "value"; + + boolean usesNeo4j = PolyphenyDb.defaultStoreName.equals( "neo4j" ); + + if ( usesNeo4j ) { + name = "wkt"; + } + + Object hsqlValue = extractValueAtPath( convertResultToMap( results.get( 0 ) ).get( 0 ), List.of( "properties", "_ps", "location", name ) ); + Object neo4jValue = extractValueAtPath( convertResultToMap( results.get( 1 ) ).get( 0 ), List.of( "properties", "_ps", "location", "wkt" ) ); + PolyGeometry neo4jGeometry = PolyGeometry.of( neo4jValue.toString() ); + PolyGeometry hsqlGeometry = usesNeo4j ? PolyGeometry.of( hsqlValue.toString() ) : new PolyGeometry( hsqlValue.toString(), 4326, GeometryInputFormat.GEO_JSON ); + assertEquals( neo4jGeometry, hsqlGeometry ); + + results = runQueries( List.of( + "CREATE (z:Station {name: 'Zürich', location: point({x: 15, y: 30})})", + "MATCH (n) RETURN n;" + ) ); + assert results.size() == 2; + hsqlValue = extractValueAtPath( convertResultToMap( results.get( 0 ) ).get( 0 ), List.of( "properties", "_ps", "location", name ) ); + neo4jValue = extractValueAtPath( convertResultToMap( results.get( 1 ) ).get( 0 ), List.of( "properties", "_ps", "location", "wkt" ) ); + neo4jGeometry = PolyGeometry.of( neo4jValue.toString() ); + hsqlGeometry = usesNeo4j ? 
PolyGeometry.of( hsqlValue.toString() ) : new PolyGeometry( hsqlValue.toString(), 0, GeometryInputFormat.GEO_JSON ); + assertEquals( neo4jGeometry, hsqlGeometry ); + } + + + @Test + public void createPointFromNodeFields() { + execute( "CREATE (c:Coordinate { lon: 56.7, lat: 12 })" ); + GraphResult res = execute( "MATCH (c:Coordinate) RETURN point({longitude: c.lon, latitude: c.lat}) AS point" ); + PolyGeometry geometry = convertJsonToPolyGeometry( res.data[0][0] ); + assert geometry.getSRID() == PolyGeometry.WGS_84; + assert geometry.asPoint().getX() == 56.7; + assert geometry.asPoint().getY() == 12.0; + } + + + @Test + public void distanceTest() { + // Compute distance in spherical coordinate system (2 dimensions) + execute( """ + CREATE (basel:City {name: 'Basel', latitude: 47.5595, longitude: 7.5885}), + (zurich:City {name: 'Zürich', latitude: 47.3770, longitude: 8.5416}); + """ ); + GraphResult res = execute( """ + MATCH (basel:City {name: 'Basel'}), (zurich:City {name: 'Zürich'}) + WITH basel, zurich, + point({latitude: basel.latitude, longitude: basel.longitude}) AS point1, + point({latitude: zurich.latitude, longitude: zurich.longitude}) AS point2 + RETURN basel.name, zurich.name, point.distance(point1, point2) AS distance; + """ ); + assert res.data[0].length == 3; + assert Math.abs( PolyValue.fromJson( res.data[0][2] ).asDocument().get( new PolyString( "value" ) ).asDouble().doubleValue() - 74460.31287583392 ) < 1e-9; + + // Compute distance in spherical coordinate system (3 dimensions) + execute( """ + CREATE (basel:City {name: 'Basel', latitude: 47.5595, longitude: 7.5885}), + (zurich:City {name: 'Zürich', latitude: 47.3770, longitude: 8.5416}); + """ ); + res = execute( """ + MATCH (basel:City {name: 'Basel'}), (zurich:City {name: 'Zürich'}) + WITH basel, zurich, + point({latitude: basel.latitude, longitude: basel.longitude, height: 100}) AS point1, + point({latitude: zurich.latitude, longitude: zurich.longitude, height: 200}) AS point2 + RETURN basel.name, zurich.name, point.distance(point1, point2) AS distance; + """ ); + assert res.data[0].length == 3; + assert Math.abs( PolyValue.fromJson( res.data[0][2] ).asDocument().get( new PolyString( "value" ) ).asDouble().doubleValue() - 74462.13313143898 ) < 1e-9; + + // Compute distance in euclidean coordinate system (2 dimensions) + execute( """ + CREATE (a:Dot {x: 1, y: 1}), + (b:Dot {x: 2, y: 2}), + (a)-[:CONNECTED]->(b); + """ ); + res = execute( """ + MATCH (a:Dot)-[:CONNECTED]->(b:Dot) + WITH a, b, + point({x: a.x, y: a.y}) AS d1, + point({x: b.x, y: b.y}) AS d2 + RETURN point.distance(d1, d2) AS distance; + """ ); + assert res.data[0].length == 1; + assert Math.abs( PolyValue.fromJson( res.data[0][0] ).asDocument().get( new PolyString( "value" ) ).asDouble().doubleValue() - Math.sqrt( 2 ) ) < 1e-9; + + // Compute distance in euclidean coordinate system (3 dimensions) + execute( """ + CREATE (a:Dot3D {x: 1, y: 1, z:1}), + (b:Dot3D {x: 2, y: 2, z:2}), + (a)-[:CONNECTED]->(b); + """ ); + res = execute( """ + MATCH (a:Dot3D)-[:CONNECTED]->(b:Dot3D) + WITH a, b, + point({x: a.x, y: a.y, z: a.z}) AS d1, + point({x: b.x, y: b.y, z: b.z}) AS d2 + RETURN point.distance(d1, d2) AS distance; + """ ); + assert res.data[0].length == 1; + assert Math.abs( PolyValue.fromJson( res.data[0][0] ).asDocument().get( new PolyString( "value" ) ).asDouble().doubleValue() - 1.7320508075688772 ) < 1e-9; + } + + + @Test + public void withinBBoxTest() { + List queries = new ArrayList<>(); + + queries.add( """ + CREATE (a:Dot {x: 1, y: 1, name: 'on 
edge'}), + (b:Dot {x: 1.5, y: 1.5, name: 'inside'}), + (c:Dot {x: 3, y: 3, name: 'outside'}); + """ ); + queries.add( """ + MATCH (d:Dot {name: 'inside'}) + WITH point({x: d.x, y: d.y}) AS dPoint, d + RETURN point.withinBBox(dPoint, point({x: 1, y: 1}), point({x: 2, y: 2})) AS result, d.name + """ ); + List results = runQueries( queries ); + assert convertResultToMap( results.get( 0 ) ).get( 0 ).get( "value" ).equals( true ); + assert convertResultToMap( results.get( 1 ) ).get( 0 ).get( "value" ).equals( true ); + + queries.remove( 1 ); + queries.add( """ + MATCH (d:Dot {name: 'outside'}) + WITH point({x: d.x, y: d.y}) AS dPoint, d + RETURN point.withinBBox(dPoint, point({x: 1, y: 1}), point({x: 2, y: 2})) AS result, d.name + """ ); + results = runQueries( queries ); + assert convertResultToMap( results.get( 0 ) ).get( 0 ).get( "value" ).equals( false ); + assert convertResultToMap( results.get( 1 ) ).get( 0 ).get( "value" ).equals( false ); + + queries.remove( 1 ); + queries.add( """ + MATCH (d:Dot {name: 'on edge'}) + WITH point({x: d.x, y: d.y}) AS dPoint, d + RETURN point.withinBBox(dPoint, point({x: 1, y: 1}), point({x: 2, y: 2})) AS result, d.name + """ ); + results = runQueries( queries ); + assert convertResultToMap( results.get( 0 ) ).get( 0 ).get( "value" ).equals( true ); + assert convertResultToMap( results.get( 1 ) ).get( 0 ).get( "value" ).equals( true ); + } + + + /** + * The function call point.withinGeometry used in this test does not exist in Cypher / Neo4j. + * It is possible to extend Neo4j with additional functions, however, this has not (yet) been done, + * which is why it will only be executed internally for now. + */ + @Test + public void withinGeometryTest() { + List queries = new ArrayList<>(); + queries.add( """ + CREATE (a:Dot {x: 1, y: 1, name: 'on edge'}), + (b:Dot {x: 1.5, y: 1.5, name: 'inside'}), + (c:Dot {x: 3, y: 3, name: 'outside'}); + """ ); + queries.add( """ + MATCH (d:Dot {name: 'inside'}) + WITH point({x: d.x, y: d.y}) AS dPoint, d + RETURN point.withinGeometry(dPoint, 'POLYGON((0 0, 2 0, 2 2, 0 2, 0 0))') AS result, d.name + """ ); + List results = runQueries( queries, true, false ); + assert convertResultToMap( results.get( 0 ) ).get( 0 ).get( "value" ).equals( true ); + + queries.remove( 1 ); + queries.add( """ + MATCH (d:Dot {name: 'outside'}) + WITH point({x: d.x, y: d.y}) AS dPoint, d + RETURN point.withinGeometry(dPoint, point({x: 1, y: 1}), point({x: 2, y: 2})) AS result, d.name + """ ); + results = runQueries( queries, true, false ); + assert convertResultToMap( results.get( 0 ) ).get( 0 ).get( "value" ).equals( false ); + + queries.remove( 1 ); + queries.add( """ + MATCH (d:Dot {name: 'on edge'}) + WITH point({x: d.x, y: d.y}) AS dPoint, d + RETURN point.withinGeometry(dPoint, point({x: 1, y: 1}), point({x: 2, y: 2})) AS result, d.name + """ ); + results = runQueries( queries, true, false ); + assert convertResultToMap( results.get( 0 ) ).get( 0 ).get( "value" ).equals( true ); + } + + + private PolyGeometry convertJsonToPolyGeometry( String json ) { + try { + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode jsonNode = objectMapper.readTree( json ); + String wkt = jsonNode.get( "wkt" ).asText(); + return PolyGeometry.of( wkt ); + } catch ( JsonProcessingException e ) { + throw new RuntimeException( e ); + } + } + + +} diff --git a/dbms/src/test/java/org/polypheny/db/cypher/CypherTestTemplate.java b/dbms/src/test/java/org/polypheny/db/cypher/CypherTestTemplate.java index f58c85b88e..1e3f56c633 100644 --- 
a/dbms/src/test/java/org/polypheny/db/cypher/CypherTestTemplate.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/CypherTestTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,9 +30,6 @@ import org.junit.jupiter.api.BeforeAll; import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.CypherConnection; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.catalogs.AdapterCatalog; -import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.cypher.helper.TestEdge; import org.polypheny.db.cypher.helper.TestGraphObject; import org.polypheny.db.cypher.helper.TestLiteral; @@ -45,13 +42,14 @@ import org.polypheny.db.type.entity.graph.PolyEdge; import org.polypheny.db.type.entity.graph.PolyNode; import org.polypheny.db.type.entity.graph.PolyPath; +import org.polypheny.db.type.entity.spatial.PolyGeometry; import org.polypheny.db.util.Pair; import org.polypheny.db.webui.models.results.GraphResult; @Slf4j public class CypherTestTemplate { - private static final String GRAPH_NAME = "test"; + protected static final String GRAPH_NAME = "test"; protected static final String SINGLE_NODE_PERSON_1 = "CREATE (p:Person {name: 'Max'})"; protected static final String SINGLE_NODE_PERSON_2 = "CREATE (p:Person {name: 'Hans'})"; @@ -84,7 +82,12 @@ public static void createGraph() { public static void createGraph( String name ) { - execute( format( "CREATE DATABASE %s", name ) ); + createGraph( name, "hsqldb" ); + } + + + public static void createGraph( String name, String store ) { + execute( format( "CREATE DATABASE %s ON STORE %s", name, store ) ); execute( format( "USE GRAPH %s", name ) ); } @@ -97,8 +100,6 @@ public static void tearDown() { public static void deleteData( String graph ) { execute( format( "DROP DATABASE %s IF EXISTS", graph ) ); - Snapshot snapshot = Catalog.snapshot(); - AdapterCatalog adapterCatalog = Catalog.getInstance().getAdapterCatalog( 0 ).orElseThrow(); } @@ -290,7 +291,8 @@ public enum Type { EDGE( "edge", TestEdge.class, PolyEdge.class ), PATH( "path", TestPath.class, PolyPath.class ), ANY( "any", TestNode.class, PolyValue.class ), - STRING( "varchar", TestLiteral.class, PolyString.class ); + STRING( "varchar", TestLiteral.class, PolyString.class ), + GEOMETRY( "geometry", TestLiteral.class, PolyGeometry.class ); private final String typeName; diff --git a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java index 6f85af33b3..63410965a3 100644 --- a/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/cypher/DdlTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
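Usage note for the createGraph(name, store) overload added to CypherTestTemplate above: geo-related graph tests can now pin the shared test namespace to a specific data store instead of always falling back to hsqldb. A minimal sketch of how a subclass might call it; the store alias "neo4j" is an assumption for illustration and must match a previously registered adapter:

    // Create the shared test graph on an explicitly chosen store (assumed alias "neo4j"),
    // then run statements against it as usual.
    createGraph( GRAPH_NAME, "neo4j" );   // issues: CREATE DATABASE test ON STORE neo4j
    execute( SINGLE_NODE_PERSON_1 );      // subsequent statements target that graph
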
@@ -24,6 +24,7 @@ import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; +import java.util.List; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; @@ -32,8 +33,11 @@ import org.polypheny.db.TestHelper; import org.polypheny.db.TestHelper.JdbcConnection; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.LogicalAdapter; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.impl.PolyCatalog; import org.polypheny.db.webui.models.results.GraphResult; @Tag("adapter") @@ -85,9 +89,9 @@ public void addPlacementTest() throws SQLException { assertEquals( 1, catalog.getSnapshot().alloc().getFromLogical( graph.id ).size() ); - addStore( "store1" ); + addStore( "storeCypherDdl" ); - execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); + execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "storeCypherDdl" ), graphName ); namespace = catalog.getSnapshot().getNamespace( graphName ).orElseThrow(); graph = catalog.getSnapshot().graph().getGraph( namespace.id ).orElseThrow(); @@ -97,8 +101,8 @@ public void addPlacementTest() throws SQLException { execute( "DROP DATABASE " + graphName ); } finally { - - removeStore( "store1" ); + checkAllocationsSize( 0 ); + removeStore( "storeCypherDdl" ); } } @@ -108,9 +112,9 @@ public void addPlacementTest() throws SQLException { public void initialPlacementTest() throws SQLException { Catalog catalog = Catalog.getInstance(); try { - addStore( "store1" ); + addStore( "storeCypherDdl" ); - execute( String.format( "CREATE DATABASE %s IF NOT EXISTS ON STORE %s ", graphName, "store1" ) ); + execute( String.format( "CREATE DATABASE %s IF NOT EXISTS ON STORE %s ", graphName, "storeCypherDdl" ) ); LogicalNamespace namespace = catalog.getSnapshot().getNamespace( graphName ).orElseThrow(); LogicalGraph graph = catalog.getSnapshot().graph().getGraph( namespace.id ).orElseThrow(); @@ -126,7 +130,8 @@ public void initialPlacementTest() throws SQLException { execute( "DROP DATABASE " + graphName ); } finally { - removeStore( "store1" ); + checkAllocationsSize( 0 ); + removeStore( "storeCypherDdl" ); } } @@ -144,21 +149,22 @@ public void deletePlacementTest() throws SQLException { assertEquals( 1, catalog.getSnapshot().alloc().getFromLogical( graph.id ).size() ); - addStore( "store1" ); + addStore( "storeCypherDdl" ); - execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); + execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "storeCypherDdl" ), graphName ); namespace = catalog.getSnapshot().getNamespace( graphName ).orElseThrow(); graph = catalog.getSnapshot().graph().getGraph( namespace.id ).orElseThrow(); assertEquals( 2, catalog.getSnapshot().alloc().getFromLogical( graph.id ).size() ); - execute( String.format( "DROP PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); + execute( String.format( "DROP PLACEMENT OF %s ON STORE %s", graphName, "storeCypherDdl" ), graphName ); execute( "DROP DATABASE " + graphName ); } finally { - removeStore( "store1" ); + checkAllocationsSize( 0 ); + removeStore( "storeCypherDdl" ); } } @@ -172,9 +178,9 @@ public void deletePlacementDataTest() throws SQLException { execute( DmlInsertTest.CREATE_COMPLEX_GRAPH_2, graphName ); try 
{ - addStore( "store1" ); + addStore( "storeCypherDdl" ); - execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "store1" ), graphName ); + execute( String.format( "CREATE PLACEMENT OF %s ON STORE %s", graphName, "storeCypherDdl" ), graphName ); execute( String.format( "DROP PLACEMENT OF %s ON STORE %s", graphName, "hsqldb" ), graphName ); @@ -188,13 +194,22 @@ public void deletePlacementDataTest() throws SQLException { execute( "DROP DATABASE " + graphName ); + checkAllocationsSize( 0 ); } finally { - removeStore( "store1" ); + removeStore( "storeCypherDdl" ); } } + private static void checkAllocationsSize( int size ) { + PolyCatalog catalog = (PolyCatalog) Catalog.getInstance(); + LogicalAdapter store = catalog.getSnapshot().getAdapter( "storeCypherDdl" ).orElseThrow(); + List entities = catalog.getSnapshot().alloc().getEntitiesOnAdapter( store.id ).orElseThrow(); + assertEquals( size, entities.size() ); + } + + private void addStore( String name ) throws SQLException { try ( JdbcConnection polyphenyDbConnection = new JdbcConnection( true ) ) { Connection connection = polyphenyDbConnection.getConnection(); diff --git a/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java b/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java index 5899a416d3..e450fedd2c 100644 --- a/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java +++ b/dbms/src/test/java/org/polypheny/db/jdbc/JdbcMetaTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -367,10 +367,10 @@ public void testSortNullsAtEnd() throws SQLException { boolean trigger = false; while ( rs.next() ) { - Integer value = rs.getInt( "some_value" ); + int value = rs.getInt( "some_value" ); if ( value == 0 ) { trigger = true; - } else if ( trigger && value != null ) { + } else if ( trigger ) { fail( "Values are not sorted correctly." ); } } @@ -379,10 +379,10 @@ public void testSortNullsAtEnd() throws SQLException { trigger = false; while ( rs2.next() ) { - Integer value = rs2.getInt( "some_value" ); + int value = rs2.getInt( "some_value" ); if ( value == 0 ) { trigger = true; - } else if ( trigger && value != null ) { + } else if ( trigger ) { fail( "Values are not sorted correctly." ); } } diff --git a/dbms/src/test/java/org/polypheny/db/mql/AggregateTest.java b/dbms/src/test/java/org/polypheny/db/mql/AggregateTest.java index 4b2b24187b..00c93db42d 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/AggregateTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/AggregateTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
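Background for the JdbcMetaTest change above: java.sql.ResultSet#getInt returns a primitive int and maps SQL NULL to 0, so the old Integer variable could never be null and the removed null check was dead code. If a test ever needs to tell a real 0 apart from SQL NULL, the standard JDBC pattern is wasNull(); a small sketch, assuming rs is the ResultSet from the surrounding loop:

    int value = rs.getInt( "some_value" );
    boolean wasSqlNull = rs.wasNull();   // true only if the column just read was SQL NULL
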
@@ -340,11 +340,16 @@ public void setTest() { @Test public void skipTest() { + List data = Arrays.asList( + "{\"test\":1,\"key\":4}", + "{\"test\":1.3,\"key\":0}", + "{\"test\":\"test\",\"key\":13}" ); + List expected = ImmutableList.of( - "{\"test\":1.3,\"key\":{\"key\":\"val\"}}", + "{\"test\":1,\"key\":4}", "{\"test\":\"test\",\"key\":13}" ); - insertMany( DATA_0 ); + insertMany( data ); // we sort to assure correct order DocResult result = aggregate( $sort( document( kv( string( "key" ), 1 ) ) ), $skip( 1 ) ); diff --git a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java index 479ebb2905..720c4915ea 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DdlTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -85,7 +85,7 @@ public void differentNamespaceSyntaxTest() { @Test public void addPlacementTest() throws SQLException { - String placement = "store1"; + String placement = "storeMongoDdl"; try { LogicalNamespace namespace = Catalog.snapshot().getNamespace( MqlTestTemplate.namespace ).orElseThrow(); @@ -117,7 +117,7 @@ public void addPlacementTest() throws SQLException { @Test public void deletePlacementTest() throws SQLException { - String placement = "store1"; + String placement = "storeMongoDdl"; execute( "db.createCollection(\"" + collectionName + "\")" ); @@ -154,7 +154,7 @@ public void deletePlacementTest() throws SQLException { @Test public void deletePlacementDataTest() throws SQLException { - String placement = "store1"; + String placement = "storeMongoDdl"; final String DATA = "{ \"key\": \"value\", \"key1\": \"value1\"}"; execute( "db.createCollection(\"" + collectionName + "\")" ); @@ -190,7 +190,6 @@ private void addStore( String name ) throws SQLException { try ( JdbcConnection polyphenyDbConnection = new JdbcConnection( true ) ) { Connection connection = polyphenyDbConnection.getConnection(); try ( Statement statement = connection.createStatement() ) { - statement.executeUpdate( "ALTER ADAPTERS ADD \"" + name + "\" USING 'Hsqldb' AS 'Store'" + " WITH '{maxConnections:\"25\",trxControlMode:locks,trxIsolationLevel:read_committed,type:Memory,tableType:Memory,mode:embedded}'" ); diff --git a/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java b/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java index 440d80be58..f1ecdbd230 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/DmlTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/dbms/src/test/java/org/polypheny/db/mql/FindTest.java b/dbms/src/test/java/org/polypheny/db/mql/FindTest.java index 8aee3b60ca..e889a3fc39 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/FindTest.java +++ b/dbms/src/test/java/org/polypheny/db/mql/FindTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/dbms/src/test/java/org/polypheny/db/mql/MqlGeoFunctionsTest.java b/dbms/src/test/java/org/polypheny/db/mql/MqlGeoFunctionsTest.java new file mode 100644 index 0000000000..4009c9cbdc --- /dev/null +++ b/dbms/src/test/java/org/polypheny/db/mql/MqlGeoFunctionsTest.java @@ -0,0 +1,580 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.mql; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.polypheny.db.TestHelper; +import org.polypheny.db.TestHelper.JdbcConnection; +import org.polypheny.db.webui.models.results.DocResult; + +@SuppressWarnings("SqlNoDataSourceInspection") +@Tag("adapter") +@Tag("docker") +@Slf4j +public class MqlGeoFunctionsTest extends MqlTestTemplate { + + final static String mongoAdapterName = "mongo_gis"; + final static String mongoCollection = "mongo"; + final static String defaultCollection = "default"; + final static List collections = List.of( + mongoCollection, + defaultCollection + ); + final static Map collectionToStore = Map.of( mongoCollection, mongoAdapterName, defaultCollection, "hsqldb" ); + final static String clearCollection = """ + db.%s.deleteMany({}) + """; + + + @BeforeAll + public static void init() { + addMongoDbAdapter(); + + // Create collection and save it to either the internal store or MongoDB. + // This way, we can compare if the implementations match. + for ( String collection : collections ) { + String createCollection = """ + db.createCollection(%s).store(%s) + """.formatted( collection, collectionToStore.get( collection ) ); + execute( createCollection, namespace ); + } + } + + + @BeforeEach + public void beforeEach() { + // Make sure collections are emptied before each test. 
+ clearCollections(); + } + + + @Test + @Tag("fileExcluded") + public void docGeoIntersectsTest() { + ArrayList queries = new ArrayList<>(); + queries.add( """ + db.%s.insertMany([ + { + name: "Legacy [0,0]", + num: 1, + legacy: [0,0] + }, + { + name: "Legacy [1,1]", + num: 2, + legacy: [1,1] + }, + { + name: "Legacy [2,2]", + num: 3, + legacy: [2,2] + } + ]) + """ ); + queries.add( """ + db.%s.find({ + legacy: { + $geoIntersects: { + $geometry: { + type: "Polygon", + coordinates: [[ [0,0], [0,1], [1,1], [1,0], [0,0] ]] + } + } + } + }) + """ ); + List results = runQueries( queries ); + compareResults( results ); + } + + + @Test + @Tag("fileExcluded") + public void docGeoWithinGeoJsonTest() { + List results; + String insertMany = """ + db.%s.insertMany([ + { + name: "GeoJSON [0,0]", + num: 1, + location: { + type: "Point", + coordinates: [0,0] + } + }, + { + name: "GeoJSON [1,1]", + num: 2, + location: { + type: "Point", + coordinates: [1,1] + } + }, + { + name: "GeoJSON [2,2]", + num: 3, + location: { + type: "Point", + coordinates: [2,2] + } + } + ]) + """; + String geoWithinBox = """ + db.%s.find({ + location: { + $geoWithin: { + $box: [ + [0,0], + [1,1] + ] + } + } + }) + """; + results = runQueries( Arrays.asList( insertMany, geoWithinBox ) ); + compareResults( results ); + } + + + @Test + @Tag("fileExcluded") + public void docGeoWithinLegacyCoordinatesTest() { + List results; + String insertMany = """ + db.%s.insertMany([ + { + name: "Legacy [0,0]", + num: 1, + legacy: [0,0] + }, + { + name: "Legacy [1,1]", + num: 2, + legacy: [1,1] + }, + { + name: "Legacy [2,2]", + num: 3, + legacy: [2,2] + } + ]) + """; + String geoWithinBox = """ + db.%s.find({ + legacy: { + $geoWithin: { + $box: [ + [0,0], + [1,1] + ] + } + } + }) + """; + results = runQueries( Arrays.asList( insertMany, geoWithinBox ) ); + compareResults( results ); + + String geoWithinGeometry = """ + db.%s.find({ + legacy: { + $geoWithin: { + $geometry: { + type: "Polygon", + coordinates: [[ [0,0], [0,1], [1,1], [1,0], [0,0] ]] + } + } + } + }) + """; + results = runQueries( Arrays.asList( clearCollection, insertMany, geoWithinGeometry ) ); + compareResults( results ); + + String geoWithinPolygon = """ + db.%s.find({ + legacy: { + $geoWithin: { + $polygon: [ [0,0], [0,1], [1,1], [1,0], [0,0] ] + } + } + }) + """; + results = runQueries( Arrays.asList( clearCollection, insertMany, geoWithinPolygon ) ); + compareResults( results ); + + String geoWithinCenter = """ + db.%s.find({ + legacy: { + $geoWithin: { + $center: [ + [ 0, 0 ], + 1.5 + ] + } + } + }) + """; + results = runQueries( Arrays.asList( clearCollection, insertMany, geoWithinCenter ) ); + compareResults( results ); + + String insertCoordinates = """ + db.%s.insertMany([ + { + name: "Kirchgebäude Mittlere Brücke", + legacy: [7.5898043, 47.5600440] + }, + { + name: "Mitte Rhein Johanniterbrücke", + legacy: [7.585512, 47.564843] + }, + ]) + """; + + String geoWithinCenterSphere = """ + db.%s.find({ + legacy: { + $geoWithin: { + $centerSphere: [ + [7.5872232, 47.5601937], + 0.00004 + ] + } + } + }) + """; + results = runQueries( Arrays.asList( clearCollection, insertCoordinates, geoWithinCenterSphere ) ); + compareResults( results ); + } + + + @Test + @Tag("fileExcluded") + public void docsNearTestOnlMongoDb() { + String insertMany = """ + db.%s.insertMany([ + { + name: "Legacy [0,0]", + num: 1, + legacy: [0,0] + }, + { + name: "Legacy [1,1]", + num: 2, + legacy: [1,1] + }, + { + name: "Legacy [2,2]", + num: 3, + legacy: [2,2] + } + ]) + """; + execute( 
insertMany.formatted( mongoCollection ), namespace ); + + DocResult result = execute( """ + db.%s.find({ + legacy: { + $near: { + $geometry: { + type: "Point", + coordinates: [0,0] + } + }, + } + }) + """.formatted( mongoCollection ), namespace ); + } + + + @Test + @Tag("fileExcluded") + public void docsNear() { + String insertMany = """ + db.%s.insertMany( + [{"id": 0, "coordinates": [16.4, 48.25]}, {"id": 1, "coordinates": [2.29275, 48.79325]}, {"id": 2, "coordinates": [-2.09814, 57.14369]}, {"id": 3, "coordinates": [15.00913, 37.51803]}, {"id": 4, "coordinates": [0.30367, 51.38673]}, {"id": 5, "coordinates": [6.10237, 46.18396]}, {"id": 6, "coordinates": [18.28333, 59.33333]}, {"id": 7, "coordinates": [5.4384, 43.2907]}, {"id": 8, "coordinates": [6.10237, 46.18396]}, {"id": 9, "coordinates": [6.10237, 46.18396]}]) + """; + execute( insertMany.formatted( mongoCollection ), namespace ); + + execute( """ + db.%s.find({ + legacy: { + $near: { + $geometry: { + type: "Point", + coordinates: [0,0] + } + }, + } + }) + """.formatted( mongoCollection ), namespace ); + } + + + @Test + public void docsNearTestOnlHsqlDb() { + String insertMany = """ + db.%s.insertMany([ + { + name: "Legacy [0,0]", + num: 1, + legacy: [0,0] + }, + { + name: "Legacy [1,1]", + num: 2, + legacy: [1,1] + }, + { + name: "Legacy [2,2]", + num: 3, + legacy: [2,2] + } + ]) + """; + execute( insertMany.formatted( defaultCollection ), namespace ); + + DocResult result = execute( """ + db.%s.find({ + legacy: { + $near: { + $geometry: { + type: "Point", + coordinates: [0,0] + }, + }, + } + }) + """.formatted( defaultCollection ), namespace ); + System.out.println(); + } + + + @Test + public void docsNearTest() { + List queries = new ArrayList<>(); + queries.add( """ + db.%s.insertMany([ + { + name: "Legacy [2,2]", + num: 3, + legacy: [2,2] + }, + { + name: "Legacy [0,0]", + num: 1, + legacy: [0,0] + }, + { + name: "Legacy [3,3]", + num: 4, + legacy: [3,3] + }, + { + name: "Legacy [1,1]", + num: 2, + legacy: [1,1] + } + ]) + """ ); + queries.add( """ + db.%s.find({ + legacy: { + $near: [0,0], + $maxDistance: 10 + } + }) + """ ); + List results = runQueries( queries ); + compareResults( results ); + } + + + @Test + public void docGeoNearTest() { + List queries = new ArrayList<>(); + + queries.add( """ + db.%s.insertMany([ + { + name: "Legacy [2,2]", + num: 3, + legacy: [2,2] + }, + { + name: "Legacy [0,0]", + num: 1, + legacy: [0,0] + }, + { + name: "Legacy [3,3]", + num: 4, + legacy: [3,3] + }, + { + name: "Legacy [1,1]", + num: 2, + legacy: [1,1] + } + ]) + """ ); + queries.add( """ + db.%s.aggregate([ + { + "$geoNear": { + near: [0,0], + key: "legacy", + spherical: false, + includeLocs: "nearLocation.nested", + distanceField: "distanced.nested", + distanceMultiplier: 2, + query: { "num": { "$gte": 2 } } + } + } + ]) + """ ); + + List results = runQueries( queries ); + compareResults( results ); + } + + + private void clearCollections() { + for ( String collection : collections ) { + execute( clearCollection.formatted( collection ), namespace ); + } + } + + + private static void addMongoDbAdapter() { + try ( JdbcConnection polyphenyDbConnection = new JdbcConnection( true ) ) { + Connection connection = polyphenyDbConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + TestHelper.addMongodb( mongoAdapterName, statement ); + initDatabase( "test_mongo" ); + } + } catch ( SQLException e ) { + // If there is an error while adding the adapter, the most likely reason it does not work + // is that 
docker is not running! + throw new RuntimeException( e ); + } + } + + + /** + * Runs the queries for each collection, and saves the result of the + * final query to a list. Afterward, we assert if the result of + * both systems match. + * + * @param queries Queries which are run for each system. Make sure + * the query contains a placeholder %s for the collection. + */ + private List runQueries( List queries ) { + List results = new ArrayList<>(); + + for ( String collection : collections ) { + DocResult finalResult = null; + for ( String queryWithPlaceholder : queries ) { + String query = queryWithPlaceholder.formatted( collection ); + finalResult = execute( query, namespace ); + } + results.add( finalResult ); + } + + // There should be 1 result per queries run for each collection. + assertEquals( collections.size(), results.size() ); + return results; + } + + + private void compareResults( List results ) { + compareResults( results.get( 0 ), results.get( 1 ) ); + } + + + private void compareResults( DocResult mongoResult, DocResult result ) { + assertEquals( mongoResult.data.length, result.data.length ); + + ObjectMapper objectMapper = new ObjectMapper(); + for ( int i = 0; i < result.data.length; i++ ) { + String document = result.data[i]; + String mongoDocument = mongoResult.data[i]; + + Map documentMap; + Map mongoDocumentMap; + try { + documentMap = objectMapper.readValue( document, new TypeReference<>() { + } ); + mongoDocumentMap = objectMapper.readValue( mongoDocument, new TypeReference<>() { + } ); + } catch ( JsonProcessingException e ) { + throw new RuntimeException( e ); + } + assertEquals( mongoDocumentMap.keySet(), documentMap.keySet() ); + + for ( Map.Entry entry : documentMap.entrySet() ) { + String key = entry.getKey(); + if ( Objects.equals( key, "_id" ) ) { + // Do not compare the _id, as this will be different. + continue; + } + Object value = entry.getValue(); + Object mongoValue = mongoDocumentMap.get( key ); + + compareValues( mongoValue, value ); + } + } + } + + + private static void compareValues( Object mongoValue, Object value ) { + if ( mongoValue instanceof Map val1 && value instanceof Map val2 ) { + assertEquals( val1.size(), val2.size() ); + assertEquals( val1.keySet(), val2.keySet() ); + for ( Object key : val1.keySet() ) { + Object subVal1 = val1.get( key ); + Object subVal2 = val2.get( key ); + compareValues( subVal1, subVal2 ); + } + return; + + } else if ( mongoValue instanceof Number val && value instanceof Number val2 ) { + if ( val.doubleValue() - val2.doubleValue() > 0.000001 ) { + throw new RuntimeException( "Floating point numbers are not withing accepted delta" ); + } + return; + } + assertEquals( mongoValue, value ); + } + +} diff --git a/dbms/src/test/java/org/polypheny/db/mql/MqlTestTemplate.java b/dbms/src/test/java/org/polypheny/db/mql/MqlTestTemplate.java index 90aec8084e..76125e6929 100644 --- a/dbms/src/test/java/org/polypheny/db/mql/MqlTestTemplate.java +++ b/dbms/src/test/java/org/polypheny/db/mql/MqlTestTemplate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
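One remark on compareValues in the new MqlGeoFunctionsTest above: the numeric branch only reports a mismatch when the MongoDB value exceeds the comparison value by more than the delta, so a deviation in the opposite direction would currently slip through. A symmetric tolerance check is a one-line change; sketch using the same 1e-6 delta as the test:

    } else if ( mongoValue instanceof Number val && value instanceof Number val2 ) {
        // Take the absolute difference so deviations in either direction are caught.
        if ( Math.abs( val.doubleValue() - val2.doubleValue() ) > 0.000001 ) {
            throw new RuntimeException( "Floating point numbers are not within accepted delta" );
        }
        return;
    }
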
diff --git a/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java b/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java index 378f233db3..fabb13b8ba 100644 --- a/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java +++ b/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java @@ -32,6 +32,7 @@ import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.polypheny.db.ResultIterator; import org.polypheny.db.TestHelper; @@ -169,7 +170,7 @@ private static void testMqlRoundTrip( String query ) throws NodeParseException { *

* Then we check whether PolyAlg1 equals PolyAlg2 and Result1 equals Result2. */ - private static void testQueryRoundTrip( String query, QueryLanguage ql, String namespace ) throws NodeParseException { + private static void testQueryRoundTrip( String query, QueryLanguage ql, String namespace ) { long ns = namespace == null ? Catalog.defaultNamespaceId : Catalog.snapshot().getNamespace( namespace ).orElseThrow().id; TransactionManager transactionManager = TransactionManagerImpl.getInstance(); Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, ns, new QueryAnalyzer(), ORIGIN ); @@ -525,6 +526,7 @@ public void mongoElementRefTest() throws NodeParseException { @Test + @Disabled // this leads to error as it tries to insert multiple _id with the same value, which is not possible in MongoDB public void mongoInsertTest() throws NodeParseException { testMqlRoundTrip( "db." + DOC_COLL + ".insertOne({item: \"canvas\"})" ); } diff --git a/dbms/src/test/java/org/polypheny/db/sql/fun/GeoFunctionsTest.java b/dbms/src/test/java/org/polypheny/db/sql/fun/GeoFunctionsTest.java index 6c6188ef51..f66b7ca483 100644 --- a/dbms/src/test/java/org/polypheny/db/sql/fun/GeoFunctionsTest.java +++ b/dbms/src/test/java/org/polypheny/db/sql/fun/GeoFunctionsTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableList; import java.sql.Connection; +import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Statement; import lombok.extern.slf4j.Slf4j; @@ -88,6 +89,20 @@ public void readGeo() throws SQLException { } } + + @Test + public void testInsert() throws SQLException { + try ( TestHelper.JdbcConnection polyphenyDbConnection = new TestHelper.JdbcConnection( true ) ) { + Connection connection = polyphenyDbConnection.getConnection(); + + PreparedStatement preparedInsert = connection.prepareStatement( "INSERT INTO TEST_GIS(id,geom) VALUES (?, ?)" ); + + preparedInsert.setInt( 1, 11 ); + preparedInsert.setString( 2, "ST_GeomFromText('POINT (9.289382 48.741588)', 4326))" ); + preparedInsert.execute(); + } + } + // --------------- Test spatial functions without the actual persisted data ---------------------- @@ -367,6 +382,36 @@ public void distanceFunctions() throws SQLException { } + @Test + public void persistance() throws SQLException { + try ( TestHelper.JdbcConnection polyphenyDbConnection = new TestHelper.JdbcConnection( true ) ) { + Connection connection = polyphenyDbConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + TestHelper.checkResultSet( + statement.executeQuery( "SELECT count(*) from TEST_GIS where ST_Distance(geom, ST_GeomFromText('POINT (9.289382 48.741588)', 4326)) < 135555" ), + ImmutableList.of( + new Object[]{ 2 } + ) ); + } + } + } + + + @Test + public void persistance2() throws SQLException { + try ( TestHelper.JdbcConnection polyphenyDbConnection = new TestHelper.JdbcConnection( true ) ) { + Connection connection = polyphenyDbConnection.getConnection(); + try ( Statement statement = connection.createStatement() ) { + TestHelper.checkResultSet( + statement.executeQuery( "SELECT Count(*) FROM TEST_GIS WHERE ST_DWithin(ST_GeomFromText('POINT (7.852923 47.998949)', 4326), geom, 135000)" ), + ImmutableList.of( + new Object[]{ 2 } + ) ); + } + } + } + + 
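    // Note on the two persistence checks above: they exercise the distance predicate in its two
    // common formulations. Under the usual OGC/PostGIS semantics, ST_DWithin(g1, g2, d) is
    // equivalent to ST_Distance(g1, g2) <= d; whether Polypheny's implementation matches this
    // exactly is an assumption here and is not asserted by these tests, which use different
    // reference points and thresholds.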
@Test public void setOperationsFunctions() throws SQLException { try ( TestHelper.JdbcConnection polyphenyDbConnection = new TestHelper.JdbcConnection( true ) ) { diff --git a/dbms/src/test/java/org/polypheny/db/transaction/GraphDeadlockDetectorTest.java b/dbms/src/test/java/org/polypheny/db/transaction/GraphDeadlockDetectorTest.java index 18d9f7aee9..f87b242c4e 100644 --- a/dbms/src/test/java/org/polypheny/db/transaction/GraphDeadlockDetectorTest.java +++ b/dbms/src/test/java/org/polypheny/db/transaction/GraphDeadlockDetectorTest.java @@ -9,7 +9,6 @@ import java.util.Set; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.mockito.Mock; import org.mockito.Mockito; import org.polypheny.db.transaction.deadlocks.GraphDeadlockDetector; import org.polypheny.db.transaction.locking.Lockable; diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java index e48972f6e4..2f915d7336 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java @@ -85,6 +85,16 @@ public static void registerOperators() { register( OperatorName.CYPHER_GEO_WITHIN, new LangFunctionOperator( "GEO_WITHIN", Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); + register( OperatorName.CYPHER_POINT, new LangFunctionOperator( "CYPHER_POINT", Kind.CYPHER_FUNCTION, PolyType.DOCUMENT ) ); + + register( OperatorName.DISTANCE, new LangFunctionOperator( "DISTANCE", Kind.CYPHER_FUNCTION, PolyType.DOUBLE ) ); + + register( OperatorName.DISTANCE_NEO4J, new LangFunctionOperator( "DISTANCE_NEO4J", Kind.CYPHER_FUNCTION, PolyType.DOUBLE ) ); + + register( OperatorName.CYPHER_WITHIN_BBOX, new LangFunctionOperator( "CYPHER_WITHIN_BBOX", Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); + + register( OperatorName.CYPHER_WITHIN_GEOMETRY, new LangFunctionOperator( "CYPHER_WITHIN_GEOMETRY", Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); + isInit = true; } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/clause/CypherReturnItem.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/clause/CypherReturnItem.java index 9f7f91f1fe..404ef57b9d 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/clause/CypherReturnItem.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/clause/CypherReturnItem.java @@ -23,6 +23,7 @@ import org.polypheny.db.cypher.expression.CypherAggregate; import org.polypheny.db.cypher.expression.CypherExpression; import org.polypheny.db.cypher.expression.CypherExpression.ExpressionType; +import org.polypheny.db.cypher.expression.CypherFunctionInvocation; import org.polypheny.db.cypher.expression.CypherVariable; import org.polypheny.db.languages.ParserPos; import org.polypheny.db.rex.RexNode; @@ -64,12 +65,17 @@ public CypherKind getCypherKind() { @Nullable public Pair getRex( CypherContext context, RexType type ) { if ( variable != null ) { + String name = variable.getName(); + if ( this.expression instanceof CypherFunctionInvocation func ) { + return Pair.of( PolyString.of( name ), func.getRexCall( context ) ); + } + // name -> aggregate // renaming of the field - String name = variable.getName(); if ( expression.getType() == ExpressionType.AGGREGATE ) { return ((CypherAggregate) expression).getAggregate( context, name ); } + return Pair.of( PolyString.of( name ), expression.getRex( 
context, type ).right ); } else { if ( expression.getType() == ExpressionType.AGGREGATE ) { diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java index 30afea7005..8fee2dc8cf 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/cypher2alg/CypherToAlgConverter.java @@ -32,6 +32,7 @@ import java.util.stream.Stream; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.NotImplementedException; import org.jetbrains.annotations.NotNull; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; @@ -404,6 +405,7 @@ public static class CypherContext { public final AlgDataType graphType; public final AlgDataType booleanType; + public final AlgDataType geometryType; public final AlgDataType nodeType; public final AlgDataType edgeType; public final AlgDataType pathType; @@ -430,6 +432,7 @@ private CypherContext( this.rexBuilder = rexBuilder; this.graphType = cluster.getTypeFactory().createPolyType( PolyType.GRAPH ); this.booleanType = cluster.getTypeFactory().createPolyType( PolyType.BOOLEAN ); + this.geometryType = cluster.getTypeFactory().createPolyType( PolyType.GEOMETRY ); this.nodeType = cluster.getTypeFactory().createPolyType( PolyType.NODE ); this.edgeType = cluster.getTypeFactory().createPolyType( PolyType.EDGE ); this.pathType = cluster.getTypeFactory().createPolyType( PolyType.PATH ); @@ -652,6 +655,8 @@ public AlgNode asValues( List> nameAndValues ) { List.of(), ImmutableList.of( ImmutableList.copyOf( nameAndValues.stream().map( e -> (RexLiteral) e.getValue() ).toList() ) ), new AlgRecordType( fields ) ); + } else if ( nameAndValues.stream().allMatch( v -> v.right.isA( Kind.CYPHER_FUNCTION ) ) ) { + throw new NotImplementedException( "RETURN statement with function call without MATCH currently not supported" ); } else { throw new UnsupportedOperationException(); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java index 99a5dd9c15..37620111c0 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherAddPlacement.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -75,7 +75,7 @@ public void execute( Context context, Statement statement, ParsedQueryContext pa .map( store -> adapterManager.getStore( store ).orElseThrow() ) .collect( Collectors.toList() ); - if ( !adapterManager.getAdapters().containsKey( store ) ) { + if ( !adapterManager.getAdapters().containsKey( store.toLowerCase() ) ) { throw new GenericRuntimeException( "The targeted storeId does not exist." 
); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java index 657d1a44cb..36bd381ab8 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/ddl/CypherDropPlacement.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,7 +18,6 @@ import java.util.List; import java.util.Map; -import java.util.stream.Stream; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.catalog.entity.logical.LogicalNamespace; @@ -59,9 +58,7 @@ public void execute( Context context, Statement statement, ParsedQueryContext pa List graphs = statement.getTransaction().getSnapshot().getNamespaces( new Pattern( this.databaseName ) ); - DataStore dataStore = Stream.of( storeName ) - .map( store -> adapterManager.getStore( storeName ).orElseThrow() ) - .toList().get( 0 ); + DataStore dataStore = adapterManager.getStore( storeName ).orElseThrow(); if ( graphs.size() != 1 ) { throw new GenericRuntimeException( "Error while adding graph placement" ); diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherExpression.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherExpression.java index 63223cc2b8..8a040e0b70 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherExpression.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherExpression.java @@ -76,6 +76,11 @@ public CypherExpression( ParserPos pos, ExpressionType type, CypherPattern patte public Pair getRex( CypherContext context, RexType type ) { + if ( this instanceof CypherFunctionInvocation func ) { + // var is null in case of function call + return Pair.of( PolyString.of( null ), func.getRexCall( context ) ); + } + OperatorName operatorName = switch ( this.type ) { // EveryPathPattern //return pattern.getPatternMatch( context ); diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherFunctionInvocation.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherFunctionInvocation.java index 0b2d227292..d16ab909dd 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherFunctionInvocation.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherFunctionInvocation.java @@ -16,13 +16,26 @@ package org.polypheny.db.cypher.expression; +import static org.polypheny.db.algebra.operators.OperatorName.CYPHER_POINT; + +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; import lombok.Getter; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.cypher.cypher2alg.CypherToAlgConverter.CypherContext; +import org.polypheny.db.cypher.cypher2alg.CypherToAlgConverter.RexType; +import org.polypheny.db.languages.OperatorRegistry; import 
org.polypheny.db.languages.ParserPos; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.util.Pair; @Getter public class CypherFunctionInvocation extends CypherExpression { @@ -42,6 +55,12 @@ public CypherFunctionInvocation( ParserPos beforePos, ParserPos namePos, List getArguments() { + return ImmutableList.copyOf( arguments ); + } + + + public OperatorName getOperatorName() { + return op; + } + + + @Override + public Pair getRex( CypherContext context, RexType type ) { + // At this point, we do not know what is on the left side of the Pair. + // The caller has to discard the left side, and use a variable name or something else. + return Pair.of( PolyString.of( "???" ), getRexCall( context ) ); + } + + + public RexNode getRexCall( CypherContext context ) { + switch ( getOperatorName() ) { + case CYPHER_POINT: { + // VERY UGLY, but it works for now. This could be improved by using the function MAP_OF_ENTRIES, + // but I am not sure how to call it. + CypherLiteral mapExpression = (CypherLiteral) getArguments().get( 0 ); + List arguments = new ArrayList<>(); + mapExpression.getMapValue().forEach( ( key, value ) -> { + Pair pair = value.getRex( context, RexType.PROJECT ); + arguments.add( context.rexBuilder.makeLiteral( key ) ); + arguments.add( pair.right ); + } ); + // Fill with NULL to make sure we have the correct amount of arguments. + // 3 coordinates + 3 names + srid + crs = up to 8 possible + while ( arguments.size() < 10 ) { + arguments.add( context.rexBuilder.makeNullLiteral( context.typeFactory.createUnknownType() ) ); + } + return new RexCall( + context.geometryType, + OperatorRegistry.get( QueryLanguage.from( "cypher" ), CYPHER_POINT ), + arguments ); + } + case DISTANCE: { + return new RexCall( + context.numberType, + // If the third argument for the point.distance function is the string 'neo4j', then + // use the spherical distance approximation of Neo4j. This uses a different earth radius. + // Otherwise, the default behavior is to use the same distance measurement as the other + // models. + OperatorRegistry.get( QueryLanguage.from( "cypher" ), arguments.size() == 3 + && arguments.get( 2 ) instanceof CypherLiteral neo4jFlag + && neo4jFlag.getValue().toString().equals( "neo4j" ) + ? 
OperatorName.DISTANCE_NEO4J + : OperatorName.DISTANCE + ), + List.of( + arguments.get( 0 ).getRex( context, RexType.PROJECT ).getRight(), + arguments.get( 1 ).getRex( context, RexType.PROJECT ).getRight() + ) ); + } + case DISTANCE_NEO4J: { + return new RexCall( + context.numberType, + OperatorRegistry.get( QueryLanguage.from( "cypher" ), OperatorName.DISTANCE_NEO4J ), + List.of( + arguments.get( 0 ).getRex( context, RexType.PROJECT ).getRight(), + arguments.get( 1 ).getRex( context, RexType.PROJECT ).getRight() + ) ); + } + case CYPHER_WITHIN_BBOX: + return new RexCall( + context.booleanType, + OperatorRegistry.get( QueryLanguage.from( "cypher" ), OperatorName.CYPHER_WITHIN_BBOX ), + List.of( + arguments.get( 0 ).getRex( context, RexType.PROJECT ).getRight(), + // CypherFunctionInvocation.getRex -> throw + // Because create function logic is implemented in + arguments.get( 1 ).getRex( context, RexType.PROJECT ).getRight(), + arguments.get( 2 ).getRex( context, RexType.PROJECT ).getRight() + ) ); + case CYPHER_WITHIN_GEOMETRY: + return new RexCall( + context.booleanType, + OperatorRegistry.get( QueryLanguage.from( "cypher" ), OperatorName.CYPHER_WITHIN_GEOMETRY ), + List.of( + arguments.get( 0 ).getRex( context, RexType.PROJECT ).getRight(), + // CypherFunctionInvocation.getRex -> throw + // Because create function logic is implemented in + arguments.get( 1 ).getRex( context, RexType.PROJECT ).getRight() + ) ); + default: + throw new NotImplementedException( "Cypher Function to alg conversion missing: " + getOperatorName() ); + } + } + } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherLiteral.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherLiteral.java index a41b6af1a3..2b73dc0a75 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherLiteral.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/expression/CypherLiteral.java @@ -24,6 +24,10 @@ import java.util.Map.Entry; import java.util.stream.Collectors; import lombok.Getter; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.GeometryFactory; +import org.locationtech.jts.geom.PrecisionModel; +import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.cypher.cypher2alg.CypherToAlgConverter.CypherContext; @@ -41,6 +45,7 @@ import org.polypheny.db.type.entity.graph.PolyDictionary; import org.polypheny.db.type.entity.numerical.PolyDouble; import org.polypheny.db.type.entity.numerical.PolyInteger; +import org.polypheny.db.type.entity.spatial.PolyGeometry; import org.polypheny.db.util.Pair; @Getter @@ -98,7 +103,7 @@ public CypherLiteral( ParserPos pos, Literal literalType, String image, boolean public enum Literal { - TRUE, FALSE, NULL, LIST, MAP, STRING, DOUBLE, DECIMAL, HEX, OCTAL, STAR + TRUE, FALSE, NULL, LIST, MAP, STRING, DOUBLE, DECIMAL, HEX, OCTAL, STAR, POINT } @@ -113,17 +118,85 @@ public PolyValue getComparable() { yield new PolyList<>( list ); } case MAP -> { - Map map = mapValue.entrySet().stream().collect( Collectors.toMap( e -> PolyString.of( e.getKey() ), e -> e.getValue().getComparable() ) ); + Map map = mapValue.entrySet().stream().collect( Collectors.toMap( e -> PolyString.of( e.getKey() ), e -> { + if ( e.getValue() instanceof CypherFunctionInvocation func && func.getOperatorName() == OperatorName.CYPHER_POINT ) { + if ( 
func.getArguments().get( 0 ) instanceof CypherLiteral literal ) { + return convertMapToPolyGeometry( literal.getMapValue() ); + } + } + return e.getValue().getComparable(); + } ) ); yield new PolyDictionary( map ); } case STRING, HEX, OCTAL -> PolyString.of( (String) value ); case DOUBLE -> PolyDouble.of( (Double) value ); case DECIMAL -> PolyInteger.of( (Integer) value ); + case POINT -> { + // TODO: What do we have to do here? + throw new UnsupportedOperationException(); + } case STAR -> throw new UnsupportedOperationException(); }; } + public PolyGeometry convertMapToPolyGeometry( Map map ) { + Coordinate coordinate = new Coordinate(); + boolean isCartesian = false; + boolean isSpherical = false; + boolean is3d = false; + + for ( String key : map.keySet() ) { + CypherExpression value = map.get( key ); + + double doubleValue; + if ( value.getComparable().isInteger() ) { + doubleValue = value.getComparable().asInteger().intValue(); + } else { + doubleValue = value.getComparable().asDouble().doubleValue(); + } + + switch ( key ) { + case "x": + coordinate.setX( doubleValue ); + isCartesian = true; + break; + case "y": + coordinate.setY( doubleValue ); + isCartesian = true; + break; + case "z": + coordinate.setZ( doubleValue ); + isCartesian = true; + is3d = true; + break; + case "latitude": + coordinate.setX( doubleValue ); + isSpherical = true; + break; + case "longitude": + coordinate.setY( doubleValue ); + isSpherical = true; + break; + case "height": + coordinate.setZ( doubleValue ); + isSpherical = true; + is3d = true; + break; + } + } + assert !(isCartesian && isSpherical) : "Mixing x/y and latitude/longitude when creating points is not allowed"; + + int WGS84_2D = 4326; + int WGS84_3D = 4947; + int srid = isCartesian + ? 0 + : (is3d ? WGS84_3D : WGS84_2D); + GeometryFactory geometryFactory = new GeometryFactory( new PrecisionModel(), srid ); + return PolyGeometry.of( geometryFactory.createPoint( coordinate ) ); + } + + @Override public Pair getRex( CypherContext context, RexType type ) { RexNode node = switch ( literalType ) { @@ -143,6 +216,10 @@ public Pair getRex( CypherContext context, RexType type ) { case STRING -> context.rexBuilder.makeLiteral( (String) value ); case DOUBLE -> context.rexBuilder.makeApproxLiteral( BigDecimal.valueOf( (Double) value ) ); case DECIMAL -> context.rexBuilder.makeExactLiteral( BigDecimal.valueOf( (Integer) value ) ); + case POINT -> { + AlgDataType dataType = context.typeFactory.createPolyType( PolyType.GEOMETRY ); + yield context.rexBuilder.makeLiteral( PolyGeometry.of( "SRID=0;POINT(56.7 12.78)" ), dataType, false ); + } }; return Pair.of( null, node ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java index d41a2ba804..305e2991c2 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcUtils.java @@ -56,9 +56,9 @@ import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationTable; import org.polypheny.db.sql.language.util.SqlTypeRepresentation; +import org.polypheny.db.type.entity.PolyBinary; import org.polypheny.db.type.entity.PolyBoolean; import org.polypheny.db.type.entity.PolyString; -import org.polypheny.db.type.entity.PolyBinary; import org.polypheny.db.type.entity.PolyValue; import 
org.polypheny.db.type.entity.numerical.PolyBigDecimal; import org.polypheny.db.type.entity.numerical.PolyDouble; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/ResultSetEnumerable.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/ResultSetEnumerable.java index 8f16f52b53..06c1cf849b 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/ResultSetEnumerable.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/ResultSetEnumerable.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java index 954873b502..e7990971e8 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java @@ -53,7 +53,6 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.Pair; @@ -111,6 +110,17 @@ class Implementor extends AlgShuttleImpl implements Serializable { @Setter private Operation operation; + /** + * This list will be used to create necessary indexes when performing $near, + * $nearSphere or a $geoNear query. It contains the index and index type, + * separated by a newline character (because that can't be part of a field name). + * The index type can be '2d' or '2dsphere' for now. + * + * List[String] instead of List[Pair[String,String]] because Pair is difficult + * to serialize. + */ + public final List indexAndIndexType = new ArrayList<>(); + public Implementor() { isDML = false; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index 2a8c4cf453..5ee94c3c7e 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
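For context on the indexAndIndexType list added to MongoAlg.Implementor above: MongoDB requires a geospatial index for $near, $nearSphere and $geoNear, a 2d index for legacy coordinate pairs and a 2dsphere index for GeoJSON. The planner therefore records the field name and index type as one newline-separated string, and MongoEntity.aggregate below splits each entry and creates the index before running the pipeline. A minimal sketch of that round trip; the field name "legacy" is only an example:

    // Planning side: remember that the queried field needs a 2d index.
    implementor.indexAndIndexType.add( "legacy" + "\n" + "2d" );

    // Execution side, mirroring the split in MongoEntity.aggregate:
    String[] parts = "legacy\n2d".split( "\n" );
    BsonDocument index = new BsonDocument();
    index.put( parts[0], new BsonString( parts[1] ) );   // { "legacy": "2d" }
    // getEntity().getCollection().createIndex( index );
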
@@ -64,6 +64,7 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.bson.BsonDocument; +import org.bson.BsonString; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.json.JsonMode; @@ -394,7 +395,7 @@ private MongoEntity getEntity() { * @see MongoMethod#MONGO_QUERYABLE_AGGREGATE */ @SuppressWarnings("UnusedDeclaration") - public Enumerable aggregate( MongoTupleType tupleType, List operations, List preProjections, List logicalCols ) { + public Enumerable aggregate( MongoTupleType tupleType, List operations, List preProjections, List logicalCols, List ddls ) { ClientSession session = getEntity().getTransactionProvider().getSession( dataContext.getStatement().getTransaction().getXid() ); dataContext.getStatement().getTransaction().registerInvolvedAdapter( AdapterManager.getInstance().getStore( (int) this.getEntity().getStoreId() ).orElseThrow() ); @@ -403,6 +404,17 @@ public Enumerable aggregate( MongoTupleType tupleType, List values = dataContext.getParameterValues().get( 0 ); } + if ( !ddls.isEmpty() ) { + for ( String ddl : ddls ) { + String[] ddlArray = ddl.split( "\n" ); + String field = ddlArray[0]; + String indexType = ddlArray[1]; + BsonDocument index = new BsonDocument(); + index.put( field, new BsonString( indexType ) ); + getEntity().getCollection().createIndex( index ); + } + } + return getEntity().aggregate( dataContext, session, @@ -469,6 +481,7 @@ private long doDML( Operation operation, String filter, List operations, // prepared MongoDynamic util = MongoDynamic.create( BsonDocument.parse( operations.get( 0 ) ), bucket, getEntity().getDataModel() ); List inserts = util.getAll( dataContext.getParameterValues() ); + entity.getCollection().insertMany( session, inserts ); return inserts.size(); } else { diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java index 5f61f68972..105aca61d2 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoMethod.java @@ -47,7 +47,7 @@ */ public enum MongoMethod { MONGO_QUERYABLE_FIND( MongoEntity.MongoQueryable.class, "find", String.class, String.class, MongoTupleType.class ), - MONGO_QUERYABLE_AGGREGATE( MongoEntity.MongoQueryable.class, "aggregate", MongoTupleType.class, List.class, List.class, List.class ), + MONGO_QUERYABLE_AGGREGATE( MongoEntity.MongoQueryable.class, "aggregate", MongoTupleType.class, List.class, List.class, List.class, List.class ), HANDLE_DIRECT_DML( MongoEntity.MongoQueryable.class, "handleDirectDML", Operation.class, String.class, List.class, boolean.class, boolean.class ); public final Method method; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java index d93805e2f3..dc99a93788 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java @@ -44,8 +44,8 @@ public class MongoDocumentProject extends DocumentProject implements MongoAlg { * @param includes * @param excludes */ - protected MongoDocumentProject( AlgCluster cluster, AlgTraitSet traits, 
AlgNode input, @NotNull Map includes, @NotNull List excludes ) { - super( cluster, traits, input, includes, excludes ); + protected MongoDocumentProject( AlgCluster cluster, AlgTraitSet traits, AlgNode input, @NotNull Map includes, @NotNull List excludes, @NotNull Map adds ) { + super( cluster, traits, input, includes, excludes, adds ); } @@ -80,7 +80,7 @@ private void adjustRoot( Implementor implementor, RexToMongoTranslator translato @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new MongoDocumentProject( getCluster(), traitSet, sole( inputs ), includes, excludes ); + return new MongoDocumentProject( getCluster(), traitSet, sole( inputs ), includes, excludes, adds ); } } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java index 0a0bc060c6..2605b583d9 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,12 +26,15 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Objects; +import java.util.UUID; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; import org.bson.BsonArray; import org.bson.BsonBoolean; import org.bson.BsonDocument; +import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonNull; import org.bson.BsonRegularExpression; @@ -39,6 +42,7 @@ import org.bson.BsonValue; import org.bson.json.JsonMode; import org.bson.json.JsonWriterSettings; +import org.locationtech.jts.geom.Coordinate; import org.polypheny.db.adapter.mongodb.MongoAlg; import org.polypheny.db.adapter.mongodb.bson.BsonDynamic; import org.polypheny.db.adapter.mongodb.bson.BsonFunctionHelper; @@ -63,7 +67,13 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.sql.language.fun.SqlItemOperator; import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.entity.PolyString; import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.category.PolyNumber; +import org.polypheny.db.type.entity.numerical.PolyDouble; +import org.polypheny.db.type.entity.spatial.PolyGeometry; +import org.polypheny.db.type.entity.spatial.PolyLinearRing; +import org.polypheny.db.type.entity.spatial.PolyPoint; import org.polypheny.db.util.BsonUtil; import org.polypheny.db.util.JsonBuilder; @@ -98,9 +108,18 @@ public void implement( Implementor implementor ) { implementor.visitChild( 0, getInput() ); // to not break the existing functionality for now we have to handle it this way Translator translator; - translator = new Translator( MongoRules.mongoFieldNames( getTupleType() ), getTupleType(), implementor ); - translator.translateMatch( condition, implementor ); + + if ( condition.getKind() == Kind.MQL_NEAR ) { + // $near, $nearSphere and $geoNear are not allowed in an aggregation pipeline, which is why we cannot translate them inside the match stage. + // Instead, we will translate them into a $geoNear stage. 
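+ // Illustrative example (field name "location" and coordinates are hypothetical): a filter { location: { $near: { $geometry: { type: "Point", coordinates: [ 7.58, 47.56 ] } } } } is emitted as a { $geoNear: { key: "location", near: { type: "Point", coordinates: [ 7.58, 47.56 ] }, distanceField: "__temp_<uuid>" } } stage followed by an $unset of the temporary distance field, instead of a $match stage.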
+ translator.translateNearAsGeoNear( condition, implementor ); + } else if ( condition.getKind() == Kind.MQL_GEO_NEAR ) { + // Translate $geoNear to $geoNear, without a match stage. + translator.translateGeoNear( condition, implementor ); + } else { + translator.translateMatch( condition, implementor ); + } } @@ -308,6 +327,15 @@ private void translateMatch2( RexNode node ) { case MQL_ELEM_MATCH: translateElemMatch( (RexCall) node ); return; + case MQL_GEO_INTERSECTS: + translateGeoIntersects( (RexCall) node ); + return; + case MQL_GEO_WITHIN: + translateGeoWithin( (RexCall) node ); + return; + case MQL_NEAR: + translateNear( (RexCall) node ); + return; case IS_NOT_TRUE: translateIsTrue( (RexCall) node, true ); return; @@ -643,6 +671,244 @@ private void translateRegex( RexCall node ) { } + private void translateGeoWithin( RexCall node ) { + String left = getParamAsKey( node.operands.get( 0 ) ); + PolyGeometry filterGeometry = getLiteralAs( node, 1, PolyValue::asGeometry ); + double distance = getLiteralAs( node, 2, p -> p.asDouble().doubleValue() ); + + // If we have an SRID we either have a GeoJSON object or a $centerSphere operand. + if ( filterGeometry.getSRID() != 0 ) { + + // $centerSphere + if ( distance > 0 ) { + BsonDocument centerSphere = new BsonDocument(); + BsonArray array = new BsonArray(); + PolyPoint point = filterGeometry.asPoint(); + BsonArray pointArray = new BsonArray(); + pointArray.add( new BsonDouble( point.getX() ) ); + pointArray.add( new BsonDouble( point.getY() ) ); + array.add( pointArray ); + array.add( new BsonDouble( distance ) ); + + centerSphere.put( "$centerSphere", array ); + BsonDocument geoWithin = new BsonDocument(); + geoWithin.put( "$geoWithin", centerSphere ); + attachCondition( null, left, geoWithin ); + return; + } + // GeoJSON + else { + BsonDocument geometry = new BsonDocument(); + geometry.put( "$geometry", BsonDocument.parse( filterGeometry.toJson() ) ); + BsonDocument geoWithin = new BsonDocument(); + geoWithin.put( "$geoWithin", geometry ); + attachCondition( null, left, geoWithin ); + return; + } + + } else { + // $sphere + if ( distance > 0 ) { + BsonDocument center = new BsonDocument(); + BsonArray array = new BsonArray(); + PolyPoint point = filterGeometry.asPoint(); + BsonArray pointArray = new BsonArray(); + pointArray.add( new BsonDouble( point.getX() ) ); + pointArray.add( new BsonDouble( point.getY() ) ); + array.add( pointArray ); + array.add( new BsonDouble( distance ) ); + + center.put( "$center", array ); + BsonDocument geoWithin = new BsonDocument(); + geoWithin.put( "$geoWithin", center ); + attachCondition( null, left, geoWithin ); + return; + } + + // $box, $polygon + if ( filterGeometry.isPolygon() ) { + PolyLinearRing linearRing = filterGeometry.asPolygon().getExteriorRing(); + Coordinate[] coordinates = linearRing.getJtsGeometry().getCoordinates(); + + BsonDocument polygon = new BsonDocument(); + BsonArray coordinateArray = new BsonArray(); + for ( Coordinate coordinate : coordinates ) { + BsonArray coordinatePair = new BsonArray(); + coordinatePair.add( new BsonDouble( coordinate.getX() ) ); + coordinatePair.add( new BsonDouble( coordinate.getY() ) ); + coordinateArray.add( coordinatePair ); + } + polygon.put( "$polygon", coordinateArray ); + + BsonDocument geoWithin = new BsonDocument(); + geoWithin.put( "$geoWithin", polygon ); + attachCondition( null, left, geoWithin ); + return; + } + } + + // Something went wrong. 
Either we did not handle all cases, or the input is not as expected, + // and should never have been parsed correctly in the first place. + throw new GenericRuntimeException( "Cannot translate $geoWithin to MongoDB query." ); + } + + + private void translateNearAsGeoNear( RexNode condition, Implementor implementor ) { + BsonDocument value = translateFinalOr( condition ); + if ( !value.isEmpty() ) { + if ( !preProjections.isEmpty() ) { + implementor.add( null, MongoAlg.Implementor.toJson( new BsonDocument( "$addFields", preProjections ) ) ); + } + + BsonValue near = value.getDocument( value.getFirstKey() ).get( "$near" ); + String fieldName = value.getFirstKey(); + if ( near.isDocument() ) { + // Point is specified as GeoJSON -> Create a 2dsphere index + implementor.indexAndIndexType.add( "%s\n2dsphere".formatted( fieldName ) ); + } else if ( near.isArray() ) { + // Point is specified as legacy coordinates -> Create a 2d index + implementor.indexAndIndexType.add( "%s\n2d".formatted( fieldName ) ); + } else { + throw new NotImplementedException( "Unexpected value for $near." ); + } + + final String distanceField = "__temp_%s".formatted( UUID.randomUUID().toString() ); + BsonDocument geoNearOptions = new BsonDocument(); + geoNearOptions.put( "distanceField", new BsonString( distanceField ) ); + geoNearOptions.put( "key", new BsonString( fieldName ) ); + geoNearOptions.put( "near", near ); + implementor.add( null, MongoAlg.Implementor.toJson( new BsonDocument( "$geoNear", geoNearOptions ) ) ); + implementor.add( null, MongoAlg.Implementor.toJson( new BsonDocument( "$unset", new BsonString( distanceField ) ) ) ); + } + + } + + + private void translateGeoNear( RexNode condition, Implementor implementor ) { + if ( !(condition instanceof RexCall node) ) { + throw new GenericRuntimeException( "Was expecting MQL_GEO_NEAR to be of type RexCall" ); + } + + assert node.operands.size() == 8; + PolyGeometry nearGeometry = getLiteralAs( node, 0, PolyValue::asGeometry ); + RexNameRef distanceField = (RexNameRef) node.operands.get( 1 ); + PolyDouble distanceMultiplier = getLiteralAs( node, 2, PolyValue::asDouble ); + RexNameRef includeLocs = (RexNameRef) node.operands.get( 3 ); + PolyString key = getLiteralAs( node, 4, PolyValue::asString ); + PolyDouble maxDistance = getLiteralAs( node, 5, PolyValue::asDouble ); + PolyDouble minDistance = getLiteralAs( node, 6, PolyValue::asDouble ); + RexNode query = node.operands.get( 7 ); + + BsonDocument geoNearOptions = new BsonDocument(); + // Required + geoNearOptions.put( "distanceField", new BsonString( distanceField.name ) ); + if ( nearGeometry.getSRID() == 0 ) { + // We have no SRID -> Use legacy coordinates like this: + // $near: [ 0, 0 ] + BsonDocument leftBody = new BsonDocument(); + BsonArray legacyCoordinates = new BsonArray(); + legacyCoordinates.add( new BsonDouble( nearGeometry.asPoint().getX() ) ); + legacyCoordinates.add( new BsonDouble( nearGeometry.asPoint().getY() ) ); + geoNearOptions.put( "near", legacyCoordinates ); + } else { + // We have an SRID.
This could be because of two cases: + // We use $nearSphere with legacy coordinates OR We use $near or $nearSphere with a GeoJSON object + // In both cases we can convert the object to a GeoJSON object + geoNearOptions.put( "near", BsonDocument.parse( nearGeometry.toJson() ) ); + } + + // Optional + if ( distanceMultiplier.getValue() > 1.0 ) { + geoNearOptions.put( "distanceMultiplier", new BsonDouble( distanceMultiplier.getValue() ) ); + } + if ( !includeLocs.name.isEmpty() ) { + geoNearOptions.put( "includeLocs", new BsonString( includeLocs.name ) ); + } + if ( !key.getValue().isEmpty() ) { + geoNearOptions.put( "key", new BsonString( key.getValue() ) ); + // If the user specified which field should be used for the query, then we can create the index + // if it does not exist. If there is no index, and key is not set, the operation will fail. + // Normally this field is used if there are multiple indexes, so that we can decide which to use. + implementor.indexAndIndexType.add( + "%s\n%s".formatted( key.getValue(), nearGeometry.getSRID() == 0 ? "2d" : "2dsphere" ) + ); + } + if ( maxDistance.getValue() != -1.0 ) { + geoNearOptions.put( "maxDistance", new BsonDouble( maxDistance.getValue() ) ); + } + if ( minDistance.getValue() != -1.0 ) { + geoNearOptions.put( "minDistance", new BsonDouble( minDistance.getValue() ) ); + } + + if ( query instanceof RexCall ) { + // TODO RB: Query is wrongly converted to JSON. Double gets converted to String. + // MQL_GTE(RexNameRef(names=[num], index=Optional.empty), 2) => {"num": {"$gte": "2"}} + BsonDocument queryDocument = translateFinalOr( query ); + geoNearOptions.put( "query", queryDocument ); + } + + implementor.add( null, MongoAlg.Implementor.toJson( new BsonDocument( "$geoNear", geoNearOptions ) ) ); + } + + + private void translateNear( RexCall node ) { + assert node.operands.size() == 4; + String left = getParamAsKey( node.operands.get( 0 ) ); + PolyGeometry filterGeometry = getLiteralAs( node, 1, PolyValue::asGeometry ); + PolyNumber minDistance = getLiteralAs( node, 2, p -> (PolyNumber) p ); + PolyNumber maxDistance = getLiteralAs( node, 3, p -> (PolyNumber) p ); + + if ( filterGeometry.getSRID() == 0 ) { + // We have no SRID -> Use legacy coordinates like this: + // $near: [ 0, 0 ] + BsonDocument leftBody = new BsonDocument(); + BsonArray legacyCoordinates = new BsonArray(); + legacyCoordinates.add( new BsonDouble( filterGeometry.asPoint().getX() ) ); + legacyCoordinates.add( new BsonDouble( filterGeometry.asPoint().getY() ) ); + leftBody.put( "$near", legacyCoordinates ); + + // Only $maxDistance is allowed when $near is used with legacy coordinates. + if ( !maxDistance.isInteger() || maxDistance.intValue() != -1 ) { + leftBody.put( "$maxDistance", BsonUtil.getAsBson( maxDistance, maxDistance.type, bucket ) ); + } + + // Executing this query requires the 2d index to be created on the 'left' field. + attachCondition( null, left, leftBody ); + return; + } else { + // We have an SRID.
This could be because of two cases: + // We use $nearSphere with legacy coordinates OR We use $near or $nearSphere with a GeoJSON object + // In both cases we can convert the object to a GeoJSON object + BsonDocument leftBody = new BsonDocument(); + BsonDocument near = new BsonDocument(); + leftBody.put( "$near", near ); + near.put( "$geometry", BsonDocument.parse( filterGeometry.toJson() ) ); + + if ( !minDistance.isInteger() || minDistance.intValue() != -1 ) { + near.put( "$minDistance", BsonUtil.getAsBson( minDistance, minDistance.type, bucket ) ); + } + if ( !maxDistance.isInteger() || maxDistance.intValue() != -1 ) { + near.put( "$maxDistance", BsonUtil.getAsBson( maxDistance, maxDistance.type, bucket ) ); + } + + // Executing this query requires the 2dsphere index to be created on the 'left' field. + attachCondition( null, left, leftBody ); + return; + } + } + + + private void translateGeoIntersects( RexCall node ) { + String left = getParamAsKey( node.operands.get( 0 ) ); + PolyGeometry filterGeometry = getLiteralAs( node, 1, PolyValue::asGeometry ); + BsonDocument geometry = new BsonDocument(); + geometry.put( "$geometry", BsonDocument.parse( filterGeometry.toJson() ) ); + BsonDocument geoIntersects = new BsonDocument(); + geoIntersects.put( "$geoIntersects", geometry ); + attachCondition( null, left, geoIntersects ); + } + + + private E getLiteralAs( RexCall node, int pos, Function transformer ) { return transformer.apply( node.operands.get( pos ).unwrapOrThrow( RexLiteral.class ).value ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java index 91b6ac5077..799edbd2c5 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
@@ -27,6 +27,7 @@ import org.polypheny.db.adapter.mongodb.MongoConvention; import org.polypheny.db.adapter.mongodb.MongoEntity; import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgFieldCollation.Direction; import org.polypheny.db.algebra.AlgFieldCollation.NullDirection; import org.polypheny.db.algebra.AlgNode; @@ -255,6 +256,16 @@ public static boolean supports( Sort sort ) { @Override public AlgNode convert( AlgNode alg ) { final Sort sort = (Sort) alg; + + for ( AlgFieldCollation field : sort.collation.getFieldCollations() ) { + // mongodb sorts nulls first for ascending (or desc, nulls last), so this requires expensive precalcs, + if ( field.nullDirection == NullDirection.LAST && field.direction == Direction.ASCENDING ) { + return null; + } else if ( field.direction == Direction.DESCENDING && field.nullDirection == NullDirection.FIRST ) { + return null; + } + } + final AlgTraitSet traitSet = sort.getTraitSet().replace( out ).replace( sort.getCollation() ); return new MongoSort( alg.getCluster(), @@ -423,7 +434,8 @@ public AlgNode convert( AlgNode alg ) { traitSet, convert( project.getInput(), out ), project.includes, - project.excludes ); + project.excludes, + project.adds ); } } @@ -470,6 +482,7 @@ public Void visitCall( RexCall call ) { || call.operands.stream().anyMatch( o -> o.isA( Kind.QUERY ) ) || operator.getOperatorName() == OperatorName.COT || operator.getOperatorName() == OperatorName.TRIM + || operator.getOperatorName() == OperatorName.IS_NULL || operator.getOperatorName() == OperatorName.INITCAP || operator.getOperatorName() == OperatorName.SUBSTRING || operator.getOperatorName() == OperatorName.FLOOR diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java index 0d21c4a23f..7132db6794 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java @@ -33,6 +33,7 @@ import org.polypheny.db.plan.AlgPlanner; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexNameRef; import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.Util; @@ -68,9 +69,15 @@ public void implement( Implementor implementor ) { final List keys = new ArrayList<>(); final List fields = getTupleType().getFields(); for ( AlgFieldCollation fieldCollation : collation.getFieldCollations() ) { + String name; + if ( !fieldExps.isEmpty() && fieldExps.size() > fieldCollation.getFieldIndex() && fieldExps.get( fieldCollation.getFieldIndex() ) instanceof RexNameRef rexNameRef ) { + name = rexNameRef.getName(); + } else { + // for relational cases + name = fields.get( fieldCollation.getFieldIndex() ).getName(); + } + // we can only sort by field not by db.collection.field - String name = - fields.get( fieldCollation.getFieldIndex() ).getName(); String[] splits = name.split( "\\." 
); name = splits[splits.length - 1]; name = MongoRules.adjustName( name ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java index ce9f4c9b2d..c3ce68d2ac 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java @@ -96,6 +96,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { Expression enumerable; if ( !mongoImplementor.isDML() ) { + final Expression ddls = list.append( list.newName( "ddls" ), constantArrayList( mongoImplementor.indexAndIndexType, String.class ) ); final Expression logicalCols = list.append( "logical", constantArrayList( @@ -103,7 +104,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { final Expression preProjects = list.append( "prePro", constantArrayList( mongoImplementor.getPreProjects(), String.class ) ); enumerable = list.append( list.newName( "enumerable" ), - Expressions.call( table, MongoMethod.MONGO_QUERYABLE_AGGREGATE.method, tupleTypes, ops, preProjects, logicalCols ) ); + Expressions.call( table, MongoMethod.MONGO_QUERYABLE_AGGREGATE.method, tupleTypes, ops, preProjects, logicalCols, ddls ) ); } else { final Expression operations = list.append( list.newName( "operations" ), constantArrayList( mongoImplementor.getOperations(), String.class ) ); final Expression operation = list.append( list.newName( "operation" ), Expressions.constant( mongoImplementor.getOperation(), Operation.class ) ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/RexToMongoTranslator.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/RexToMongoTranslator.java index dd88c0553a..8cc6a3c5a6 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/RexToMongoTranslator.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/RexToMongoTranslator.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/plugins/mql-language/src/main/codegen/DocumentParser.jj b/plugins/mql-language/src/main/codegen/DocumentParser.jj index de3ba704a9..0add2eb73f 100644 --- a/plugins/mql-language/src/main/codegen/DocumentParser.jj +++ b/plugins/mql-language/src/main/codegen/DocumentParser.jj @@ -14,6 +14,7 @@ import org.polypheny.db.runtime.PolyphenyDbContextException; import org.polypheny.db.languages.mql.MqlAggregate; import org.polypheny.db.languages.mql.MqlCount; import org.polypheny.db.languages.mql.MqlCreateCollection; +import org.polypheny.db.languages.mql.MqlCreateIndex; import org.polypheny.db.languages.mql.MqlCreateView; import org.polypheny.db.languages.mql.MqlDelete; import org.polypheny.db.languages.mql.MqlDrop; @@ -182,6 +183,8 @@ TOKEN : /* IDENTIFIERS */ // for nested documentValues check out Document_Splits | < CREATE_COLLECTION : "createCollection(" > | +< CREATE_INDEX : "createIndex(" > +| < RENAME_COLLECTION : "renameCollection(" > | < GET_COLLECTION : "getCollection(" > @@ -475,6 +478,8 @@ MqlNode Collection_Statement( String collection, String namespace ): n = Replace_Statement( collection, namespace ) | n = Rename_Statement( collection, namespace ) + | + n = CreateIndex_Statement( collection, namespace ) | n = Drop_Statement( collection, namespace ) | @@ -547,6 +552,17 @@ MqlNode Rename_Statement(String collection, String namespace): } } +MqlNode CreateIndex_Statement(String collection, String namespace): +{ + BsonDocument options = null; +} +{ + < CREATE_INDEX > options = Document() ")" + { + return new MqlCreateIndex(getPos(), collection, namespace, options); + } +} + MqlNode Drop_Statement(String collection, String namespace): { BsonDocument doc = null; diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java index 0b3ab13366..df8b9d636f 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java @@ -198,6 +198,8 @@ public static void registerOperators() { register( OperatorName.MQL_MERGE, new LangFunctionOperator( OperatorName.MQL_MERGE.name(), Kind.OTHER, PolyType.DOCUMENT ) ); + register( OperatorName.MQL_MERGE_ADD, new LangFunctionOperator( OperatorName.MQL_MERGE_ADD.name(), Kind.OTHER, PolyType.DOCUMENT ) ); + register( OperatorName.MQL_REPLACE_ROOT, new LangFunctionOperator( OperatorName.MQL_REPLACE_ROOT.name(), Kind.OTHER, PolyType.DOCUMENT ) ); register( OperatorName.MQL_PROJECT_INCLUDES, new LangFunctionOperator( OperatorName.MQL_PROJECT_INCLUDES.name(), Kind.OTHER, PolyType.DOCUMENT ) ); @@ -212,6 +214,20 @@ public static void registerOperators() { register( OperatorName.MINUS, new LangFunctionOperator( OperatorName.MINUS.name(), Kind.MINUS, PolyType.DOCUMENT ) ); + // Geospatial Functions + + register( OperatorName.MQL_GEO_INTERSECTS, new LangFunctionOperator( "MQL_GEO_INTERSECTS", Kind.MQL_GEO_INTERSECTS, PolyType.BOOLEAN ) ); + + register( OperatorName.MQL_GEO_WITHIN, new LangFunctionOperator( "MQL_GEO_WITHIN", Kind.MQL_GEO_WITHIN, PolyType.BOOLEAN ) ); + + register( OperatorName.MQL_GEO_DISTANCE, new LangFunctionOperator( "MQL_GEO_DISTANCE", Kind.MQL_GEO_DISTANCE, PolyType.BOOLEAN ) ); + + register( OperatorName.MQL_NEAR, new LangFunctionOperator( "MQL_NEAR", Kind.MQL_NEAR, PolyType.OTHER ) ); + + register( OperatorName.MQL_NEAR_SPHERE, new LangFunctionOperator( "MQL_NEAR_SPHERE", 
Kind.MQL_NEAR_SPHERE, PolyType.OTHER ) ); + + register( OperatorName.MQL_GEO_NEAR, new LangFunctionOperator( "MQL_GEO_NEAR", Kind.MQL_GEO_NEAR, PolyType.OTHER ) ); + isInit = true; } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/Mql.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/Mql.java index aecf01f348..18dad57de6 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/Mql.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/Mql.java @@ -32,7 +32,9 @@ public class Mql { Type.DROP_DATABASE, Type.RENAME_COLLECTION, Type.ADD_PLACEMENT, - Type.DELETE_PLACEMENT ); + Type.DELETE_PLACEMENT, + Type.CREATE_INDEX + ); static List DQL = Arrays.asList( Type.SELECT, Type.FIND, @@ -83,6 +85,7 @@ public enum Type { COUNT, CREATE_VIEW, CREATE_COLLECTION, + CREATE_INDEX, ADD_PLACEMENT, DELETE_PLACEMENT, DROP_DATABASE, diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateIndex.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateIndex.java new file mode 100644 index 0000000000..ce25137774 --- /dev/null +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql/MqlCreateIndex.java @@ -0,0 +1,78 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.languages.mql; + +import java.util.Map; +import org.bson.BsonDocument; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.languages.mql.Mql.Type; +import org.polypheny.db.nodes.ExecutableStatement; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.processing.QueryContext.ParsedQueryContext; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.transaction.locking.Lockable; +import org.polypheny.db.transaction.locking.Lockable.LockType; +import org.polypheny.db.transaction.locking.LockableUtils; + +public class MqlCreateIndex extends MqlNode implements ExecutableStatement { + + // Key = field on which index should be created + // Value = index type 2d or 2dsphere + private final BsonDocument options; + private final String collection; + + + public MqlCreateIndex( ParserPos pos, String collection, String namespace, BsonDocument options ) { + super( pos, namespace ); + this.collection = collection; + this.options = options; + } + + + @Override + public Type getMqlKind() { + return Type.CREATE_INDEX; + } + + + @Override + public String toString() { + return "MqlCreateIndex{" + + "name='" + collection + '\'' + + '}'; + } + + + @Override + public @Nullable String getEntity() { + return collection; + } + + + @Override + public void execute( Context context, Statement statement, ParsedQueryContext parsedQueryContext ) { + throw new UnsupportedOperationException(); + } + + + @Override + public Map deriveLockables( Context context, ParsedQueryContext parsedQueryContext ) { + return LockableUtils.getMapOfNamespaceLockableFromContext( context, parsedQueryContext, LockType.EXCLUSIVE ); + } + +} diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index edc8372c9f..7e3c84d608 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -16,6 +16,8 @@ package org.polypheny.db.languages.mql2alg; +import static org.polypheny.db.type.entity.spatial.PolyGeometry.WGS_84; + import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; @@ -27,17 +29,22 @@ import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; +import java.util.UUID; import java.util.function.BiFunction; import java.util.stream.Collectors; import org.bson.BsonArray; import org.bson.BsonBoolean; import org.bson.BsonDocument; +import org.bson.BsonDouble; import org.bson.BsonInt32; import org.bson.BsonNumber; import org.bson.BsonRegularExpression; import org.bson.BsonString; import org.bson.BsonValue; import org.jetbrains.annotations.Nullable; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.GeometryFactory; +import org.locationtech.jts.geom.PrecisionModel; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgFieldCollation; @@ -104,6 +111,8 @@ import org.polypheny.db.type.entity.numerical.PolyBigDecimal; import org.polypheny.db.type.entity.numerical.PolyDouble; import org.polypheny.db.type.entity.numerical.PolyInteger; +import org.polypheny.db.type.entity.spatial.InvalidGeometryException; +import org.polypheny.db.type.entity.spatial.PolyGeometry; import 
org.polypheny.db.util.BsonUtil; import org.polypheny.db.util.DateString; import org.polypheny.db.util.Pair; @@ -779,6 +788,9 @@ private AlgNode convertAggregate( MqlAggregate query, AlgDataType rowType, AlgNo case "$replaceWith": node = combineReplaceRoot( value.asDocument().get( "$replaceWith" ), node, true ); break; + case "$geoNear": + node = convertGeoNear( value.asDocument().getDocument( "$geoNear" ), node, rowType ); + break; // todo dl add more pipeline statements default: throw new IllegalStateException( "Unexpected value: " + ((BsonDocument) value).getFirstKey() ); @@ -837,7 +849,8 @@ private AlgNode combineReplaceRoot( BsonValue value, AlgNode node, boolean isWit project = LogicalDocumentProject.create( node, nodes, - List.of() + List.of(), + Map.of() ); } return project; @@ -1131,13 +1144,177 @@ private AlgNode wrapLimit( AlgNode node, int limit ) { } + + private AlgNode combineGeoNear( BsonDocument options, AlgNode node, AlgDataType rowType ) { + // 1. (optional) query + if ( options.containsKey( "query" ) ) { + node = combineFilter( options.getDocument( "query" ), node, rowType ); + } + + // 2. Handle conversion from near to $near/$nearSphere, minDistance, maxDistance, spherical + boolean isSpherical = options.containsKey( "spherical" ) && options.get( "spherical" ).asBoolean().getValue(); + String nearKey = isSpherical + ? "$nearSphere" + : "$near"; + BsonDocument nearDocument = new BsonDocument(); + BsonValue nearValue = options.get( "near" ); + + if ( nearValue.isArray() ) { + nearDocument.put( nearKey, nearValue ); + + // meters for GeoJSON, radians for legacy coordinates + if ( options.containsKey( "minDistance" ) ) { + nearDocument.put( "$minDistance", options.get( "minDistance" ) ); + } + if ( options.containsKey( "maxDistance" ) ) { + nearDocument.put( "$maxDistance", options.get( "maxDistance" ) ); + } + } else if ( nearValue.isDocument() ) { + nearDocument.put( nearKey, new BsonDocument( "$geometry", nearValue ) ); + if ( options.containsKey( "minDistance" ) ) { + nearDocument.getDocument( nearKey ).put( "$minDistance", options.get( "minDistance" ) ); + } + if ( options.containsKey( "maxDistance" ) ) { + nearDocument.getDocument( nearKey ).put( "$maxDistance", options.get( "maxDistance" ) ); + } + } else { + throw new GenericRuntimeException( "Value of near can only be a GeoJSON object or a pair of legacy coordinates." ); + } + + String inputDistanceField; + if ( options.containsKey( "key" ) ) { + inputDistanceField = options.getString( "key" ).getValue(); + } else { + throw new GenericRuntimeException( "The key option must be set to the field that contains the location. This is necessary, as we currently do not support geospatial indexes." ); + } + + String distanceField = options.getString( "distanceField" ).getValue(); + BsonNumber distanceMultiplier = options.containsKey( "distanceMultiplier" ) ? options.getNumber( "distanceMultiplier" ) : null; + + // TODO: includeLocs + // This option really only makes sense if we have geospatial indexes, because then we would not always know + // which index exists on which field. All fields from the input are included in the output anyway, whether we manually + // add them here or not (unless they were projected away by the user).
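+ // Illustrative example (values are made up): a stage { $geoNear: { near: [ 7.58, 47.56 ], spherical: true, maxDistance: 1000, key: "location", distanceField: "dist" } } reaches combineNear as the synthetic filter { $nearSphere: [ 7.58, 47.56 ], $maxDistance: 1000 } on the "location" field, with "dist" as the output distance field.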
+ node = combineNear( nearDocument, nearKey, inputDistanceField, distanceField, distanceMultiplier, node, rowType ); + return node; + } + + private AlgNode combineFilter( BsonDocument filter, AlgNode node, AlgDataType rowType ) { RexNode condition = translateDocument( filter, rowType, null ); - return LogicalDocumentFilter.create( node, condition ); } + /** + * This function is used to create the $near or $nearSphere operator in a normal query, as well as the + * $geoNear aggregation stage. + * + * @param options Document that contains the $near or $nearSphere key. + * @param nearOrNearSphere Whether we use spherical ($nearSphere) or flat geometry ($near). + * Attention: We use spherical geometry when the filter geometry is specified as GeoJSON, + * even when using $near. This is because the default for GeoJSON is SRID=4326. + * @param key Which field from the input is used to calculate the distance + * @param distanceField (optional) Which field in the output should store the calculated distance + * @param distanceMultiplier (optional) Factor to multiply the calculated distance. Can be used to convert + * radians to kilometers in a spherical query, by multiplying by the radius of the + * Earth. + */ + private AlgNode combineNear( BsonDocument options, String nearOrNearSphere, String key, String distanceField, BsonNumber distanceMultiplier, AlgNode node, AlgDataType rowType ) { + boolean isSpherical = Objects.equals( nearOrNearSphere, "$nearSphere" ); + boolean keepDistanceField = distanceField != null; + if ( distanceField == null ) { + distanceField = "__temp_%s".formatted( UUID.randomUUID().toString() ); + } + + if ( distanceMultiplier == null ) { + distanceMultiplier = new BsonInt32( 1 ); + } + + // + // Step 1: + // Projection that adds dynamically computed distance field. + Map adds = new HashMap<>(); + List excludes = List.of(); + + BsonValue innerNear = options.get( nearOrNearSphere ); + BsonDocument distanceProjection = new BsonDocument(); + BsonNumber minDistance = null; + BsonNumber maxDistance = null; + + if ( innerNear.isArray() ) { + BsonArray legacyCoordinates = innerNear.asArray(); + if ( options.containsKey( "$maxDistance" ) ) { + maxDistance = options.getNumber( "$maxDistance" ); + } + // Technically not allowed for a $near query, but we need this to support minDistance from the $geoNear stage. + if ( options.containsKey( "$minDistance" ) ) { + minDistance = options.getNumber( "$minDistance" ); + } + distanceProjection.put( "$distance", new BsonArray( List.of( + new BsonString( "$" + key ), + legacyCoordinates, + distanceMultiplier + ) ) ); + } else if ( innerNear.isDocument() ) { + // If the user specifies the geometry using GeoJSON, we use the spherical + // geometry by default, even if we use $near. 
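+ // E.g. a filter { $near: { $geometry: { type: "Point", coordinates: [ 7.58, 47.56 ] } } } (illustrative values) is handled spherically here, because GeoJSON points default to SRID 4326.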
+ isSpherical = true; + + BsonDocument near = innerNear.asDocument(); + BsonDocument geometry = near.getDocument( "$geometry" ); + distanceProjection.put( "$distance", new BsonArray( List.of( + new BsonString( "$" + key ), + geometry + ) ) ); + if ( near.containsKey( "$minDistance" ) ) { + minDistance = near.get( "$minDistance" ).asNumber(); + } + if ( near.containsKey( "$maxDistance" ) ) { + maxDistance = near.get( "$maxDistance" ).asNumber(); + } + } + + adds.put( distanceField, convertDistance( distanceProjection.get( "$distance" ), isSpherical, rowType ) ); + node = LogicalDocumentProject.create( node, Map.of(), excludes, adds ); + node.getTupleType(); + + // + // Step 2: + // (Optional) Filter out by using fields $minDistance and $maxDistance + BsonDocument filterConditions = new BsonDocument(); + if ( minDistance != null ) { + filterConditions.put( "$gte", minDistance ); + } + if ( maxDistance != null ) { + filterConditions.put( "$lte", maxDistance ); + } + if ( !filterConditions.isEmpty() ) { + BsonDocument filterDistance = new BsonDocument( distanceField, filterConditions ); + RexNode distanceCondition = translateDocument( filterDistance, rowType, null ); + node = LogicalDocumentFilter.create( node, distanceCondition ); + node.getTupleType(); + } + + // + // Step 3: + // Sort by distanceField ascending + BsonDocument sortDocument = new BsonDocument( distanceField, new BsonInt32( 1 ) ); + node = combineSort( sortDocument, node, rowType ); + + // + // Step 4: + // Projection to remove field distanceField + if ( !keepDistanceField ) { + BsonDocument removeDistanceProjection = new BsonDocument( distanceField, new BsonInt32( 0 ) ); + List unsetExcludes = new ArrayList<>(); + translateProjection( rowType, false, true, Map.of(), unsetExcludes, removeDistanceProjection ); + node = LogicalDocumentProject.create( node, Map.of(), unsetExcludes, Map.of() ); + } + + return node; + } + + private static PolyValue getPolyValue( BsonValue value ) { switch ( value.getBsonType() ) { case DOUBLE: @@ -1334,6 +1511,43 @@ private RexNode convertMath( String key, String parentKey, BsonValue bsonValue, } + private RexNode convertDistance( BsonValue bsonValue, boolean isSpherical, AlgDataType rowType ) { + BsonArray bsonArray = bsonValue.asArray(); + List operands = new ArrayList<>(); + assert bsonArray.size() == 3; + BsonValue distanceField = bsonArray.get( 0 ); + BsonValue coordinates = bsonArray.get( 1 ); + BsonValue distanceMultiplier = bsonArray.get( 2 ); + + // Reference to field from document + operands.add( getIdentifier( distanceField.asString().getValue().substring( 1 ), rowType ) ); + + PolyGeometry polyGeometry; + if ( coordinates.isDocument() ) { + BsonDocument geometry = coordinates.asDocument(); + try { + polyGeometry = PolyGeometry.fromGeoJson( geometry.toJson() ); + } catch ( InvalidGeometryException e ) { + throw new RuntimeException( e ); + } + } else if ( coordinates.isArray() ) { + GeometryFactory geoFactory = isSpherical + ? new GeometryFactory( new PrecisionModel(), WGS_84 ) + : new GeometryFactory(); + Coordinate point = convertArrayToCoordinate( coordinates.asArray() ); + polyGeometry = new PolyGeometry( geoFactory.createPoint( point ) ); + } else { + throw new GenericRuntimeException( "$near supports either a legacy coordinate pair of the form [x, y] or a $geometry object." 
); + } + // Geometry from filter + operands.add( convertGeometry( polyGeometry ) ); + + operands.add( convertLiteral( distanceMultiplier ) ); + + return getFixedCall( operands, OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_GEO_DISTANCE ), PolyType.ANY ); + } + + private RexNode convertSingleMath( String key, BsonValue value, AlgDataType rowType ) { Operator op = singleMathOperators.get( key ); if ( value.isArray() ) { @@ -1514,11 +1728,32 @@ private RexNode translateDocument( BsonDocument bsonDocument, AlgDataType rowTyp List operands = new ArrayList<>(); for ( Entry entry : bsonDocument.entrySet() ) { - if ( entry.getKey().equals( "$regex" ) ) { - operands.add( convertRegex( bsonDocument, parentKey, rowType ) ); - } else if ( !entry.getKey().equals( "$options" ) ) { - // normal handling - operands.add( convertEntry( entry.getKey(), parentKey, entry.getValue(), rowType ) ); + switch ( entry.getKey() ) { + case "$regex": + operands.add( convertRegex( bsonDocument, parentKey, rowType ) ); + break; + case "$options": + // Already handled by $regex + break; + case "$geoIntersects": + operands.add( convertGeoIntersects( bsonDocument, parentKey, rowType ) ); + break; + case "$geoWithin": + operands.add( convertGeoWithin( bsonDocument, parentKey, rowType ) ); + break; + case "$near": + operands.add( convertNear( bsonDocument, parentKey, false, rowType ) ); + break; + case "$nearSphere": + operands.add( convertNear( bsonDocument, parentKey, true, rowType ) ); + break; + case "$minDistance", "$maxDistance": + // Already handled by $near or $nearSphere + break; + default: + // normal handling + operands.add( convertEntry( entry.getKey(), parentKey, entry.getValue(), rowType ) ); + break; } } return getFixedCall( operands, OperatorRegistry.get( OperatorName.AND ), PolyType.BOOLEAN ); @@ -1603,6 +1838,319 @@ private RexCall getRegex( String stringRegex, String options, String parentKey, } + private RexNode convertGeoIntersects( BsonValue bson, String parentKey, AlgDataType rowType ) { + // We convert the $geometry object to a PolyGeometry String. + BsonDocument geometry = bson.asDocument().get( "$geoIntersects" ).asDocument().get( "$geometry" ).asDocument(); + PolyGeometry polyGeometry; + try { + polyGeometry = PolyGeometry.fromGeoJson( geometry.toJson() ); + } catch ( InvalidGeometryException e ) { + throw new GenericRuntimeException( "$geometry operand of $geoIntersects could not be parsed as GeoJSON.", e ); + } + + return new RexCall( + cluster.getTypeFactory().createPolyType( PolyType.BOOLEAN ), + OperatorRegistry.get( QueryLanguage.from( MONGO ), OperatorName.MQL_GEO_INTERSECTS ), + Arrays.asList( + getIdentifier( parentKey, rowType ), + convertGeometry( polyGeometry ) + ) ); + } + + + private RexNode convertGeoWithin( BsonValue bson, String parentKey, AlgDataType rowType ) { + // We convert the $geometry object to a PolyGeometry String. 
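+ // Accepted inputs, e.g. (field name "loc" and values are illustrative): { loc: { $geoWithin: { $geometry: <GeoJSON> } } }, { $box: [ [ 0, 0 ], [ 10, 10 ] ] }, { $polygon: [ [ 0, 0 ], [ 5, 8 ], [ 10, 0 ] ] }, { $center: [ [ 5, 5 ], 2 ] } or { $centerSphere: [ [ 5, 5 ], 0.1 ] }.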
+ BsonDocument geometry = bson.asDocument().get( "$geoWithin" ).asDocument(); + PolyGeometry polyGeometry = null; + PolyDouble distance = new PolyDouble( -1d ); + + if ( geometry.containsKey( "$geometry" ) ) { + geometry = geometry.get( "$geometry" ).asDocument(); + try { + polyGeometry = PolyGeometry.fromGeoJson( geometry.toJson() ); + } catch ( InvalidGeometryException e ) { + throw new GenericRuntimeException( "$geometry operand of $geoWithin could not be parsed as GeoJSON.", e ); + } + } + + if ( geometry.containsKey( "$box" ) ) { + BsonArray box = geometry.get( "$box" ).asArray(); + Coordinate bottomLeft = convertArrayToCoordinate( box.get( 0 ).asArray() ); + Coordinate topRight = convertArrayToCoordinate( box.get( 1 ).asArray() ); + Coordinate topLeft = new Coordinate( bottomLeft.x, topRight.y ); + Coordinate bottomRight = new Coordinate( topRight.x, bottomLeft.y ); + // Form a closed Ring, starting on the bottom left and going clockwise. + Coordinate[] linearRing = new Coordinate[]{ + bottomLeft, + topLeft, + topRight, + bottomRight, + bottomLeft + }; + GeometryFactory geoFactory = new GeometryFactory(); + polyGeometry = new PolyGeometry( geoFactory.createPolygon( linearRing ) ); + } else if ( geometry.containsKey( "$polygon" ) ) { + BsonArray polygon = geometry.get( "$polygon" ).asArray(); + ArrayList linearRing = new ArrayList<>(); + for ( BsonValue coordinate : polygon ) { + linearRing.add( convertArrayToCoordinate( coordinate.asArray() ) ); + } + GeometryFactory geoFactory = new GeometryFactory(); + polyGeometry = new PolyGeometry( geoFactory.createPolygon( linearRing.toArray( new Coordinate[0] ) ) ); + } else if ( geometry.containsKey( "$center" ) ) { + BsonArray circle = geometry.get( "$center" ).asArray(); + Coordinate center = convertArrayToCoordinate( circle.get( 0 ).asArray() ); + double radius = convertBsonValueToDouble( circle.get( 1 ) ); + distance = new PolyDouble( radius ); + + // As GeoJSON does not define a circle shape, we will create a Point instead. Then we can + // check if the distance between the shape and the point is inside the radius. + GeometryFactory geoFactory = new GeometryFactory(); + polyGeometry = new PolyGeometry( geoFactory.createPoint( center ) ); + } else if ( geometry.containsKey( "$centerSphere" ) ) { + BsonArray circle = geometry.get( "$centerSphere" ).asArray(); + Coordinate center = convertArrayToCoordinate( circle.get( 0 ).asArray() ); + double radius = convertBsonValueToDouble( circle.get( 1 ) ); + distance = new PolyDouble( radius ); + + // As $centerSphere works in the spherical instead of the planar coordinate system, we need + // to use the default WGS84 CRS when creating the shape. + GeometryFactory geoFactory = new GeometryFactory( new PrecisionModel(), WGS_84 ); + // As GeoJSON does not define a circle shape, we will create a Point instead. Then we can + // check if the distance between the shape and the point is inside the radius. + polyGeometry = new PolyGeometry( geoFactory.createPoint( center ) ); + } + + if ( polyGeometry == null ) { + throw new GenericRuntimeException( "$geoWithin arguments needs to be one of the following: $geometry, $box, $polygon, $center, $centerSphere" ); + } + + return new RexCall( + cluster.getTypeFactory().createPolyType( PolyType.BOOLEAN ), + OperatorRegistry.get( QueryLanguage.from( MONGO ), OperatorName.MQL_GEO_WITHIN ), + Arrays.asList( + getIdentifier( parentKey, rowType ), + convertGeometry( polyGeometry ), + // TODO: Possible to have null? 
+ convertLiteral( new BsonDouble( distance.doubleValue() ) ) + ) ); + } + + + private AlgNode convertGeoNear( BsonValue bson, AlgNode node, AlgDataType rowType ) { + if ( !bson.isDocument() ) { + throw new GenericRuntimeException( "$geoNear called without any options" ); + } + BsonDocument options = bson.asDocument(); + + // Required + // The output field that contains the calculated distance. + BsonString distanceField = null; + PolyGeometry near = null; + + if ( !options.containsKey( "distanceField" ) || !options.containsKey( "near" ) ) { + throw new GenericRuntimeException( "distanceField and near for $geoNear are required." ); + } + + // Optional + // Factor to multiply all computed distances by + BsonDouble distanceMultiplier = new BsonDouble( 1 ); + // Includes a field in the output document with the geometry from the near field. + BsonString includeLocs = new BsonString( "" ); + // Specify which spatial index should be used when calculating distances + BsonString key = new BsonString( "" ); + // Filter: The maximum distance the result can be from the point specified in near + BsonDouble maxDistance = new BsonDouble( -1 ); + // Filter: The minimum distance the result can be from the point specified in near + BsonDouble minDistance = new BsonDouble( -1 ); + // Determines if $nearSphere or $near semantics should be used. Default: $near. + boolean spherical = false; + + distanceField = options.getString( "distanceField" ); + BsonValue bsonNear = options.get( "near" ); + + if ( options.containsKey( "spherical" ) ) { + spherical = options.getBoolean( "spherical" ).getValue(); + } + + if ( bsonNear.isArray() ) { + // When using legacy coordinates, only $minDistance is valid. + GeometryFactory geoFactory = spherical + ? new GeometryFactory( new PrecisionModel(), WGS_84 ) + : new GeometryFactory(); + Coordinate point = convertArrayToCoordinate( bsonNear.asArray() ); + near = new PolyGeometry( geoFactory.createPoint( point ) ); + } else if ( bsonNear.isDocument() ) { + if ( !spherical ) { + throw new GenericRuntimeException( "Cannot use spherical=false when using a GeoJSON object for the near key."
); + } + BsonDocument nearGeoJson = bsonNear.asDocument(); + BsonDocument geometry = nearGeoJson.getDocument( "$geometry" ); + try { + near = PolyGeometry.fromGeoJson( geometry.toJson() ); + } catch ( InvalidGeometryException e ) { + throw new RuntimeException( e ); + } + } else { + throw new GenericRuntimeException( "Value of near must be either an array or a Document." ); + } + + if ( options.containsKey( "includeLocs" ) ) { + includeLocs = options.getString( "includeLocs" ); + } + + if ( options.containsKey( "key" ) ) { + key = options.getString( "key" ); + } + + if ( options.containsKey( "distanceMultiplier" ) ) { + BsonNumber multiplier = options.getNumber( "distanceMultiplier" ); + if ( multiplier.isInt32() ) { + distanceMultiplier = new BsonDouble( multiplier.asInt32().intValue() ); + } else if ( multiplier.isInt64() ) { + distanceMultiplier = new BsonDouble( (double) multiplier.asInt64().longValue() ); + } else if ( multiplier.isDouble() ) { + distanceMultiplier = multiplier.asDouble(); + } else { + throw new GenericRuntimeException( "distanceMultiplier must be a number type" ); + } + } + + if ( options.containsKey( "maxDistance" ) ) { + BsonNumber maxDist = options.getNumber( "maxDistance" ); + if ( maxDist.isInt32() ) { + maxDistance = new BsonDouble( maxDist.asInt32().intValue() ); + } else if ( maxDist.isInt64() ) { + maxDistance = new BsonDouble( (double) maxDist.asInt64().longValue() ); + } else if ( maxDist.isDouble() ) { + maxDistance = maxDist.asDouble(); + } else { + throw new GenericRuntimeException( "maxDistance must be a number type" ); + } + } + + if ( options.containsKey( "minDistance" ) ) { + BsonNumber minDist = options.getNumber( "minDistance" ); + if ( minDist.isInt32() ) { + minDistance = new BsonDouble( minDist.asInt32().intValue() ); + } else if ( minDist.isInt64() ) { + minDistance = new BsonDouble( (double) minDist.asInt64().longValue() ); + } else if ( minDist.isDouble() ) { + minDistance = minDist.asDouble(); + } else { + throw new GenericRuntimeException( "minDistance must be a number type" ); + } + } + + RexNode query; + if ( options.containsKey( "query" ) ) { + query = translateDocument( options.getDocument( "query" ), rowType, null ); + } else { + // TODO RB: Is there a better way to represent an empty or non-existent RexCall? null is not allowed. + query = convertLiteral( new BsonBoolean( false ) ); + } + + return LogicalDocumentFilter.create( + node, + new RexCall( + cluster.getTypeFactory().createPolyType( PolyType.BOOLEAN ), + OperatorRegistry.get( QueryLanguage.from( MONGO ), OperatorName.MQL_GEO_NEAR ), + Arrays.asList( + // Required + convertGeometry( near ), + getIdentifier( distanceField.getValue(), rowType ), + // Optional + convertLiteral( distanceMultiplier ), + getIdentifier( includeLocs.getValue(), rowType ), + convertLiteral( key ), + convertLiteral( maxDistance ), + convertLiteral( minDistance ), + query + // Spherical is encoded in PolyGeometry as CRS. + ) ) + ); + } + + + private RexNode convertNear( BsonValue bson, String parentKey, boolean isSpherical, AlgDataType rowType ) { + String key = isSpherical ? "$nearSphere" : "$near"; + BsonDocument bsonDocument = bson.asDocument(); + BsonValue innerNear = bsonDocument.get( key ); + + BsonNumber minDistance = new BsonInt32( -1 ); + BsonNumber maxDistance = new BsonInt32( -1 ); + PolyGeometry polyGeometry; + + if ( innerNear.isArray() ) { + // When using legacy coordinates, only $maxDistance is valid.
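+ // E.g. { loc: { $near: [ 7.58, 47.56 ], $maxDistance: 0.1 } } stays in the planar coordinate system, while $nearSphere uses WGS84, so the distance is later computed spherically ("loc" and the values are illustrative).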
+ if ( bsonDocument.containsKey( "$maxDistance" ) ) { + maxDistance = bsonDocument.getNumber( "$maxDistance" ); + } + GeometryFactory geoFactory = isSpherical + ? new GeometryFactory( new PrecisionModel(), WGS_84 ) + : new GeometryFactory(); + Coordinate point = convertArrayToCoordinate( innerNear.asArray() ); + polyGeometry = new PolyGeometry( geoFactory.createPoint( point ) ); + } else if ( innerNear.isDocument() ) { + BsonDocument near = innerNear.asDocument(); + BsonDocument geometry = near.getDocument( "$geometry" ); + try { + polyGeometry = PolyGeometry.fromGeoJson( geometry.toJson() ); + } catch ( InvalidGeometryException e ) { + throw new RuntimeException( e ); + } + if ( near.containsKey( "$minDistance" ) ) { + minDistance = near.get( "$minDistance" ).asNumber(); + } + if ( near.containsKey( "$maxDistance" ) ) { + maxDistance = near.get( "$maxDistance" ).asNumber(); + } + } else { + throw new GenericRuntimeException( "Value of %s must be either an array or a Document", key ); + } + + return new RexCall( + cluster.getTypeFactory().createPolyType( PolyType.BOOLEAN ), + OperatorRegistry.get( QueryLanguage.from( MONGO ), isSpherical ? OperatorName.MQL_NEAR_SPHERE : OperatorName.MQL_NEAR ), + Arrays.asList( + getIdentifier( parentKey, rowType ), + convertGeometry( polyGeometry ), + convertLiteral( minDistance ), + convertLiteral( maxDistance ) + ) ); + } + + + private static Coordinate convertArrayToCoordinate( BsonArray array ) { + if ( array.size() != 2 ) { + throw new GenericRuntimeException( "Coordinates need to be of the form [x,y]" ); + } + double x = convertBsonValueToDouble( array.get( 0 ) ); + double y = convertBsonValueToDouble( array.get( 1 ) ); + return new Coordinate( x, y ); + } + + + private static double convertBsonValueToDouble( BsonValue bsonValue ) { + Double result = null; + if ( bsonValue.isDouble() ) { + result = bsonValue.asDouble().getValue(); + } + if ( bsonValue.isInt32() ) { + int intValue = bsonValue.asInt32().getValue(); + result = (double) intValue; + } + if ( bsonValue.isInt64() ) { + long intValue = bsonValue.asInt64().getValue(); + result = (double) intValue; + } + if ( result == null ) { + throw new GenericRuntimeException( "Legacy coordinates need to be of type INTEGER or DOUBLE."
); + } + return result; + } + + /** * Converts an $exists field according to the provided information * @@ -1861,7 +2409,12 @@ private RexCall getStringArray( List elements ) { private RexNode convertLiteral( BsonValue bsonValue ) { Pair valuePair = RexLiteral.convertType( getPolyValue( bsonValue ), new DocumentType() ); return new RexLiteral( valuePair.left, new DocumentType(), valuePair.right ); + } + + private RexNode convertGeometry( PolyGeometry geometry ) { + Pair valuePair = RexLiteral.convertType( geometry, new DocumentType() ); + return new RexLiteral( valuePair.left, new DocumentType(), valuePair.right ); } @@ -1947,7 +2500,7 @@ private AlgNode combineProjection( BsonValue projectionValue, AlgNode node, AlgD } if ( !excludes.isEmpty() ) { - return LogicalDocumentProject.create( node, new HashMap<>(), excludes ); + return LogicalDocumentProject.create( node, new HashMap<>(), excludes, Map.of() ); } else if ( isAddFields ) { List names = new ArrayList<>(); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java index 683f01afcd..86d5640629 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/Neo4jPlugin.java @@ -127,7 +127,7 @@ private static String getPhysicalGraphName( long id ) { } - private static String getMappingLabel( long id ) { + static String getMappingLabel( long id ) { return String.format( "___n_%d___", id ); } @@ -421,6 +421,9 @@ private void addCompositeIndex( PolyXid xid, LogicalIndex index, List colu public void restoreTable( AllocationTable alloc, List entities, Context context ) { for ( PhysicalEntity entity : entities ) { updateNamespace( entity.namespaceName, entity.namespaceId ); + + // TODO: If entity is of type PhysicalGraph, unwrap() does not work + // Happens if we restart Polypheny-DB and try to restore the graph from Neo4j. 
adapterCatalog.addPhysical( alloc, currentNamespace.createEntity( entity, entity.unwrapOrThrow( PhysicalTable.class ).columns, currentNamespace ) ); } } @@ -430,7 +433,7 @@ public void restoreTable( AllocationTable alloc, List entities, public void restoreGraph( AllocationGraph alloc, List entities, Context context ) { for ( PhysicalEntity entity : entities ) { updateNamespace( entity.namespaceName, entity.namespaceId ); - adapterCatalog.addPhysical( alloc, currentNamespace.createEntity( entity, List.of(), currentNamespace ) ); + adapterCatalog.addPhysical( alloc, currentNamespace.createGraph( entity, List.of(), this.db, this ) ); } } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java index 7a8b1b24ea..4fa7d7c819 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/NeoNamespace.java @@ -16,6 +16,8 @@ package org.polypheny.db.adapter.neo4j; +import static org.polypheny.db.adapter.neo4j.Neo4jPlugin.getMappingLabel; + import java.util.List; import lombok.EqualsAndHashCode; import lombok.Value; @@ -68,6 +70,11 @@ public NeoEntity createEntity( PhysicalEntity entity, List fields, Driver db, Neo4jStore store ) { + return new NeoGraph( entity, fields, this.transactionProvider, db, getMappingLabel( entity.id ), store ); + } + + @Override public Expression asExpression() { return null; diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgAggregate.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgAggregate.java index 3f716372e3..a538930f92 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgAggregate.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgAggregate.java @@ -96,7 +96,7 @@ public void implement( NeoGraphImplementor implementor ) { i = currentNames.indexOf( agg.function.getOperatorName().name() ); } - finalRow.set( i, Objects.requireNonNull( NeoUtil.getOpAsNeo( agg.function.getOperatorName(), List.of(), null ) ).apply( refs ) ); + finalRow.set( i, Objects.requireNonNull( NeoUtil.getOpAsNeo( agg.function.getOperatorName(), List.of(), null, List.of() ) ).apply( refs ) ); } implementor.add( with_( list_( finalRow.stream().map( e -> literal_( PolyString.of( e ) ) ).toList() ) ) ); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java index ca687403e9..9ea6ca8323 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/rules/graph/NeoLpgModify.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -106,7 +106,6 @@ private void handleInsert( NeoGraphImplementor implementor ) { return; } else { // normal values - } } else { if ( !implementor.statements.isEmpty() ) { diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedListType.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedListType.java index 9c80890770..e9349a00dd 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedListType.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedListType.java @@ -33,9 +33,26 @@ public class NestedListType extends NestedPolyType { @Getter public PolyType type; + public List names; + + public List types; + public NestedListType( PolyType type, List names, List types ) { + this.type = type; + this.names = names; + this.types = types; + } + + + public NestedListType( PolyType type, List types ) { + this.type = type; + this.types = types; + this.names = types.stream().map( t -> (String) null ).toList(); + } + + @Override public boolean isList() { return true; @@ -50,7 +67,9 @@ public NestedListType asList() { @Override public Expression asExpression() { - return Expressions.new_( NestedListType.class, Expressions.constant( type ), EnumUtils.constantArrayList( types.stream().map( Expressible::asExpression ).toList(), NestedPolyType.class ) ); + Expression tExpression = EnumUtils.constantArrayList( types.stream().map( Expressible::asExpression ).toList(), NestedPolyType.class ); + Expression nExpression = EnumUtils.constantArrayList( names.stream().map( Expressions::constant ).toList(), String.class ); + return Expressions.new_( NestedListType.class, Expressions.constant( type ), nExpression, tExpression ); } } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedPolyType.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedPolyType.java index 44ddefb3ce..93e3e2ec4d 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedPolyType.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/types/NestedPolyType.java @@ -27,7 +27,10 @@ public abstract class NestedPolyType implements Expressible { public static NestedPolyType from( AlgDataType rowType ) { if ( rowType.isStruct() ) { - return new NestedListType( rowType.getPolyType(), rowType.getFields().stream().map( NestedPolyType::from ).toList() ); + return new NestedListType( + rowType.getPolyType(), + rowType.getFields().stream().map( AlgDataTypeField::getName ).toList(), + rowType.getFields().stream().map( NestedPolyType::from ).toList() ); } if ( rowType instanceof ArrayType type ) { diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoStatements.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoStatements.java index 26898f9e80..143937df07 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoStatements.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoStatements.java @@ -39,6 +39,7 @@ import org.polypheny.db.type.entity.graph.PolyEdge.EdgeDirection; import org.polypheny.db.type.entity.graph.PolyNode; import org.polypheny.db.type.entity.graph.PolyPath; +import org.polypheny.db.type.entity.spatial.PolyPoint; /** * Helper classes, which are used to create cypher queries with a object representation. 
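Background sketch (not part of this patch): the names list that NestedListType now carries exists so the result mapping can tag each element of a returned path with the variable name recovered from the row type; the asPolyPath change further below consumes it. A self-contained toy illustration of that pairing, with plain strings standing in for nodes:

    // Toy sketch only: zip nodes with names, falling back to null once the names run out,
    // which mirrors the strategy of the new asPolyPath.
    import java.util.AbstractMap.SimpleEntry;
    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map.Entry;

    public class PathNamingSketch {
        public static void main( String[] args ) {
            List<String> nodes = List.of( "(:Person {name: 'Ann'})", "(:City {name: 'Basel'})" );
            List<String> names = List.of( "p" );   // only the first node was bound to a variable
            Iterator<String> nameIter = names.iterator();
            List<Entry<String, String>> named = new ArrayList<>();
            for ( String node : nodes ) {
                named.add( new SimpleEntry<>( nameIter.hasNext() ? nameIter.next() : null, node ) );
            }
            named.forEach( e -> System.out.println( e.getKey() + " -> " + e.getValue() ) );
        }
    }
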
@@ -451,6 +452,23 @@ static NeoStatement _literalOrString( PolyValue value ) { return string_( value ); } else if ( value.isList() ) { return literal_( PolyString.of( String.format( "[%s]", value.asList().stream().map( value1 -> _literalOrString( (PolyValue) value1 ).build() ).collect( Collectors.joining( ", " ) ) ) ) ); + } else if ( value.isGeometry() ) { + // Neo4j only supports PolyGeometry of type Point natively. We could choose to convert PolyGeometry + // to GeoJSON or WKT, but then the native Neo4j internal methods would no longer be able to work with + // the value. + assert value.asGeometry().isPoint() : "Neo4j only supports Point geometries natively"; + PolyPoint point = value.asGeometry().asPoint(); + int dimensions = Double.isNaN( point.getZ() ) ? 2 : 3; + String pointValues = switch ( point.getSRID() ) { + case 0 -> dimensions == 2 + ? "x: " + point.getX() + " , y: " + point.getY() + : "x: " + point.getX() + " , y: " + point.getY() + ", z: " + point.getZ(); + case 4326 -> "longitude: " + point.getX() + " , latitude: " + point.getY(); + case 4979 -> "longitude: " + point.getX() + " , latitude: " + point.getY() + " , height: " + point.getZ(); + default -> throw new IllegalArgumentException( "Unsupported SRID: " + point.getSRID() ); + }; + String pointString = "point({" + pointValues + "})"; + return literal_( PolyString.of( pointString ) ); } else { return literal_( value ); } diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoUtil.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoUtil.java index a2e16464f1..b056755451 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoUtil.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/NeoUtil.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
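To make the generated Cypher easy to picture, here is a self-contained sketch (not part of this patch) that reproduces the literal format chosen in the geometry branch above; the formatting logic mirrors the patch, while the sample coordinates are invented:

    // Sketch only, not part of this patch.
    public class CypherPointLiteralSketch {

        static String pointLiteral( double x, double y, double z, int srid ) {
            int dimensions = Double.isNaN( z ) ? 2 : 3;
            String values = switch ( srid ) {
                case 0 -> dimensions == 2
                        ? "x: " + x + " , y: " + y
                        : "x: " + x + " , y: " + y + ", z: " + z;
                case 4326 -> "longitude: " + x + " , latitude: " + y;
                case 4979 -> "longitude: " + x + " , latitude: " + y + " , height: " + z;
                default -> throw new IllegalArgumentException( "Unsupported SRID: " + srid );
            };
            return "point({" + values + "})";
        }

        public static void main( String[] args ) {
            System.out.println( pointLiteral( 8.55, 47.37, Double.NaN, 4326 ) );   // point({longitude: 8.55 , latitude: 47.37})
            System.out.println( pointLiteral( 1.0, 2.0, 3.0, 0 ) );                // point({x: 1.0 , y: 2.0, z: 3.0})
        }
    }
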
@@ -31,14 +31,21 @@ import lombok.NonNull; import org.apache.calcite.linq4j.function.Function1; import org.apache.commons.lang3.NotImplementedException; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.jts.geom.GeometryFactory; +import org.locationtech.jts.geom.PrecisionModel; import org.neo4j.driver.Record; import org.neo4j.driver.Value; +import org.neo4j.driver.internal.InternalPath; +import org.neo4j.driver.internal.InternalPoint2D; import org.neo4j.driver.internal.value.FloatValue; import org.neo4j.driver.internal.value.IntegerValue; import org.neo4j.driver.internal.value.ListValue; +import org.neo4j.driver.internal.value.PointValue; import org.neo4j.driver.internal.value.StringValue; import org.neo4j.driver.types.Node; import org.neo4j.driver.types.Path; +import org.neo4j.driver.types.Point; import org.neo4j.driver.types.Relationship; import org.polypheny.db.adapter.neo4j.types.NestedPolyType; import org.polypheny.db.algebra.constant.Kind; @@ -46,8 +53,10 @@ import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexIndexRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexVisitorImpl; @@ -71,7 +80,9 @@ import org.polypheny.db.type.entity.numerical.PolyDouble; import org.polypheny.db.type.entity.numerical.PolyFloat; import org.polypheny.db.type.entity.numerical.PolyInteger; +import org.polypheny.db.type.entity.spatial.InvalidGeometryException; import org.polypheny.db.type.entity.spatial.PolyGeometry; +import org.polypheny.db.type.entity.spatial.PolyPoint; import org.polypheny.db.type.entity.temporal.PolyDate; import org.polypheny.db.type.entity.temporal.PolyTime; import org.polypheny.db.type.entity.temporal.PolyTimestamp; @@ -120,29 +131,64 @@ static Function1 getUnnullableTypeFunction( NestedPolyType typ case GRAPH -> o -> (PolyValue) o; case NODE -> o -> asPolyNode( o.asNode() ); case EDGE -> o -> asPolyEdge( o.asRelationship() ); - case PATH -> o -> asPolyPath( o.asPath() ); - case GEOMETRY -> o -> PolyGeometry.of( o.asString() ); + case PATH -> o -> { + + if ( o instanceof ListValue ) { + return asPolyPath( ((InternalPath) o.asList().get( 0 )), type.asList().names ); + } + return asPolyPath( o.asPath(), type.asList().names ); + }; + case GEOMETRY -> NeoUtil::asPolyGeometry; default -> throw new GenericRuntimeException( String.format( "Object of type %s was not transformable.", type ) ); }; } - static PolyPath asPolyPath( Path path ) { + static PolyGeometry asPolyGeometry( Value value ) { + if ( value instanceof PointValue pointValue ) { + Point point = pointValue.asPoint(); + + // These are the only real SRIDs that Neo4j uses, the others are only used + // internally for the cartesian 2D / 3D case, which we will represent as 0. 
+ int srid = 0; + if ( point.srid() == 4326 || point.srid() == 4979 ) { + srid = point.srid(); + } + + GeometryFactory geometryFactory = new GeometryFactory( new PrecisionModel(), srid ); + Coordinate coordinate = new Coordinate( point.x(), point.y() ); + if ( !Double.isNaN( point.z() ) ) { + coordinate.setZ( point.z() ); + } + return PolyGeometry.of( geometryFactory.createPoint( coordinate ) ); + } else if ( value instanceof StringValue stringValue ) { + try { + return new PolyGeometry( stringValue.asString() ); + } catch ( InvalidGeometryException e ) { + throw new RuntimeException( e ); + } + } + + throw new GenericRuntimeException( String.format( "Could not transform object of type %s to PolyGeometry", value.type() ) ); + } + + static PolyPath asPolyPath( Path path, List names ) { Iterator nodeIter = path.nodes().iterator(); Iterator edgeIter = path.relationships().iterator(); - List nodes = new ArrayList<>(); - List edges = new ArrayList<>(); + Iterator nameIter = names.iterator(); + List> nodes = new ArrayList<>(); + List> edges = new ArrayList<>(); while ( nodeIter.hasNext() ) { - nodes.add( asPolyNode( nodeIter.next() ) ); + nodes.add( Pair.of( nameIter.hasNext() ? PolyString.of( nameIter.next() ) : null, asPolyNode( nodeIter.next() ) ) ); if ( nodeIter.hasNext() ) { - edges.add( asPolyEdge( edgeIter.next() ) ); + edges.add( Pair.of( null, asPolyEdge( edgeIter.next() ) ) ); } } return PolyPath.create( - nodes.stream().map( n -> Pair.of( (PolyString) null, n ) ).toList(), - edges.stream().map( e -> Pair.of( (PolyString) null, e ) ).toList() ); + nodes, + edges ); } static PolyNode asPolyNode( Node node ) { @@ -185,8 +231,11 @@ static PolyValue asPolyValue( @NonNull Value value ) { return PolyString.of( value.asString() ); } else if ( value instanceof FloatValue ) { return PolyString.of( String.valueOf( value.asDouble() ) ); + } else if ( value instanceof ListValue ) { return new PolyList<>( value.asList( NeoUtil::getComparableOrString ) ); + } else if ( value instanceof PointValue ) { + return asPolyGeometry( value ); } throw new NotImplementedException( "Type not supported" ); } @@ -264,7 +313,7 @@ static String rexAsString( RexLiteral literal, String mappingLabel, boolean isLi } - static Function1, String> getOpAsNeo( OperatorName operatorName, List operands, AlgDataType returnType ) { + static Function1, String> getOpAsNeo( OperatorName operatorName, List operands, AlgDataType returnType, List beforeFields ) { return switch ( operatorName ) { case AND -> o -> o.stream().map( e -> String.format( "(%s)", e ) ).collect( Collectors.joining( " AND " ) ); case DIVIDE -> handleDivide( operatorName, operands, returnType ); @@ -343,6 +392,9 @@ static Function1, String> getOpAsNeo( OperatorName operatorName, Li case AVG -> o -> String.format( "avg(%s)", o.get( 0 ) ); case MIN -> o -> String.format( "min(%s)", o.get( 0 ) ); case MAX -> o -> String.format( "max(%s)", o.get( 0 ) ); + case CYPHER_POINT -> handlePoint( operands, returnType, beforeFields ); + case DISTANCE_NEO4J -> o -> String.format( "distance(%s, %s)", o.get( 0 ), o.get( 1 ) ); + case CYPHER_WITHIN_BBOX -> o -> String.format( "point.withinBBox(%s, %s, %s)", o.get( 0 ), o.get( 1 ), o.get( 2 ) ); default -> null; }; @@ -359,6 +411,56 @@ private static Function1, String> handleCast( List operand return o -> o.get( 0 ); } + static Function1, String> handlePoint( List operands, AlgDataType returnType, List beforeFields ) { + // return point( { argName1: argValue1, argName2: argValue2 } ) + List arguments = new ArrayList<>(); + for 
( int i = 0; i < operands.size(); i += 2 ) { + RexNode argName = operands.get( i ); + RexNode argValue = operands.get( i + 1 ); + + if ( argName instanceof RexLiteral argNameLiteral && argValue instanceof RexLiteral argValueLiteral ) { + if ( argNameLiteral.value == null ) { + // Unknown value, we are done. + break; + } + String arg = argNameLiteral.value.toString() + ":" + argValueLiteral.value.toString(); + arguments.add( arg ); + } + + if ( argName instanceof RexLiteral argNameLiteral && + argValue instanceof RexCall cypherExtractProperty && + cypherExtractProperty.getOperator().getOperatorName() == OperatorName.CYPHER_EXTRACT_PROPERTY ) { + if ( argNameLiteral.value == null ) { + // Unknown value, we are done. + break; + } + + ArrayList cypherExtractPropertyOperands = new ArrayList( 2 ); + for ( int operandIndex = 0; operandIndex < cypherExtractProperty.operands.size(); operandIndex++ ) { + RexNode operand = cypherExtractProperty.operands.get( operandIndex ); + + if ( operand instanceof RexIndexRef rexIndexRef && !beforeFields.isEmpty() ) { + for ( AlgDataTypeField field : beforeFields ) { + if ( field.getIndex() == rexIndexRef.getIndex() ) { + cypherExtractPropertyOperands.add( field.getName() ); + break; + } + } + } else { + cypherExtractPropertyOperands.add( maybeUnquote( operand.toString() ) ); + } + } + String path = String.join( ".", cypherExtractPropertyOperands ); + + String arg = argNameLiteral.value.toString() + ":" + path; + arguments.add( arg ); + } + + } + + return o -> "point({" + String.join( ",", arguments ) + "})"; + } + static Function1, String> handleDivide( OperatorName operatorName, List operands, AlgDataType returnType ) { if ( PolyType.APPROX_TYPES.contains( returnType.getPolyType() ) ) { return o -> String.format( "toFloat(%s) / %s", o.get( 0 ), o.get( 1 ) ); @@ -447,7 +549,14 @@ static Object fixParameterValue( PolyValue value, NestedPolyType type, boolean i case BINARY, VARBINARY, FILE, IMAGE, VIDEO, AUDIO -> value.asBinary().value; case FLOAT, REAL, DOUBLE -> value.asNumber().doubleValue(); case DECIMAL -> value.asNumber().bigDecimalValue(); - case GEOMETRY -> value.asGeometry().toWKT(); + case GEOMETRY -> { + if ( value.asGeometry().isPoint() ) { + PolyPoint point = value.asGeometry().asPoint(); + yield new PointValue( new InternalPoint2D( point.getSRID(), point.getX(), point.getY() ) ); + } else { + yield value.asGeometry().toWKT(); + } + } case ARRAY -> value.asList().value.stream().map( e -> { if ( isNested ) { return e.toTypedJson(); @@ -472,7 +581,7 @@ public NeoSupportVisitor() { @Override public Void visitCall( RexCall call ) { - if ( NeoUtil.getOpAsNeo( call.op.getOperatorName(), call.operands, call.type ) == null ) { + if ( NeoUtil.getOpAsNeo( call.op.getOperatorName(), call.operands, call.type, List.of() ) == null ) { supports = false; } return super.visitCall( call ); diff --git a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/Translator.java b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/Translator.java index 5a80269e18..5375b6258a 100644 --- a/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/Translator.java +++ b/plugins/neo4j-adapter/src/main/java/org/polypheny/db/adapter/neo4j/util/Translator.java @@ -168,7 +168,7 @@ public String visitCall( RexCall call ) { private String getFinalFunction( RexCall call, List ops ) { - Function1, String> getter = NeoUtil.getOpAsNeo( call.op.getOperatorName(), call.operands, call.type ); + Function1, String> getter = NeoUtil.getOpAsNeo( 
call.op.getOperatorName(), call.operands, call.type, this.beforeFields ); assert getter != null : "Function is not supported by the Neo4j adapter."; String adjusted = getter.apply( IntStream.range( 0, ops.size() ).mapToObj( i -> call.operands.get( i ).getType().getPolyType() == PolyType.DECIMAL ? "toFloat(" + ops.get( i ) + ")" : ops.get( i ) ).toList() ); if ( useBrackets ) { diff --git a/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/PIServer.java b/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/PIServer.java index 77e46ae6fe..03a0c7338c 100644 --- a/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/PIServer.java +++ b/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/PIServer.java @@ -19,7 +19,6 @@ import java.io.EOFException; import java.io.File; import java.io.IOException; -import java.net.Inet4Address; import java.net.InetSocketAddress; import java.net.StandardProtocolFamily; import java.net.UnixDomainSocketAddress; diff --git a/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/utils/PrismUtils.java b/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/utils/PrismUtils.java index 93fed4fb95..e74be30c1b 100644 --- a/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/utils/PrismUtils.java +++ b/plugins/prism-interface/src/main/java/org/polypheny/db/prisminterface/utils/PrismUtils.java @@ -23,7 +23,6 @@ import org.polypheny.db.type.entity.PolyString; import org.polypheny.db.type.entity.PolyValue; import org.polypheny.db.type.entity.document.PolyDocument; -import org.polypheny.db.type.entity.graph.PolyDictionary; import org.polypheny.prism.ColumnMeta; import org.polypheny.prism.DocumentFrame; import org.polypheny.prism.Frame; @@ -102,11 +101,11 @@ public static Frame buildRelationalFrame( boolean isLast, List> public static Frame buildDocumentFrame( boolean isLast, List data ) { // ToDo: fix me: update counts are sometimes returned as normal results instead of scalar ones. - if (data.size() == 1 && data.get(0).isLong()) { - data = List.of(new PolyDocument( + if ( data.size() == 1 && data.get( 0 ).isLong() ) { + data = List.of( new PolyDocument( new PolyString( "updateCount" ), - data.get(0) - )); + data.get( 0 ) + ) ); } List documents = data.stream() diff --git a/plugins/sql-language/src/main/codegen/Parser.jj b/plugins/sql-language/src/main/codegen/Parser.jj index 62e3cef494..0500138fb9 100644 --- a/plugins/sql-language/src/main/codegen/Parser.jj +++ b/plugins/sql-language/src/main/codegen/Parser.jj @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
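Two short notes on the hunks above, with invented sample values (not part of this patch). First, getFinalFunction wraps DECIMAL operands in toFloat(...) before applying the renderer, so a DECIMAL division such as price / qty comes out as toFloat(n.price) / n.qty in Cypher. Second, the buildDocumentFrame workaround rewraps a bare update count as a one-field document before framing it:

    // Shape sketch only: a scalar result such as
    //   data = [ 3 ]            (a single long value)
    // becomes
    //   data = [ { "updateCount": 3 } ]
    // and therefore serializes into the document frame as {"updateCount": 3}.
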
diff --git a/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocAggregateActivity.java b/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocAggregateActivity.java index 3fbbbef261..c4eaa56cbe 100644 --- a/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocAggregateActivity.java +++ b/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocAggregateActivity.java @@ -103,7 +103,7 @@ public AlgNode fuse( List inputs, Settings settings, AlgCluster cluster for ( LaxAggregateCall aggCall : aggCalls ) { includes.put( aggCall.name, ActivityUtils.getDocRexNameRef( aggCall.name, 0 ) ); } - return LogicalDocumentProject.create( aggNode, includes, List.of() ); + return LogicalDocumentProject.create( aggNode, includes, List.of(), Map.of() ); } diff --git a/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocSelectFieldsActivity.java b/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocSelectFieldsActivity.java index 2a674ff638..447aa16e62 100644 --- a/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocSelectFieldsActivity.java +++ b/plugins/workflow-engine/src/main/java/org/polypheny/db/workflow/dag/activities/impl/transform/DocSelectFieldsActivity.java @@ -139,7 +139,7 @@ public AlgNode fuse( List inputs, Settings settings, AlgCluster cluster AlgDataType type = getDocType(); FieldSelectValue setting = settings.get( "fields", FieldSelectValue.class ); if ( setting.includeUnspecified() ) { - return LogicalDocumentProject.create( inputs.get( 0 ), Map.of(), setting.getExclude() ); + return LogicalDocumentProject.create( inputs.get( 0 ), new HashMap<>(), setting.getExclude(), new HashMap<>() ); } List fields = setting.getInclude(); @@ -147,7 +147,7 @@ public AlgNode fuse( List inputs, Settings settings, AlgCluster cluster field -> field, field -> RexNameRef.create( List.of( field.split( "\\." 
) ), null, type ) ) ); - return LogicalDocumentProject.create( inputs.get( 0 ), nameRefs, setting.getExclude() ); + return LogicalDocumentProject.create( inputs.get( 0 ), nameRefs, setting.getExclude(), new HashMap<>() ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 2f9e266db1..f8c6596c1b 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -224,7 +224,7 @@ public static void commitAndFinish( List executedContexts, Quer boolean lastRolledBack = false; for ( Result result : results ) { - if (result.xid != null ) { + if ( result.xid != null ) { lastRolledBack = abortedXids.contains( result.xid ); } result.isRolledBack = lastRolledBack; @@ -474,9 +474,11 @@ public static List computeResultData( final List> rows boolean hasMoreRows = context.getIterator().hasMoreRows(); + String[] normalizedData = data.stream().map( d -> d.get( 0 ).toJson() ).toArray( String[]::new ); + return DocResult.builder() .header( new FieldDefinition[]{ FieldDefinition.builder().name( "Document" ).dataType( DataModel.DOCUMENT.name() ).build() } ) - .data( data.stream().map( d -> d.get( 0 ).toJson() ).toArray( String[]::new ) ) + .data( normalizedData ) .query( context.getQuery().getQuery() ) .language( context.getQuery().getLanguage() ) .queryType( QueryType.from( context.getImplementation().getKind() ) )