From b0e57a0f71fd918e76b15dbb6769d9b2f015975c Mon Sep 17 00:00:00 2001 From: Martin Vahlensieck Date: Thu, 3 Oct 2024 14:45:12 +0200 Subject: [PATCH 1/5] Improve error message --- .../org/polypheny/simpleclient/executor/OltpBenchExecutor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/polypheny/simpleclient/executor/OltpBenchExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/OltpBenchExecutor.java index f9df34a..02f2ba5 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/OltpBenchExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/OltpBenchExecutor.java @@ -63,7 +63,7 @@ public abstract class OltpBenchExecutor implements Executor { } else if ( SystemUtils.IS_OS_MAC ) { OLTPBENCH_RELEASE_URL = "https://github.com/polypheny/OLTPBench/releases/download/v1.2.1/oltpbench-polypheny-1.2.1-jdk11-mac64.zip"; } else { - throw new RuntimeException( "Unknown OS: " + SystemUtils.OS_NAME ); + throw new RuntimeException( "Unsupported OS: " + SystemUtils.OS_NAME ); } FILE_NAME = OLTPBENCH_RELEASE_URL.substring( OLTPBENCH_RELEASE_URL.lastIndexOf( '/' ) + 1 ); CLIENT_DIR = System.getProperty( "user.home" ) + File.separator + ".polypheny" + File.separator + "client" + File.separator; From 4483f46540d1f99843e0e8402e2d44455f64cfcf Mon Sep 17 00:00:00 2001 From: Martin Vahlensieck Date: Thu, 5 Dec 2024 17:37:16 +0100 Subject: [PATCH 2/5] Fix various lints --- .../cli/AbstractOltpBenchCommand.java | 8 +-- .../simpleclient/cli/ComsCommand.java | 31 +++------ .../simpleclient/cli/DocBenchCommand.java | 10 +-- .../simpleclient/cli/DumpCommand.java | 2 +- .../simpleclient/cli/GavelCommand.java | 8 +-- .../simpleclient/cli/GraphCommand.java | 10 +-- .../simpleclient/cli/KnnCommand.java | 10 +-- .../simpleclient/cli/MultiBenchCommand.java | 12 ++-- .../simpleclient/cli/MultimediaCommand.java | 12 ++-- .../executor/CottontaildbExecutor.java | 64 +++++++++---------- .../simpleclient/executor/JdbcExecutor.java | 
8 +-- .../executor/PolyphenyDbExecutor.java | 13 ++-- .../executor/PolyphenyDbHttpExecutor.java | 8 +-- .../executor/PolyphenyDbJdbcExecutor.java | 2 +- .../executor/PolyphenyDbMongoQlExecutor.java | 8 +-- .../executor/PolyphenyDbRestExecutor.java | 24 +++---- .../executor/SurrealDBExecutor.java | 2 +- .../simpleclient/main/ChronosAgent.java | 32 +++++----- .../simpleclient/main/ProgressReporter.java | 2 +- .../simpleclient/query/CottontailQuery.java | 11 +--- .../polypheny/simpleclient/query/Query.java | 2 +- .../simpleclient/query/QueryListEntry.java | 2 +- .../simpleclient/query/RawQuery.java | 2 +- .../scenario/EvaluationThread.java | 2 +- .../simpleclient/scenario/Scenario.java | 2 +- .../simpleclient/scenario/coms/Coms.java | 15 +---- .../scenario/coms/SchemaGenerator.java | 6 +- .../coms/simulation/NetworkGenerator.java | 2 +- .../scenario/docbench/DocBench.java | 8 +-- .../simpleclient/scenario/gavel/Gavel.java | 8 +-- .../gavel/queryBuilder/InsertCategory.java | 4 +- .../scenario/graph/DataGenerator.java | 2 +- .../scenario/graph/GraphBench.java | 4 +- .../scenario/knnbench/KnnBench.java | 6 +- .../queryBuilder/InsertIntFeature.java | 2 +- .../knnbench/queryBuilder/InsertMetadata.java | 2 +- .../queryBuilder/InsertRealFeature.java | 2 +- .../scenario/multibench/MultiBench.java | 13 ++-- .../scenario/multibench/MultiBenchConfig.java | 4 +- .../scenario/multimedia/DataGenerator.java | 8 +-- .../scenario/multimedia/MediaGenerator.java | 2 +- .../scenario/multimedia/MultimediaBench.java | 6 +- .../oltpbench/AbstractOltpBenchScenario.java | 4 +- .../scenario/oltpbench/tpcc/Tpcc.java | 2 +- .../scenario/oltpbench/ycsb/Ycsb.java | 2 +- 45 files changed, 171 insertions(+), 218 deletions(-) diff --git a/src/main/java/org/polypheny/simpleclient/cli/AbstractOltpBenchCommand.java b/src/main/java/org/polypheny/simpleclient/cli/AbstractOltpBenchCommand.java index e1a6fd6..0215fc2 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/AbstractOltpBenchCommand.java +++ 
b/src/main/java/org/polypheny/simpleclient/cli/AbstractOltpBenchCommand.java @@ -73,14 +73,14 @@ public int run() throws SQLException { }; try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { data( executorFactory, multiplier ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { workload( executorFactory, multiplier ); - } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { schema( executorFactory ); } else { - System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { log.error( "Exception while executing OltpBench!", t ); diff --git a/src/main/java/org/polypheny/simpleclient/cli/ComsCommand.java b/src/main/java/org/polypheny/simpleclient/cli/ComsCommand.java index deb6312..351964d 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/ComsCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/ComsCommand.java @@ -95,7 +95,7 @@ public int run() throws SQLException { ExecutorFactory executorFactory; try { - switch ( args.get( 0 ).toLowerCase() ) { + switch ( args.getFirst().toLowerCase() ) { case "schema": executorFactory = getExecutorFactory( true ); ComsScenario.schema( executorFactory, config ); @@ -113,7 +113,7 @@ public int run() throws SQLException { ComsScenario.workload( executorFactory, config, multiplier, writeCsv ); break; default: - System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { @@ -132,25 +132,14 @@ public int run() throws SQLException { private ExecutorFactory getExecutorFactory( boolean createDocker ) { - ExecutorFactory executorFactory; - switch ( config.mode ) { - - case POLYPHENY: - executorFactory = new PolyphenyDbMultiExecutorFactory( polyphenyDbHost ); - 
break; - case NATIVE: - executorFactory = new MultiExecutorFactory( - new PostgresExecutor.PostgresExecutorFactory( postgres, false ), - new NeoExecutorFactory( neo4j ), - new MongoQlExecutorFactory( mongoDB ) ); - break; - case SURREALDB: - executorFactory = new SurrealDBExecutorFactory( surrealHost, "8989", createDocker ); - break; - default: - throw new IllegalArgumentException(); - } - return executorFactory; + return switch ( config.mode ) { + case POLYPHENY -> new PolyphenyDbMultiExecutorFactory( polyphenyDbHost ); + case NATIVE -> new MultiExecutorFactory( + new PostgresExecutor.PostgresExecutorFactory( postgres, false ), + new NeoExecutorFactory( neo4j ), + new MongoQlExecutorFactory( mongoDB ) ); + case SURREALDB -> new SurrealDBExecutorFactory( surrealHost, "8989", createDocker ); + }; } diff --git a/src/main/java/org/polypheny/simpleclient/cli/DocBenchCommand.java b/src/main/java/org/polypheny/simpleclient/cli/DocBenchCommand.java index d598af6..0fc6a2f 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/DocBenchCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/DocBenchCommand.java @@ -79,16 +79,16 @@ public int run() throws SQLException { ExecutorFactory executorFactory = new PolyphenyDbMongoQlExecutorFactory( polyphenyDbHost ); try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { DocBenchScenario.data( executorFactory, multiplier, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { DocBenchScenario.workload( executorFactory, multiplier, true, writeCsv, dumpQueryList ); - } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { DocBenchScenario.schema( executorFactory, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "warmup" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "warmup" ) ) { DocBenchScenario.warmup( 
executorFactory, multiplier, true, dumpQueryList ); } else { - System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { log.error( "Exception while executing DocBench!", t ); diff --git a/src/main/java/org/polypheny/simpleclient/cli/DumpCommand.java b/src/main/java/org/polypheny/simpleclient/cli/DumpCommand.java index bcccb94..b905d27 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/DumpCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/DumpCommand.java @@ -63,7 +63,7 @@ public int run() { System.err.println( "Missing entity name" ); System.exit( 1 ); } - String table = args.get( 0 ); + String table = args.getFirst(); Connection conn = null; try { diff --git a/src/main/java/org/polypheny/simpleclient/cli/GavelCommand.java b/src/main/java/org/polypheny/simpleclient/cli/GavelCommand.java index 19e1ce6..2b4d71c 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/GavelCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/GavelCommand.java @@ -104,13 +104,13 @@ public int run() { } try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { GavelScenario.data( executorFactory, multiplier, true, queryMode ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { GavelScenario.workload( executorFactory, multiplier, true, writeCsv, dumpQueryList, queryMode ); - } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { GavelScenario.schema( executorFactory, true, queryMode ); - } else if ( args.get( 0 ).equalsIgnoreCase( "warmup" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "warmup" ) ) { GavelScenario.warmup( executorFactory, multiplier, true, dumpQueryList, queryMode ); } else { System.err.println( "Unknown task: " + args.get( 0 ) ); diff --git 
a/src/main/java/org/polypheny/simpleclient/cli/GraphCommand.java b/src/main/java/org/polypheny/simpleclient/cli/GraphCommand.java index 73e662e..7d2e73c 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/GraphCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/GraphCommand.java @@ -79,16 +79,16 @@ public int run() throws SQLException { ExecutorFactory executorFactory = new PolyphenyDbCypherExecutorFactory( polyphenyDbHost ); try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { GraphBenchScenario.data( executorFactory, multiplier, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { GraphBenchScenario.workload( executorFactory, multiplier, true, writeCsv, dumpQueryList ); - } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { GraphBenchScenario.schema( executorFactory, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "warmup" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "warmup" ) ) { GraphBenchScenario.warmup( executorFactory, multiplier, true, dumpQueryList ); } else { - System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { log.error( "Exception while executing GraphBench!", t ); diff --git a/src/main/java/org/polypheny/simpleclient/cli/KnnCommand.java b/src/main/java/org/polypheny/simpleclient/cli/KnnCommand.java index 8710390..a55317e 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/KnnCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/KnnCommand.java @@ -81,16 +81,16 @@ public int run() throws SQLException { executorFactory = new PolyphenyDbJdbcExecutorFactory( polyphenyDbHost, false ); try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { 
KnnBenchScenario.data( executorFactory, multiplier, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { KnnBenchScenario.workload( executorFactory, multiplier, true, writeCsv, dumpQueryList ); - } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { KnnBenchScenario.schema( executorFactory, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "warmup" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "warmup" ) ) { KnnBenchScenario.warmup( executorFactory, multiplier, true, dumpQueryList ); } else { - System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { log.error( "Exception while executing KnnBench!", t ); diff --git a/src/main/java/org/polypheny/simpleclient/cli/MultiBenchCommand.java b/src/main/java/org/polypheny/simpleclient/cli/MultiBenchCommand.java index a46b577..66d0f7e 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/MultiBenchCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/MultiBenchCommand.java @@ -62,7 +62,7 @@ public class MultiBenchCommand implements CliRunnable { @Override public int run() throws SQLException { - if ( args == null || args.size() < 1 ) { + if ( args == null || args.isEmpty() ) { System.err.println( "Missing task" ); System.exit( 1 ); } @@ -79,16 +79,16 @@ public int run() throws SQLException { ExecutorFactory executorFactory = new PolyphenyDbMultiExecutorFactory( polyphenyDbHost ); try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { MultiBenchScenario.data( executorFactory, multiplier, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { MultiBenchScenario.workload( executorFactory, multiplier, true, writeCsv, dumpQueryList ); 
- } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { MultiBenchScenario.schema( executorFactory, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "warmup" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "warmup" ) ) { MultiBenchScenario.warmup( executorFactory, multiplier, true, dumpQueryList ); } else { - System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { log.error( "Exception while executing MultiBench!", t ); diff --git a/src/main/java/org/polypheny/simpleclient/cli/MultimediaCommand.java b/src/main/java/org/polypheny/simpleclient/cli/MultimediaCommand.java index 4c64848..7c6b474 100644 --- a/src/main/java/org/polypheny/simpleclient/cli/MultimediaCommand.java +++ b/src/main/java/org/polypheny/simpleclient/cli/MultimediaCommand.java @@ -62,7 +62,7 @@ public class MultimediaCommand implements CliRunnable { @Override public int run() { - if ( args == null || args.size() < 1 ) { + if ( args == null || args.isEmpty() ) { System.err.println( "Missing task" ); System.exit( 1 ); } @@ -84,16 +84,16 @@ public int run() { } try { - if ( args.get( 0 ).equalsIgnoreCase( "data" ) ) { + if ( args.getFirst().equalsIgnoreCase( "data" ) ) { MultimediaScenario.data( executorFactory, multiplier, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "workload" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "workload" ) ) { MultimediaScenario.workload( executorFactory, multiplier, true, writeCsv, dumpQueryList ); - } else if ( args.get( 0 ).equalsIgnoreCase( "schema" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "schema" ) ) { MultimediaScenario.schema( executorFactory, true ); - } else if ( args.get( 0 ).equalsIgnoreCase( "warmup" ) ) { + } else if ( args.getFirst().equalsIgnoreCase( "warmup" ) ) { MultimediaScenario.warmup( executorFactory, multiplier, true, dumpQueryList ); } else { - 
System.err.println( "Unknown task: " + args.get( 0 ) ); + System.err.println( "Unknown task: " + args.getFirst() ); } } catch ( Throwable t ) { log.error( "Exception while executing MultimediaBench!", t ); diff --git a/src/main/java/org/polypheny/simpleclient/executor/CottontaildbExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/CottontaildbExecutor.java index 552bd3b..9d828a0 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/CottontaildbExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/CottontaildbExecutor.java @@ -100,19 +100,19 @@ private void ensurePublicSchemaExists() { private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean expectResult ) { - switch ( cottontailQuery.type ) { + switch ( cottontailQuery.type() ) { case QUERY: { final CottonDQLBlockingStub stub = CottonDQLGrpc.newBlockingStub( this.channel ).withDeadlineAfter( 300_000, TimeUnit.MILLISECONDS ); try { if ( expectResult ) { final ArrayList results = new ArrayList<>(); - stub.query( QueryMessage.newBuilder().setQuery( (CottontailGrpc.Query) cottontailQuery.query ).build() ).forEachRemaining( results::add ); + stub.query( QueryMessage.newBuilder().setQuery( (CottontailGrpc.Query) cottontailQuery.query() ).build() ).forEachRemaining( results::add ); return results; } else { - stub.query( QueryMessage.newBuilder().setQuery( (CottontailGrpc.Query) cottontailQuery.query ).build() ); + stub.query( QueryMessage.newBuilder().setQuery( (CottontailGrpc.Query) cottontailQuery.query() ).build() ); } } catch ( StatusRuntimeException e ) { - log.error( "Unable to query cottontail. Query: {}", (CottontailGrpc.Query) cottontailQuery.query, e ); + log.error( "Unable to query cottontail. 
Query: {}", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to query cottontail.", e ); } break; @@ -122,10 +122,10 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp try { if ( expectResult ) { final ArrayList results = new ArrayList<>(); - stub.batchedQuery( BatchedQueryMessage.newBuilder().addAllQueries( (List) cottontailQuery.query ).build() ).forEachRemaining( results::add ); + stub.batchedQuery( BatchedQueryMessage.newBuilder().addAllQueries( (List) cottontailQuery.query() ).build() ).forEachRemaining( results::add ); return results; } else { - stub.batchedQuery( BatchedQueryMessage.newBuilder().addAllQueries( (List) cottontailQuery.query ).build() ); + stub.batchedQuery( BatchedQueryMessage.newBuilder().addAllQueries( (List) cottontailQuery.query() ).build() ); } } catch ( StatusRuntimeException e ) { log.error( "Unable to batch query cottontail.", e ); @@ -134,11 +134,11 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp break; } case INSERT: { - this.insert( ImmutableList.of( (CottontailGrpc.InsertMessage) cottontailQuery.query ) ); + this.insert( ImmutableList.of( (CottontailGrpc.InsertMessage) cottontailQuery.query() ) ); break; } case INSERT_BATCH: { - this.insert( (List) cottontailQuery.query ); + this.insert( (List) cottontailQuery.query() ); break; } case UPDATE: { @@ -147,13 +147,13 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp try { if ( expectResult ) { final ArrayList results = new ArrayList<>(); - managementStub.update( (CottontailGrpc.UpdateMessage) cottontailQuery.query ).forEachRemaining( results::add ); + managementStub.update( (CottontailGrpc.UpdateMessage) cottontailQuery.query() ).forEachRemaining( results::add ); return results; } else { - managementStub.update( (CottontailGrpc.UpdateMessage) cottontailQuery.query ); + managementStub.update( (CottontailGrpc.UpdateMessage) cottontailQuery.query() ); } } catch ( 
StatusRuntimeException e ) { - log.error( "Unable to execute cottontail update: {}", (CottontailGrpc.UpdateMessage) cottontailQuery.query, e ); + log.error( "Unable to execute cottontail update: {}", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to execute cottontail update.", e ); } break; @@ -164,35 +164,35 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp try { if ( expectResult ) { final ArrayList results = new ArrayList<>(); - managementStub.delete( (CottontailGrpc.DeleteMessage) cottontailQuery.query ).forEachRemaining( results::add ); + managementStub.delete( (CottontailGrpc.DeleteMessage) cottontailQuery.query() ).forEachRemaining( results::add ); return results; } else { - managementStub.delete( (CottontailGrpc.DeleteMessage) cottontailQuery.query ); + managementStub.delete( (CottontailGrpc.DeleteMessage) cottontailQuery.query() ); } } catch ( StatusRuntimeException e ) { - log.error( "Unable to execute cottontail delete: {}", (CottontailGrpc.DeleteMessage) cottontailQuery.query, e ); + log.error( "Unable to execute cottontail delete: {}", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to execute cottontail delete.", e ); } break; } case SCHEMA_CREATE: { final CottonDDLFutureStub stub = CottonDDLGrpc.newFutureStub( channel ); - ListenableFuture future = stub.createSchema( (CottontailGrpc.Schema) cottontailQuery.query ); + ListenableFuture future = stub.createSchema( (CottontailGrpc.Schema) cottontailQuery.query() ); try { future.get(); } catch ( InterruptedException | ExecutionException e ) { - log.error( "Unable to create cottontail schema: {}.", (CottontailGrpc.Schema) cottontailQuery.query, e ); + log.error( "Unable to create cottontail schema: {}.", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to create cottontail schema.", e ); } break; } case SCHEMA_DROP: { final CottonDDLFutureStub stub = CottonDDLGrpc.newFutureStub( channel ); - ListenableFuture future = 
stub.dropSchema( (CottontailGrpc.Schema) cottontailQuery.query ); + ListenableFuture future = stub.dropSchema( (CottontailGrpc.Schema) cottontailQuery.query() ); try { future.get(); } catch ( InterruptedException | ExecutionException e ) { - log.error( "Unable to drop cottontail schema: {}.", (CottontailGrpc.Schema) cottontailQuery.query, e ); + log.error( "Unable to drop cottontail schema: {}.", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to drop cottontail schema.", e ); } break; @@ -200,9 +200,9 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp case ENTITY_CREATE: { final CottonDDLBlockingStub stub = CottonDDLGrpc.newBlockingStub( this.channel ); try { - stub.createEntity( (CottontailGrpc.EntityDefinition) cottontailQuery.query ); + stub.createEntity( (CottontailGrpc.EntityDefinition) cottontailQuery.query() ); } catch ( StatusRuntimeException e ) { - log.error( "Unable to create cottontail entity: {}", (CottontailGrpc.EntityDefinition) cottontailQuery.query, e ); + log.error( "Unable to create cottontail entity: {}", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to create cottontail entity.", e ); } break; @@ -210,9 +210,9 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp case ENTITY_DROP: { final CottonDDLBlockingStub stub = CottonDDLGrpc.newBlockingStub( this.channel ); try { - stub.dropEntity( (CottontailGrpc.Entity) cottontailQuery.query ); + stub.dropEntity( (CottontailGrpc.Entity) cottontailQuery.query() ); } catch ( StatusRuntimeException e ) { - log.error( "Unable to drop cottontail entity: {}", (CottontailGrpc.Entity) cottontailQuery.query, e ); + log.error( "Unable to drop cottontail entity: {}", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to drop cottontail entity.", e ); } break; @@ -220,9 +220,9 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp case TRUNCATE: { final 
CottonDDLBlockingStub stub = CottonDDLGrpc.newBlockingStub( this.channel ); try { - stub.truncate( (CottontailGrpc.Entity) cottontailQuery.query ); + stub.truncate( (CottontailGrpc.Entity) cottontailQuery.query() ); } catch ( StatusRuntimeException e ) { - log.error( "Unable to truncate cottontail entity: {}", (CottontailGrpc.Entity) cottontailQuery.query, e ); + log.error( "Unable to truncate cottontail entity: {}", cottontailQuery.query(), e ); throw new RuntimeException( "Unable to truncate cottontail entity.", e ); } break; @@ -236,7 +236,7 @@ private Object executeWrappedQuery( CottontailQuery cottontailQuery, boolean exp private boolean insert( List messages ) { CottonDMLStub managementStub = CottonDMLGrpc.newStub( this.channel ); final boolean[] status = { false, false }; /* {done, error}. */ - final StreamObserver observer = new StreamObserver() { + final StreamObserver observer = new StreamObserver<>() { @Override public void onNext( CottontailGrpc.Status value ) { @@ -276,17 +276,17 @@ public void onCompleted() { private String wrapperToString( CottontailQuery query ) { String payload; - switch ( query.type ) { + switch ( query.type() ) { case QUERY_BATCH: case INSERT_BATCH: - payload = String.join( ",", (List) query.query ); + payload = String.join( ",", (List) query.query() ); default: - payload = query.query.toString(); + payload = query.query().toString(); } payload = payload.replace( "\n", "" ).replace( "\r", "" ); - return query.type + ":" + payload; + return query.type() + ":" + payload; } @@ -350,12 +350,12 @@ public void executeInsertList( List batchList, AbstractConfig c for ( BatchableInsert insert : batchList ) { CottontailQuery insertMessage = insert.getCottontail(); if ( insertMessage != null ) { - if ( insertMessage.type != QueryType.INSERT ) { - log.error( "Batchable Insert is not an InsertMessage. {}", insertMessage.query ); + if ( insertMessage.type() != QueryType.INSERT ) { + log.error( "Batchable Insert is not an InsertMessage. 
{}", insertMessage.query() ); throw new RuntimeException( "Batchable Insert is not an InsertMessage." ); } - insertMessages.add( (InsertMessage) insertMessage.query ); + insertMessages.add( (InsertMessage) insertMessage.query() ); } } diff --git a/src/main/java/org/polypheny/simpleclient/executor/JdbcExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/JdbcExecutor.java index 78fda6d..7440a5d 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/JdbcExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/JdbcExecutor.java @@ -218,7 +218,7 @@ public void closeConnection() throws ExecutorException { @Override public void executeInsertList( List queryList, AbstractConfig config ) throws ExecutorException { - if ( queryList.size() > 0 ) { + if ( !queryList.isEmpty() ) { if ( config.usePreparedBatchForDataInsertion() ) { executeInsertListAsPreparedBatch( queryList ); } else { @@ -230,7 +230,7 @@ public void executeInsertList( List queryList, AbstractConfig c protected void executeInsertListAsPreparedBatch( List queryList ) throws ExecutorException { try { - PreparedStatement preparedStatement = connection.prepareStatement( queryList.get( 0 ).getParameterizedSqlQuery() ); + PreparedStatement preparedStatement = connection.prepareStatement( queryList.getFirst().getParameterizedSqlQuery() ); ArrayList files = new ArrayList<>(); for ( BatchableInsert insert : queryList ) { Map> data = insert.getParameterValues(); @@ -269,9 +269,7 @@ protected void executeInsertListAsPreparedBatch( List queryList preparedStatement.executeBatch(); preparedStatement.close(); files.forEach( File::delete ); - } catch ( SQLException | FileNotFoundException e ) { - throw new ExecutorException( e ); - } catch ( IOException e ) { + } catch ( SQLException | IOException e ) { throw new ExecutorException( e ); } } diff --git a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java 
b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java index 1f526ad..93acb7d 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java @@ -759,14 +759,11 @@ public void startStatusDataGathering( int intervalSeconds ) { throw new RuntimeException( "Status gathering is already running!" ); } log.info( "Start gather status data from Polypheny every " + intervalSeconds + " seconds." ); - Runnable statusGatherer = new Runnable() { - @Override - public void run() { - try { - statuses.add( gatherOnce() ); - } catch ( Exception e ) { - log.error( "Unable to gather status data from Polypheny", e ); - } + Runnable statusGatherer = () -> { + try { + statuses.add( gatherOnce() ); + } catch ( Exception e ) { + log.error( "Unable to gather status data from Polypheny", e ); } }; statusGatheringService = Executors.newScheduledThreadPool( 1 ); diff --git a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbHttpExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbHttpExecutor.java index 46071f6..afbaeef 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbHttpExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbHttpExecutor.java @@ -151,9 +151,9 @@ public void setConfig( String key, String value ) { @Override public long executeQuery( Query query ) throws ExecutorException { - if ( query instanceof MultipartInsert ) { - long l = executeQuery( new RawQuery( null, ((MultipartInsert) query).buildMultipartInsert(), query.isExpectResultSet() ) ); - ((MultipartInsert) query).cleanup(); + if ( query instanceof MultipartInsert multipartInsert ) { + long l = executeQuery( new RawQuery( null, multipartInsert.buildMultipartInsert(), query.isExpectResultSet() ) ); + multipartInsert.cleanup(); return l; } long time; @@ -219,7 +219,7 @@ public long executeQueryAndGetNumber( Query 
query ) throws ExecutorException { // Get result of a count query JSONArray res = result.getBody().getObject().getJSONArray( "data" ); if ( res.length() != 1 ) { - throw new ExecutorException( "Invalid result: " + res.toString() ); + throw new ExecutorException( "Invalid result: " + res ); } return res.getJSONArray( 0 ).getLong( 0 ); diff --git a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbJdbcExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbJdbcExecutor.java index 86d64d7..f5c7cb1 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbJdbcExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbJdbcExecutor.java @@ -44,7 +44,7 @@ private PolyphenyDbJdbcExecutor( String polyphenyHost, CsvWriter csvWriter, bool Driver driver; try { CustomClassLoader loader = new CustomClassLoader( ClassLoader.getSystemClassLoader() ); - Class driverClass; + Class driverClass; if ( PolyphenyVersionSwitch.getInstance().usePrismJdbcDriver ) { driverClass = Class.forName( "org.polypheny.jdbc.PolyphenyDriver", true, loader ); } else { diff --git a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbMongoQlExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbMongoQlExecutor.java index 0acf1a9..d967b31 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbMongoQlExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbMongoQlExecutor.java @@ -56,9 +56,9 @@ public PolyphenyDbMongoQlExecutor( String host, CsvWriter csvWriter, String name @Override public long executeQuery( Query query ) throws ExecutorException { //query.debug(); - if ( query instanceof MultipartInsert ) { - long l = executeQuery( new RawQuery( null, ((MultipartInsert) query).buildMultipartInsert(), query.isExpectResultSet() ) ); - ((MultipartInsert) query).cleanup(); + if ( query instanceof MultipartInsert multipartInsert ) { + long l = executeQuery( new 
RawQuery( null, multipartInsert.buildMultipartInsert(), query.isExpectResultSet() ) ); + multipartInsert.cleanup(); return l; } long time; @@ -104,7 +104,7 @@ public long executeQueryAndGetNumber( Query query ) throws ExecutorException { // Get result of a count query JSONArray res = result.getBody().getObject().getJSONArray( "data" ); if ( res.length() != 1 ) { - throw new ExecutorException( "Invalid result: " + res.toString() ); + throw new ExecutorException( "Invalid result: " + res ); } return res.getJSONArray( 0 ).getLong( 0 ); diff --git a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbRestExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbRestExecutor.java index d0abe7c..6b1685d 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbRestExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbRestExecutor.java @@ -71,9 +71,9 @@ public void reset() throws ExecutorException { @Override public long executeQuery( Query query ) throws ExecutorException { //query.debug(); - if ( query instanceof MultipartInsert ) { - long l = executeQuery( new RawQuery( null, ((MultipartInsert) query).buildMultipartInsert(), query.isExpectResultSet() ) ); - ((MultipartInsert) query).cleanup(); + if ( query instanceof MultipartInsert multipartInsert ) { + long l = executeQuery( new RawQuery( null, multipartInsert.buildMultipartInsert(), query.isExpectResultSet() ) ); + multipartInsert.cleanup(); return l; } long time; @@ -121,9 +121,9 @@ public long executeQuery( Query query ) throws ExecutorException { @Override public long executeQueryAndGetNumber( Query query ) throws ExecutorException { query.debug(); - if ( query instanceof MultipartInsert ) { - long l = executeQuery( new RawQuery( null, ((MultipartInsert) query).buildMultipartInsert(), query.isExpectResultSet() ) ); - ((MultipartInsert) query).cleanup(); + if ( query instanceof MultipartInsert multipartInsert) { + long l = executeQuery( new 
RawQuery( null, multipartInsert.buildMultipartInsert(), query.isExpectResultSet() ) ); + multipartInsert.cleanup(); return l; } if ( query.getRest() != null ) { @@ -145,11 +145,11 @@ public long executeQueryAndGetNumber( Query query ) throws ExecutorException { // Get result of a count query JSONArray res = result.getBody().getObject().getJSONArray( "result" ); if ( res.length() != 1 ) { - throw new ExecutorException( "Invalid result: " + res.toString() ); + throw new ExecutorException( "Invalid result: " + res ); } Set names = res.getJSONObject( 0 ).keySet(); if ( names.size() != 1 ) { - throw new ExecutorException( "Invalid result: " + res.toString() ); + throw new ExecutorException( "Invalid result: " + res ); } return res.getJSONObject( 0 ).getLong( names.iterator().next() ); } catch ( UnirestException e ) { @@ -195,9 +195,9 @@ public void executeInsertList( List batchList, AbstractConfig c List rows = new ArrayList<>(); for ( BatchableInsert query : batchList ) { query.debug(); - if ( query instanceof MultipartInsert ) { - executeQuery( new RawQuery( null, ((MultipartInsert) query).buildMultipartInsert(), query.isExpectResultSet() ) ); - ((MultipartInsert) query).cleanup(); + if ( query instanceof MultipartInsert multipartInsert ) { + executeQuery( new RawQuery( null, multipartInsert.buildMultipartInsert(), query.isExpectResultSet() ) ); + multipartInsert.cleanup(); continue; } if ( currentTable == null ) { @@ -210,7 +210,7 @@ public void executeInsertList( List batchList, AbstractConfig c throw new RuntimeException( "Different tables in multi-inserts. This should not happen!" 
); } } - if ( rows.size() > 0 ) { + if ( !rows.isEmpty() ) { executeQuery( new RawQuery( null, Query.buildRestInsert( currentTable, rows ), false ) ); } } diff --git a/src/main/java/org/polypheny/simpleclient/executor/SurrealDBExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/SurrealDBExecutor.java index c3d149e..47f746b 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/SurrealDBExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/SurrealDBExecutor.java @@ -141,7 +141,7 @@ public void closeConnection() throws ExecutorException { @Override public void executeInsertList( List batchList, AbstractConfig config ) throws ExecutorException { - StringBuilder query = new StringBuilder( "INSERT INTO " + batchList.get( 0 ).getEntity() + " VALUES" ); + StringBuilder query = new StringBuilder( "INSERT INTO " + batchList.getFirst().getEntity() + " VALUES" ); for ( BatchableInsert insert : batchList ) { query.append( String.format( "(%s)", String.join( ", ", insert.getRowValues() ) ) ); } diff --git a/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java b/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java index 6254f02..02956b7 100644 --- a/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java +++ b/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java @@ -354,13 +354,13 @@ protected Object prepare( ChronosJob chronosJob, final File inputDirectory, fina throw new RuntimeException( "Unknown system: " + config.system ); } - if ( databaseInstance instanceof PolyphenyDbInstance ) { + if ( databaseInstance instanceof PolyphenyDbInstance polyphenyDbInstance ) { // Set workload monitoring - ((PolyphenyDbInstance) databaseInstance).setWorkloadMonitoring( config.workloadMonitoringLoadingData ); + polyphenyDbInstance.setWorkloadMonitoring( config.workloadMonitoringLoadingData ); // Start Polypheny status data gathering if ( PolyphenyVersionSwitch.getInstance().hasStatusEndpoint ) { - 
((PolyphenyDbInstance) databaseInstance).getStatusGatherer().startStatusDataGathering( 60 ); + polyphenyDbInstance.getStatusGatherer().startStatusDataGathering( 60 ); } } @@ -378,8 +378,8 @@ protected Object prepare( ChronosJob chronosJob, final File inputDirectory, fina throw e; } - if ( databaseInstance instanceof PolyphenyDbInstance && config.restartAfterLoadingData ) { - ((PolyphenyDbInstance) databaseInstance).restartPolypheny(); + if ( databaseInstance instanceof PolyphenyDbInstance polyphenyDbInstance && config.restartAfterLoadingData ) { + polyphenyDbInstance.restartPolypheny(); } return new ImmutableTriple<>( scenario, config, databaseInstance ); @@ -395,18 +395,18 @@ protected Object warmUp( ChronosJob chronosJob, final File inputDirectory, final @SuppressWarnings("unchecked") DatabaseInstance databaseInstance = ((Triple) o).getRight(); try { - if ( databaseInstance instanceof PolyphenyDbInstance ) { + if ( databaseInstance instanceof PolyphenyDbInstance polyphenyDbInstance ) { // Set workload monitoring - ((PolyphenyDbInstance) databaseInstance).setWorkloadMonitoring( config.workloadMonitoringWarmup ); + polyphenyDbInstance.setWorkloadMonitoring( config.workloadMonitoringWarmup ); // Enable icarus training -- to be removed if ( config.router != null && config.router.equals( "icarus" ) && PolyphenyVersionSwitch.getInstance().hasIcarusRoutingSettings ) { - ((PolyphenyDbInstance) databaseInstance).setIcarusRoutingTraining( true ); + polyphenyDbInstance.setIcarusRoutingTraining( true ); } // Enable Post Cost Aggregation if ( config.postCostAggregation.equals( "onWarmup" ) && !PolyphenyVersionSwitch.getInstance().hasIcarusRoutingSettings ) { - ((PolyphenyDbInstance) databaseInstance).setPostCostAggregation( true ); + polyphenyDbInstance.setPostCostAggregation( true ); } // Wait a moment to give Polypheny-DB the chance to process all data points from data insertion @@ -424,7 +424,7 @@ protected Object warmUp( ChronosJob chronosJob, final File inputDirectory, 
final config.progressReportBase ); scenario.warmUp( progressReporter ); - if ( databaseInstance instanceof PolyphenyDbInstance ) { + if ( databaseInstance instanceof PolyphenyDbInstance polyphenyDbInstance ) { // Wait a moment to give Polypheny-DB the chance to process all data points from warmup try { TimeUnit.MINUTES.sleep( 1 ); @@ -434,12 +434,12 @@ protected Object warmUp( ChronosJob chronosJob, final File inputDirectory, final // Disable Post Cost Aggregation if ( config.postCostAggregation.equals( "onWarmup" ) && !PolyphenyVersionSwitch.getInstance().hasIcarusRoutingSettings ) { - ((PolyphenyDbInstance) databaseInstance).setPostCostAggregation( false ); + polyphenyDbInstance.setPostCostAggregation( false ); } // Disable icarus training -- to be removed if ( config.router != null && config.router.equals( "icarus" ) && PolyphenyVersionSwitch.getInstance().hasIcarusRoutingSettings ) { - ((PolyphenyDbInstance) databaseInstance).setIcarusRoutingTraining( false ); + polyphenyDbInstance.setIcarusRoutingTraining( false ); } } } catch ( Exception e ) { @@ -467,8 +467,8 @@ protected Object execute( ChronosJob chronosJob, final File inputDirectory, fina } // Set workload monitoring - if ( databaseInstance instanceof PolyphenyDbInstance ) { - ((PolyphenyDbInstance) databaseInstance).setWorkloadMonitoring( config.workloadMonitoringExecutingWorkload ); + if ( databaseInstance instanceof PolyphenyDbInstance polyphenyDbInstance ) { + polyphenyDbInstance.setWorkloadMonitoring( config.workloadMonitoringExecutingWorkload ); } int numberOfThreads = config.numberOfThreads; @@ -510,8 +510,8 @@ protected Object analyze( ChronosJob chronosJob, final File inputDirectory, fina throw e; } - if ( databaseInstance instanceof PolyphenyDbInstance && PolyphenyVersionSwitch.getInstance().hasStatusEndpoint ) { - StatusGatherer statusGatherer = ((PolyphenyDbInstance) databaseInstance).getStatusGatherer(); + if ( databaseInstance instanceof PolyphenyDbInstance polyphenyDbInstance && 
PolyphenyVersionSwitch.getInstance().hasStatusEndpoint ) { + StatusGatherer statusGatherer = polyphenyDbInstance.getStatusGatherer(); // Stop gathering List statuses = statusGatherer.stopGathering(); diff --git a/src/main/java/org/polypheny/simpleclient/main/ProgressReporter.java b/src/main/java/org/polypheny/simpleclient/main/ProgressReporter.java index e2d5a53..fa7c373 100644 --- a/src/main/java/org/polypheny/simpleclient/main/ProgressReporter.java +++ b/src/main/java/org/polypheny/simpleclient/main/ProgressReporter.java @@ -84,7 +84,7 @@ public ReportQueryListProgress( List list, ProgressReporter prog public void run() { while ( true ) { theProgressReporter.update( totalNumber - theList.size(), totalNumber ); - if ( theList.size() == 0 ) { + if ( theList.isEmpty() ) { break; } try { diff --git a/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java b/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java index f925dd9..068c171 100644 --- a/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java +++ b/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java @@ -25,16 +25,7 @@ package org.polypheny.simpleclient.query; -public class CottontailQuery { - - public final QueryType type; - public final Object query; - - - public CottontailQuery( QueryType type, Object query ) { - this.type = type; - this.query = query; - } +public record CottontailQuery( QueryType type, Object query ) { public enum QueryType { diff --git a/src/main/java/org/polypheny/simpleclient/query/Query.java b/src/main/java/org/polypheny/simpleclient/query/Query.java index 924d015..99f578f 100644 --- a/src/main/java/org/polypheny/simpleclient/query/Query.java +++ b/src/main/java/org/polypheny/simpleclient/query/Query.java @@ -151,7 +151,7 @@ public static String buildMongoQlInsert( String collection, List fieldNa protected static String maybeQuote( Object o ) { if ( o instanceof String ) { - return 
"\"" + o.toString() + "\""; + return "\"" + o + "\""; } return o.toString(); } diff --git a/src/main/java/org/polypheny/simpleclient/query/QueryListEntry.java b/src/main/java/org/polypheny/simpleclient/query/QueryListEntry.java index e7366e8..76a7d5a 100644 --- a/src/main/java/org/polypheny/simpleclient/query/QueryListEntry.java +++ b/src/main/java/org/polypheny/simpleclient/query/QueryListEntry.java @@ -42,7 +42,7 @@ public QueryListEntry( Query query, int templateId ) { public QueryListEntry( Query query, List templateIds ) { this.query = query; - this.templateId = templateIds.get( 0 ); + this.templateId = templateIds.getFirst(); this.templateIds = templateIds; } diff --git a/src/main/java/org/polypheny/simpleclient/query/RawQuery.java b/src/main/java/org/polypheny/simpleclient/query/RawQuery.java index ea559be..222e3eb 100644 --- a/src/main/java/org/polypheny/simpleclient/query/RawQuery.java +++ b/src/main/java/org/polypheny/simpleclient/query/RawQuery.java @@ -52,7 +52,7 @@ public class RawQuery extends Query { private final String surrealQl; @Getter - private List types = Collections.emptyList(); + private final List types; public RawQuery( String sql, HttpRequest rest, boolean expectResultSet ) { diff --git a/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java b/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java index 0879843..412afd0 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java @@ -74,7 +74,7 @@ public void run() { while ( !queries.isEmpty() && !abort ) { measuredTimeStart = System.nanoTime(); try { - queryListEntry = queries.remove( 0 ); + queryListEntry = queries.removeFirst(); } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... 
// This can happen due to concurrency if two threads enter the while-loop and there is only one thread left // Simply leaf the loop diff --git a/src/main/java/org/polypheny/simpleclient/scenario/Scenario.java b/src/main/java/org/polypheny/simpleclient/scenario/Scenario.java index a458956..9ceb928 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/Scenario.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/Scenario.java @@ -76,7 +76,7 @@ protected Scenario( ExecutorFactory executorFactory, boolean commitAfterEveryQue protected void calculateResults( Map queryTypes, Properties properties, int templateId, List time ) { - if ( time.size() > 0 ) { + if ( !time.isEmpty() ) { LongSummaryStatistics summaryStatistics = time.stream().mapToLong( Long::longValue ).summaryStatistics(); double mean = summaryStatistics.getAverage(); long max = summaryStatistics.getMax(); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java b/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java index 472b1df..7e6fff7 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java @@ -38,8 +38,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; -import lombok.AllArgsConstructor; -import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.polypheny.simpleclient.QueryMode; import org.polypheny.simpleclient.cli.Mode; @@ -402,9 +400,7 @@ public void warmUp( ProgressReporter progressReporter ) { public void analyze( Properties properties, File outputDirectory ) { properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> { - calculateResults( queryTypes, properties, templateId, time ); - } ); + measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); 
properties.put( "queryTypes_maxId", queryTypes.size() ); properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); properties.put( "numberOfQueries", measuredTimes.size() ); @@ -418,14 +414,7 @@ public int getNumberOfInsertThreads() { } - @Value - @AllArgsConstructor - public static class PolyphenyAdapters { - - String relAdapter; - String docAdapter; - String graphAdapter; - + public record PolyphenyAdapters( String relAdapter, String docAdapter, String graphAdapter ) { public boolean isSet() { return relAdapter != null || docAdapter != null || graphAdapter != null; diff --git a/src/main/java/org/polypheny/simpleclient/scenario/coms/SchemaGenerator.java b/src/main/java/org/polypheny/simpleclient/scenario/coms/SchemaGenerator.java index e347351..684dbe7 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/coms/SchemaGenerator.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/coms/SchemaGenerator.java @@ -55,9 +55,9 @@ public void generateSchema( ComsConfig config, Executor executor, String namespa Graph graph = this.network.toGraph(); - List graphQueries = graph.getSchemaGraphQueries( adapters.getGraphAdapter(), namespace + Graph.GRAPH_POSTFIX ); - List docQueries = graph.getSchemaDocQueries( adapters.getDocAdapter(), namespace + Graph.DOC_POSTFIX ); - List relQueries = graph.getSchemaRelQueries( adapters.getRelAdapter(), namespace + Graph.REL_POSTFIX, graph.getUsers(), config ); + List graphQueries = graph.getSchemaGraphQueries( adapters.graphAdapter(), namespace + Graph.GRAPH_POSTFIX ); + List docQueries = graph.getSchemaDocQueries( adapters.docAdapter(), namespace + Graph.DOC_POSTFIX ); + List relQueries = graph.getSchemaRelQueries( adapters.relAdapter(), namespace + Graph.REL_POSTFIX, graph.getUsers(), config ); for ( Query query : graphQueries ) { executor.executeQuery( query ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/coms/simulation/NetworkGenerator.java 
b/src/main/java/org/polypheny/simpleclient/scenario/coms/simulation/NetworkGenerator.java index 71e9c82..9419f4b 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/coms/simulation/NetworkGenerator.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/coms/simulation/NetworkGenerator.java @@ -584,7 +584,7 @@ private void readEntities( List queries ) { randomType = random.nextInt( elements.size() ); List selectedElement = elements.get( randomType ); - if ( selectedElement.size() == 0 ) { + if ( selectedElement.isEmpty() ) { continue; } diff --git a/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java b/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java index bf5e2f7..285ad1e 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java @@ -99,7 +99,7 @@ public void createSchema( DatabaseInstance databaseInstance, boolean includingKe break; } } - if ( onStore.equals( "" ) ) { + if ( onStore.isEmpty() ) { throw new RuntimeException( "No suitable data store found for optimized placing of the DocBench collection." ); } else { onStore = ".store(\"" + onStore + "\")"; @@ -258,7 +258,7 @@ public void run() { while ( !theQueryList.isEmpty() && !abort ) { measuredTimeStart = System.nanoTime(); try { - queryListEntry = theQueryList.remove( 0 ); + queryListEntry = theQueryList.removeFirst(); } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... 
// This can happen due to concurrency if two threads enter the while-loop and there is only one thread left // Simply leaf the loop @@ -356,9 +356,7 @@ public void notifyAboutError( Exception e ) { @Override public void analyze( Properties properties, File outputDirectory ) { properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> { - calculateResults( queryTypes, properties, templateId, time ); - } ); + measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); properties.put( "queryTypes_maxId", queryTypes.size() ); properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); properties.put( "numberOfQueries", measuredTimes.size() ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java b/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java index 1f10f2f..78d31c8 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java @@ -316,7 +316,7 @@ public void run() { while ( !theQueryList.isEmpty() && !abort ) { measuredTimeStart = System.nanoTime(); try { - queryListEntry = theQueryList.remove( 0 ); + queryListEntry = theQueryList.removeFirst(); } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... // This can happen due to concurrency if two threads enter the while-loop and there is only one thread left // Simply leaf the loop @@ -440,7 +440,7 @@ public void createSchema( DatabaseInstance databaseInstance, boolean includingKe break; } } - if ( onStore.equals( "" ) ) { + if ( onStore.isEmpty() ) { throw new RuntimeException( "No suitable data store found for optimized placing of Gavel tables." 
); } else { onStore = " ON STORE " + onStore; @@ -695,9 +695,7 @@ public void notifyAboutError( Exception e ) { public void analyze( Properties properties, File outputDirectory ) { properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> { - calculateResults( queryTypes, properties, templateId, time ); - } ); + measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); properties.put( "queryTypes_maxId", queryTypes.size() ); properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); properties.put( "numberOfQueries", measuredTimes.size() ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/gavel/queryBuilder/InsertCategory.java b/src/main/java/org/polypheny/simpleclient/scenario/gavel/queryBuilder/InsertCategory.java index bddbbeb..408e84a 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/gavel/queryBuilder/InsertCategory.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/gavel/queryBuilder/InsertCategory.java @@ -71,12 +71,12 @@ public InsertCategory() { @Override public BatchableInsert getNewQuery() { - if ( categories.size() == 0 ) { + if ( categories.isEmpty() ) { throw new RuntimeException( "List of categories is empty" ); } return new InsertCategoryQuery( nextCategoryId.getAndIncrement(), - categories.remove( 0 ) + categories.removeFirst() ); } diff --git a/src/main/java/org/polypheny/simpleclient/scenario/graph/DataGenerator.java b/src/main/java/org/polypheny/simpleclient/scenario/graph/DataGenerator.java index 77740a6..a260175 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/graph/DataGenerator.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/graph/DataGenerator.java @@ -88,7 +88,7 @@ private String getNode( String identifier, int i, int labelsAmount, int properti private String getEdge( int i, int propertiesAmount ) { - String label = getLabels( i ).get( 0 
); + String label = getLabels( i ).getFirst(); String properties = String.join( ", ", getProperties( i, propertiesAmount ) ); return String.format( "-[:%s {%s}]->", label, properties ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java b/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java index 9698efa..43ecb80 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java @@ -324,9 +324,7 @@ public void notifyAboutError( Exception e ) { @Override public void analyze( Properties properties, File outputDirectory ) { properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> { - calculateResults( queryTypes, properties, templateId, time ); - } ); + measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); properties.put( "queryTypes_maxId", queryTypes.size() ); properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); properties.put( "numberOfQueries", measuredTimes.size() ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java index 3760668..cff89c5 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java @@ -279,7 +279,7 @@ public void run() { while ( !theQueryList.isEmpty() && !abort ) { measuredTimeStart = System.nanoTime(); try { - queryListEntry = theQueryList.remove( 0 ); + queryListEntry = theQueryList.removeFirst(); } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... 
// This can happen due to concurrency if two threads enter the while-loop and there is only one thread left // Simply leaf the loop @@ -378,9 +378,7 @@ public void notifyAboutError( Exception e ) { public void analyze( Properties properties, File outputDirectory ) { properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> { - calculateResults( queryTypes, properties, templateId, time ); - } ); + measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); properties.put( "queryTypes_maxId", queryTypes.size() ); properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); properties.put( "numberOfQueries", measuredTimes.size() ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertIntFeature.java b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertIntFeature.java index ebd0cbe..31fbbe6 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertIntFeature.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertIntFeature.java @@ -153,7 +153,7 @@ public String getMongoQl() { @Override public CottontailQuery getCottontail() { Map dataMap = new HashMap<>(); - dataMap.put( "id", Data.newBuilder().setIntData( (int) id ).build() ); + dataMap.put( "id", Data.newBuilder().setIntData( id ).build() ); dataMap.put( "feature", Data.newBuilder().setVectorData( Vector.newBuilder().setIntVector( IntVector.newBuilder() .addAllVector( Arrays.asList( feature ) ) diff --git a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertMetadata.java b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertMetadata.java index 201187f..d2b9c9e 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertMetadata.java +++ 
b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertMetadata.java @@ -125,7 +125,7 @@ public String getMongoQl() { @Override public CottontailQuery getCottontail() { Map dataMap = new HashMap<>(); - dataMap.put( "id", Data.newBuilder().setIntData( (int) id ).build() ); + dataMap.put( "id", Data.newBuilder().setIntData( id ).build() ); dataMap.put( "textdata", Data.newBuilder().setStringData( textdata ).build() ); InsertMessage insertMessage = InsertMessage.newBuilder() .setFrom( From.newBuilder().setEntity( Entity.newBuilder().setSchema( Schema.newBuilder().setName( "public" ).build() ).setName( "knn_metadata" ).build() ).build() ) diff --git a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertRealFeature.java b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertRealFeature.java index ba7e88a..87760b6 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertRealFeature.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/queryBuilder/InsertRealFeature.java @@ -153,7 +153,7 @@ public String getMongoQl() { @Override public CottontailQuery getCottontail() { Map dataMap = new HashMap<>(); - dataMap.put( "id", Data.newBuilder().setIntData( (int) id ).build() ); + dataMap.put( "id", Data.newBuilder().setIntData( id ).build() ); dataMap.put( "feature", Data.newBuilder().setVectorData( Vector.newBuilder().setFloatVector( FloatVector.newBuilder() .addAllVector( Arrays.asList( feature ) ) diff --git a/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBench.java b/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBench.java index ca97124..a0c8dce 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBench.java @@ -53,15 +53,14 @@ public class MultiBench extends Scenario { public MultiBench( 
Executor.ExecutorFactory multiExecutorFactory, MultiBenchConfig config, boolean commitAfterEveryQuery, boolean dumpQueryList ) { super( multiExecutorFactory, commitAfterEveryQuery, dumpQueryList, QueryMode.TABLE ); - if ( !(multiExecutorFactory instanceof PolyphenyDbMultiExecutorFactory) ) { + if ( !(multiExecutorFactory instanceof PolyphenyDbMultiExecutorFactory polyphenyDbMultiExecutorFactory) ) { throw new RuntimeException( "This benchmark requires a multi executor" ); } - PolyphenyDbMultiExecutorFactory executorFactory = (PolyphenyDbMultiExecutorFactory) multiExecutorFactory; // Initialize underlying benchmarks if ( config.numberOfGavelQueries > 0 ) { gavel = new Gavel( - executorFactory.getJdbcExecutorFactory(), + polyphenyDbMultiExecutorFactory.getJdbcExecutorFactory(), config.getGavelConfig(), commitAfterEveryQuery, dumpQueryList, @@ -72,7 +71,7 @@ public MultiBench( Executor.ExecutorFactory multiExecutorFactory, MultiBenchConf } if ( config.numberOfGraphBenchQueries > 0 ) { graphBench = new GraphBench( - executorFactory.getCypherExecutorFactory(), + polyphenyDbMultiExecutorFactory.getCypherExecutorFactory(), config.getGraphBenchConfig(), commitAfterEveryQuery, dumpQueryList @@ -82,7 +81,7 @@ public MultiBench( Executor.ExecutorFactory multiExecutorFactory, MultiBenchConf } if ( config.numberOfDocBenchQueries > 0 ) { docBench = new DocBench( - executorFactory.getMongoQlExecutorFactory(), + polyphenyDbMultiExecutorFactory.getMongoQlExecutorFactory(), config.getDocBenchConfig(), commitAfterEveryQuery, dumpQueryList @@ -92,7 +91,7 @@ public MultiBench( Executor.ExecutorFactory multiExecutorFactory, MultiBenchConf } if ( config.numberOfKnnBenchQueries > 0 ) { knnBench = new KnnBench( - executorFactory.getJdbcExecutorFactory(), + polyphenyDbMultiExecutorFactory.getJdbcExecutorFactory(), config.getKnnBenchConfig(), commitAfterEveryQuery, dumpQueryList @@ -267,4 +266,4 @@ public int getNumberOfInsertThreads() { return 1; } -} \ No newline at end of file +} diff 
--git a/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBenchConfig.java b/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBenchConfig.java index 040b1f3..f27fac1 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBenchConfig.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/multibench/MultiBenchConfig.java @@ -189,8 +189,8 @@ protected MultiBenchKnnBenchConfig( Map cdl ) { private void settings() { if ( dataStores.size() == 1 ) { - dataStoreFeature = dataStores.get( 0 ); - dataStoreMetadata = dataStores.get( 0 ); + dataStoreFeature = dataStores.getFirst(); + dataStoreMetadata = dataStores.getFirst(); } randomSeedInsert = MultiBenchConfig.this.seed; diff --git a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/DataGenerator.java b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/DataGenerator.java index 1195431..53586d6 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/DataGenerator.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/DataGenerator.java @@ -146,15 +146,15 @@ private void executeInsertList() throws ExecutorException { executionTimes.add( executionTime ); } //the batchList contains only queries of one type - String sql = batchList.get( 0 ).getParameterizedSqlQuery(); + String sql = batchList.getFirst().getParameterizedSqlQuery(); if ( sql == null ) { - sql = batchList.get( 0 ).getSql(); + sql = batchList.getFirst().getSql(); sql = sql.substring( 0, Math.min( 500, sql.length() ) ); } if ( !queryTimes.containsKey( sql ) ) { - queryTimes.put( batchList.get( 0 ).getParameterizedSqlQuery(), new ArrayList<>() ); + queryTimes.put( batchList.getFirst().getParameterizedSqlQuery(), new ArrayList<>() ); } - queryTimes.get( batchList.get( 0 ).getParameterizedSqlQuery() ).addAll( executionTimes ); + queryTimes.get( batchList.getFirst().getParameterizedSqlQuery() ).addAll( executionTimes ); batchList.clear(); } diff --git 
a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MediaGenerator.java b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MediaGenerator.java index 9b7d8a2..eacd43b 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MediaGenerator.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MediaGenerator.java @@ -124,7 +124,7 @@ static class RandomInputStream extends InputStream { @Override - public int read() throws IOException { + public int read() { if ( counter-- == 0 ) { return -1; } diff --git a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java index 9632e6a..8c5bcd6 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java @@ -297,7 +297,7 @@ public void run() { while ( !theQueryList.isEmpty() && !abort ) { measuredTimeStart = System.nanoTime(); try { - queryListEntry = theQueryList.remove( 0 ); + queryListEntry = theQueryList.removeFirst(); } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... 
// This can happen due to concurrency if two threads enter the while-loop and there is only one thread left // Simply leaf the loop @@ -396,9 +396,7 @@ public void notifyAboutError( Exception e ) { public void analyze( Properties properties, File outputDirectory ) { properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> { - calculateResults( queryTypes, properties, templateId, time ); - } ); + measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); properties.put( "queryTypes_maxId", queryTypes.size() ); } diff --git a/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/AbstractOltpBenchScenario.java b/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/AbstractOltpBenchScenario.java index 339c5e1..c1abba8 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/AbstractOltpBenchScenario.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/AbstractOltpBenchScenario.java @@ -55,8 +55,8 @@ public abstract class AbstractOltpBenchScenario extends Scenario { public AbstractOltpBenchScenario( ExecutorFactory executorFactory, AbstractOltpBenchConfig config, boolean dumpQueryList, QueryMode queryMode ) { super( executorFactory, true, dumpQueryList, queryMode ); this.config = config; - if ( executorFactory instanceof OltpBenchExecutorFactory ) { - this.executorFactory = (OltpBenchExecutorFactory) executorFactory; + if ( executorFactory instanceof OltpBenchExecutorFactory oltpBenchExecutorFactory ) { + this.executorFactory = oltpBenchExecutorFactory; } else { throw new RuntimeException( "Unsupported executor factory: " + executorFactory.getClass().getName() ); } diff --git a/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/tpcc/Tpcc.java b/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/tpcc/Tpcc.java index 5c7f262..b72bc23 100644 --- 
a/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/tpcc/Tpcc.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/tpcc/Tpcc.java @@ -47,7 +47,7 @@ public Tpcc( ExecutorFactory executorFactory, TpccConfig config, boolean dumpQue @Override - protected void preSchemaCreationTasks( DatabaseInstance databaseInstance, ExecutorFactory executorFactory ) throws ExecutorException { + protected void preSchemaCreationTasks( DatabaseInstance databaseInstance, ExecutorFactory executorFactory ) { Executor executor = executorFactory.createExecutorInstance(); try { // Set table placement strategy diff --git a/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/ycsb/Ycsb.java b/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/ycsb/Ycsb.java index 92cad5b..a57f9a3 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/ycsb/Ycsb.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/oltpbench/ycsb/Ycsb.java @@ -47,7 +47,7 @@ public Ycsb( ExecutorFactory executorFactory, YcsbConfig config, boolean dumpQue @Override - protected void preSchemaCreationTasks( DatabaseInstance databaseInstance, ExecutorFactory executorFactory ) throws ExecutorException { + protected void preSchemaCreationTasks( DatabaseInstance databaseInstance, ExecutorFactory executorFactory ) { Executor executor = executorFactory.createExecutorInstance(); try { // Set table placement strategy From d1a6813c003774cdc612b14e15a20e9da4155f3c Mon Sep 17 00:00:00 2001 From: Martin Vahlensieck Date: Mon, 10 Feb 2025 12:26:28 +0100 Subject: [PATCH 3/5] Remove superfluous final --- .../java/org/polypheny/simpleclient/scenario/coms/Coms.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java b/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java index 7e6fff7..7aecaaa 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java +++ 
b/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java @@ -241,7 +241,7 @@ private List getRelQueries( List queries, Function threadDistribution, List... queryLists ) { + private void startEvaluation( ProgressReporter progressReporter, CsvWriter csvWriter, int numberOfThreads, List threadDistribution, List... queryLists ) { log.info( "Executing benchmark..." ); if ( threadDistribution.size() != queryLists.length ) { throw new RuntimeException( "ThreadDistribution needs to define an number for each data model" ); @@ -323,7 +323,7 @@ private void collectResultsOfThreads( ArrayList threads ) { @SafeVarargs - private final List randomlyMergeInOrder( final List... lists ) { + private List randomlyMergeInOrder( final List... lists ) { List merged = new ArrayList<>(); List> bucket = new ArrayList<>( Arrays.asList( lists ) ); From af7b8bf6f4dbac0db85ff613ff3e0751899a7b0d Mon Sep 17 00:00:00 2001 From: Martin Vahlensieck Date: Thu, 2 Jan 2025 11:30:24 +0100 Subject: [PATCH 4/5] Use records --- .../executor/PolyphenyDbExecutor.java | 36 ++----------------- .../simpleclient/main/ChronosAgent.java | 26 +++++++------- 2 files changed, 15 insertions(+), 47 deletions(-) diff --git a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java index 93acb7d..96e26a8 100644 --- a/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java +++ b/src/main/java/org/polypheny/simpleclient/executor/PolyphenyDbExecutor.java @@ -40,8 +40,6 @@ import kong.unirest.core.Unirest; import kong.unirest.core.json.JSONArray; import kong.unirest.core.json.JSONObject; -import lombok.Data; -import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.control.client.PolyphenyControlConnector; @@ -790,45 +788,15 @@ public void terminate() { } - @Data - public static class PolyphenyStatus { - - protected final long currentMemory; - 
protected final int numOfActiveTrx; - protected final int monitoringQueueSize; + public record PolyphenyStatus( long currentMemory, int numOfActiveTrx, int monitoringQueueSize ) { } - @EqualsAndHashCode(callSuper = true) - @Getter - public static class PolyphenyFullStatus extends PolyphenyStatus { - - PolyphenyFullStatus( String uui, String version, String hash, long currentMemory, long trxCount, int numOfActiveTrx, int implementationCacheSize, int queryPlanCacheSize, int routingPlanCacheSize, int monitoringQueueSize ) { - super( currentMemory, numOfActiveTrx, monitoringQueueSize ); - this.uuid = uui; - this.version = version; - this.hash = hash; - this.trxCount = trxCount; - this.implementationCacheSize = implementationCacheSize; - this.queryPlanCacheSize = queryPlanCacheSize; - this.routingPlanCacheSize = routingPlanCacheSize; - } - - - private final String uuid; - private final String version; - private final String hash; - - private final long trxCount; - - private final int implementationCacheSize; - private final int queryPlanCacheSize; - private final int routingPlanCacheSize; + public record PolyphenyFullStatus( String uuid, String version, String hash, long currentMemory, long trxCount, int numOfActiveTrx, int implementationCacheSize, int queryPlanCacheSize, int routingPlanCacheSize, int monitoringQueueSize ) { } } - } diff --git a/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java b/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java index 02956b7..57798c0 100644 --- a/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java +++ b/src/main/java/org/polypheny/simpleclient/main/ChronosAgent.java @@ -521,9 +521,9 @@ protected Object analyze( ChronosJob chronosJob, final File inputDirectory, fina List numOfActiveTrxReadings = new ArrayList<>( statuses.size() ); List monitoringQueueSizeReadings = new ArrayList<>( statuses.size() ); for ( PolyphenyStatus status : statuses ) { - currentMemoryReadings.add( status.getCurrentMemory() 
); - numOfActiveTrxReadings.add( status.getNumOfActiveTrx() ); - monitoringQueueSizeReadings.add( status.getMonitoringQueueSize() ); + currentMemoryReadings.add( status.currentMemory() ); + numOfActiveTrxReadings.add( status.numOfActiveTrx() ); + monitoringQueueSizeReadings.add( status.monitoringQueueSize() ); properties.put( "pdbStatus_currentMemory", currentMemoryReadings ); properties.put( "pdbStatus_numOfActiveTrx", numOfActiveTrxReadings ); properties.put( "pdbStatus_monitoringQueueSize", monitoringQueueSizeReadings ); @@ -539,16 +539,16 @@ protected Object analyze( ChronosJob chronosJob, final File inputDirectory, fina // Do a final gathering try { PolyphenyFullStatus status = statusGatherer.gatherFullOnce(); - properties.put( "pdbStatus_uuid", status.getUuid() ); - properties.put( "pdbStatus_version", status.getVersion() ); - properties.put( "pdbStatus_hash", status.getHash() ); - properties.put( "pdbStatus_currentMemory_final", status.getCurrentMemory() ); - properties.put( "pdbStatus_numOfActiveTrx_final", status.getNumOfActiveTrx() ); - properties.put( "pdbStatus_trxCount_final", status.getTrxCount() ); - properties.put( "pdbStatus_implementationCacheSize_final", status.getImplementationCacheSize() ); - properties.put( "pdbStatus_queryPlanCacheSize_final", status.getQueryPlanCacheSize() ); - properties.put( "pdbStatus_routingPlanCacheSize_final", status.getRoutingPlanCacheSize() ); - properties.put( "pdbStatus_monitoringQueueSize_final", status.getMonitoringQueueSize() ); + properties.put( "pdbStatus_uuid", status.uuid() ); + properties.put( "pdbStatus_version", status.version() ); + properties.put( "pdbStatus_hash", status.hash() ); + properties.put( "pdbStatus_currentMemory_final", status.currentMemory() ); + properties.put( "pdbStatus_numOfActiveTrx_final", status.numOfActiveTrx() ); + properties.put( "pdbStatus_trxCount_final", status.trxCount() ); + properties.put( "pdbStatus_implementationCacheSize_final", status.implementationCacheSize() ); + properties.put( "pdbStatus_queryPlanCacheSize_final", status.queryPlanCacheSize() ); 
+ properties.put( "pdbStatus_routingPlanCacheSize_final", status.routingPlanCacheSize() ); + properties.put( "pdbStatus_monitoringQueueSize_final", status.monitoringQueueSize() ); } catch ( Exception e ) { log.error( "Unable to gather final status data from Polypheny", e ); } From ca1d10cb1dc98547f547d81b622fac7fc5003751 Mon Sep 17 00:00:00 2001 From: Martin Vahlensieck Date: Tue, 11 Feb 2025 08:43:21 +0100 Subject: [PATCH 5/5] Do not use fully qualified name --- .../java/org/polypheny/simpleclient/query/CottontailQuery.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java b/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java index 068c171..f831219 100644 --- a/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java +++ b/src/main/java/org/polypheny/simpleclient/query/CottontailQuery.java @@ -25,7 +25,7 @@ package org.polypheny.simpleclient.query; -public record CottontailQuery( org.polypheny.simpleclient.query.CottontailQuery.QueryType type, Object query ) { +public record CottontailQuery( QueryType type, Object query ) { public enum QueryType {