diff --git a/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java b/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java
index d8de9e1..678dc0e 100644
--- a/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java
+++ b/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThread.java
@@ -37,7 +37,6 @@
 import org.polypheny.simpleclient.executor.Executor;
 import org.polypheny.simpleclient.executor.ExecutorException;
 import org.polypheny.simpleclient.query.QueryListEntry;
-import org.polypheny.simpleclient.scenario.graph.GraphBench.EvaluationThreadMonitor;
 
 @Getter
 @Slf4j
diff --git a/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThreadMonitor.java b/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThreadMonitor.java
new file mode 100644
index 0000000..79534f8
--- /dev/null
+++ b/src/main/java/org/polypheny/simpleclient/scenario/EvaluationThreadMonitor.java
@@ -0,0 +1,56 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2019-2025 The Polypheny Project
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.polypheny.simpleclient.scenario;
+
+import java.util.List;
+import lombok.Getter;
+
+public final class EvaluationThreadMonitor {
+
+    private final List<EvaluationThread> threads;
+    @Getter
+    private Exception exception;
+    @Getter
+    private boolean aborted;
+
+
+    public EvaluationThreadMonitor( List<EvaluationThread> threads ) {
+        this.threads = threads;
+        this.aborted = false;
+    }
+
+
+    public void abortAll() {
+        this.aborted = true;
+        threads.forEach( EvaluationThread::abort );
+    }
+
+
+    public void notifyAboutError( Exception e ) {
+        exception = e;
+        abortAll();
+    }
+
+}
diff --git a/src/main/java/org/polypheny/simpleclient/scenario/PolyphenyScenario.java b/src/main/java/org/polypheny/simpleclient/scenario/PolyphenyScenario.java
new file mode 100644
index 0000000..21c39df
--- /dev/null
+++ b/src/main/java/org/polypheny/simpleclient/scenario/PolyphenyScenario.java
@@ -0,0 +1,149 @@
+/*
+ * The MIT License (MIT)
+ *
+ * Copyright (c) 2019-2025 The Polypheny Project
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package org.polypheny.simpleclient.scenario;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Function;
+import java.util.function.Supplier;
+import lombok.extern.slf4j.Slf4j;
+import org.polypheny.simpleclient.QueryMode;
+import org.polypheny.simpleclient.executor.Executor;
+import org.polypheny.simpleclient.executor.JdbcExecutor;
+import org.polypheny.simpleclient.main.ProgressReporter;
+import org.polypheny.simpleclient.query.Query;
+import org.polypheny.simpleclient.query.QueryListEntry;
+
+@Slf4j
+public abstract class PolyphenyScenario extends Scenario {
+
+    protected long executeRuntime;
+    protected final Map<Integer, String> queryTypes;
+    protected final List<Long> measuredTimes = Collections.synchronizedList( new LinkedList<>() );
+    protected final Map<Integer, List<Long>> measuredTimePerQueryType = new ConcurrentHashMap<>();
+
+
+    public PolyphenyScenario( JdbcExecutor.ExecutorFactory executorFactory, boolean commitAfterEveryQuery, boolean dumpQueryList, QueryMode queryMode ) {
+        super( executorFactory, commitAfterEveryQuery, dumpQueryList, queryMode );
+        queryTypes = new HashMap<>();
+    }
+
+
+    protected long commonExecute( List<QueryListEntry> queryList, ProgressReporter progressReporter, File outputDirectory, int numberOfThreads, Function<Query, String> toString, Supplier<Executor> executor, Random random ) {
+        Collections.shuffle( queryList, random );
+
+        // This dumps the queries independent of the selected interface
+        dumpQueryList( outputDirectory, queryList, toString );
+
+        log.info( "Executing benchmark..." );
+        (new Thread( new ProgressReporter.ReportQueryListProgress( queryList, progressReporter ) )).start();
+        long startTime = System.nanoTime();
+
+        List<EvaluationThread> threads = new ArrayList<>();
+        for ( int i = 0; i < numberOfThreads; i++ ) {
+            threads.add( new EvaluationThread( queryList, executor.get(), queryTypes.keySet(), commitAfterEveryQuery ) );
+        }
+
+        EvaluationThreadMonitor threadMonitor = new EvaluationThreadMonitor( threads );
+        threads.forEach( t -> t.setThreadMonitor( threadMonitor ) );
+
+        for ( EvaluationThread thread : threads ) {
+            thread.start();
+        }
+
+        for ( EvaluationThread thread : threads ) {
+            try {
+                thread.join();
+                this.measuredTimes.addAll( thread.getMeasuredTimes() );
+                thread.getMeasuredTimePerQueryType().forEach( ( k, v ) -> {
+                    if ( !this.measuredTimePerQueryType.containsKey( k ) ) {
+                        this.measuredTimePerQueryType.put( k, new ArrayList<>() );
+                    }
+                    this.measuredTimePerQueryType.get( k ).addAll( v );
+                } );
+            } catch ( InterruptedException e ) {
+                throw new RuntimeException( "Unexpected interrupt", e );
+            }
+        }
+
+        executeRuntime = System.nanoTime() - startTime;
+
+        for ( EvaluationThread thread : threads ) {
+            thread.closeExecutor();
+        }
+
+        if ( threadMonitor.isAborted() ) {
+            throw new RuntimeException( "Exception while executing benchmark", threadMonitor.getException() );
+        }
+
+        log.info( "run time: {} s", executeRuntime / 1000000000 );
+
+        return executeRuntime;
+    }
+
+
+    @Override
+    public void analyze( Properties properties, File outputDirectory ) {
+        properties.put( "measuredTime", calculateMean( measuredTimes ) );
+
+        measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) );
+        properties.put( "queryTypes_maxId", queryTypes.size() );
+        properties.put( "executeRuntime", executeRuntime / 1000000000.0 );
+        properties.put( "numberOfQueries", measuredTimes.size() );
+        properties.put( "throughput", measuredTimes.size() / (executeRuntime / 1000000000.0) );
+    }
+
+
+    private void dumpQueryList( File outputDirectory, List<QueryListEntry> queryList, Function<Query, String> toString ) {
+        if ( outputDirectory != null && dumpQueryList ) {
+            log.info( "Dump query list..." );
+            try {
+                FileWriter fw = new FileWriter( outputDirectory.getPath() + File.separator + "queryList" );
+                queryList.forEach( query -> {
+                    try {
+                        fw.append( toString.apply( query.query ) ).append( "\n" );
+                    } catch ( IOException e ) {
+                        log.error( "Error while dumping query list", e );
+                    }
+                } );
+                fw.close();
+            } catch ( IOException e ) {
+                log.error( "Error while dumping query list", e );
+            }
+        }
+
+    }
+
+}
diff --git a/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java b/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java
index 7aecaaa..b315cd8 100644
--- a/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java
+++ b/src/main/java/org/polypheny/simpleclient/scenario/coms/Coms.java
@@ -33,7 +33,6 @@
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Properties;
 import java.util.Random;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;
@@ -53,11 +52,11 @@
 import org.polypheny.simpleclient.query.Query;
 import org.polypheny.simpleclient.query.QueryListEntry;
 import org.polypheny.simpleclient.scenario.EvaluationThread;
-import org.polypheny.simpleclient.scenario.Scenario;
-import org.polypheny.simpleclient.scenario.graph.GraphBench.EvaluationThreadMonitor;
+import org.polypheny.simpleclient.scenario.EvaluationThreadMonitor;
+import org.polypheny.simpleclient.scenario.PolyphenyScenario;
 
 @Slf4j
-public class Coms extends Scenario {
+public class Coms extends PolyphenyScenario {
 
     public static final String
NAMESPACE = "coms"; public static final double EPSILON = 0.000001; @@ -396,18 +395,6 @@ public void warmUp( ProgressReporter progressReporter ) { } - @Override - public void analyze( Properties properties, File outputDirectory ) { - properties.put( "measuredTime", calculateMean( measuredTimes ) ); - - measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); - properties.put( "queryTypes_maxId", queryTypes.size() ); - properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); - properties.put( "numberOfQueries", measuredTimes.size() ); - properties.put( "throughput", measuredTimes.size() / (executeRuntime / 1000000000.0) ); - } - - @Override public int getNumberOfInsertThreads() { return 0; diff --git a/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java b/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java index 285ad1e..c180f06 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/docbench/DocBench.java @@ -25,8 +25,6 @@ package org.polypheny.simpleclient.scenario.docbench; import java.io.File; -import java.io.FileWriter; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -37,8 +35,6 @@ import java.util.Random; import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; -import lombok.Getter; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.polypheny.simpleclient.QueryMode; import org.polypheny.simpleclient.executor.Executor; @@ -47,17 +43,18 @@ import org.polypheny.simpleclient.executor.PolyphenyDbExecutor; import org.polypheny.simpleclient.main.CsvWriter; import org.polypheny.simpleclient.main.ProgressReporter; +import org.polypheny.simpleclient.query.Query; import org.polypheny.simpleclient.query.QueryBuilder; import org.polypheny.simpleclient.query.QueryListEntry; 
import org.polypheny.simpleclient.query.RawQuery; -import org.polypheny.simpleclient.scenario.Scenario; +import org.polypheny.simpleclient.scenario.PolyphenyScenario; import org.polypheny.simpleclient.scenario.docbench.queryBuilder.PutProductQueryBuilder; import org.polypheny.simpleclient.scenario.docbench.queryBuilder.SearchProductQueryBuilder; import org.polypheny.simpleclient.scenario.docbench.queryBuilder.UpdateProductQueryBuilder; @Slf4j -public class DocBench extends Scenario { +public class DocBench extends PolyphenyScenario { private final DocBenchConfig config; private final List measuredTimes; @@ -140,63 +137,7 @@ public long execute( ProgressReporter progressReporter, CsvWriter csvWriter, Fil addNumberOfTimes( queryList, new SearchProductQueryBuilder( random, valuesPool, config ), config.numberOfFindQueries ); addNumberOfTimes( queryList, new UpdateProductQueryBuilder( random, valuesPool, config ), config.numberOfUpdateQueries ); addNumberOfTimes( queryList, new PutProductQueryBuilder( random, valuesPool, config ), config.numberOfPutQueries ); - Collections.shuffle( queryList, random ); - - // This dumps the MQL queries independent of the selected interface - if ( outputDirectory != null && dumpQueryList ) { - log.info( "Dump query list..." ); - try { - FileWriter fw = new FileWriter( outputDirectory.getPath() + File.separator + "queryList" ); - queryList.forEach( query -> { - try { - fw.append( query.query.getMongoQl() ).append( "\n" ); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } ); - fw.close(); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } - - log.info( "Executing benchmark..." 
); - (new Thread( new ProgressReporter.ReportQueryListProgress( queryList, progressReporter ) )).start(); - long startTime = System.nanoTime(); - - ArrayList threads = new ArrayList<>(); - for ( int i = 0; i < numberOfThreads; i++ ) { - threads.add( new EvaluationThread( queryList, executorFactory.createExecutorInstance( csvWriter, NAMESPACE ) ) ); - } - - EvaluationThreadMonitor threadMonitor = new EvaluationThreadMonitor( threads ); - threads.forEach( t -> t.setThreadMonitor( threadMonitor ) ); - - for ( EvaluationThread thread : threads ) { - thread.start(); - } - - for ( Thread thread : threads ) { - try { - thread.join(); - } catch ( InterruptedException e ) { - throw new RuntimeException( "Unexpected interrupt", e ); - } - } - - executeRuntime = System.nanoTime() - startTime; - - for ( EvaluationThread thread : threads ) { - thread.closeExecutor(); - } - - if ( threadMonitor.aborted ) { - throw new RuntimeException( "Exception while executing benchmark", threadMonitor.exception ); - } - - log.info( "run time: {} s", executeRuntime / 1000000000 ); - - return executeRuntime; + return commonExecute( queryList, progressReporter, outputDirectory, numberOfThreads, Query::getMongoQl, () -> executorFactory.createExecutorInstance( csvWriter, NAMESPACE ), random ); } @@ -233,134 +174,9 @@ public void warmUp( ProgressReporter progressReporter ) { } - private class EvaluationThread extends Thread { - - private final Executor executor; - private final List theQueryList; - private boolean abort = false; - @Setter - private EvaluationThreadMonitor threadMonitor; - - - EvaluationThread( List queryList, Executor executor ) { - super( "EvaluationThread" ); - this.executor = executor; - theQueryList = queryList; - } - - - @Override - public void run() { - long measuredTimeStart; - long measuredTime; - QueryListEntry queryListEntry; - - while ( !theQueryList.isEmpty() && !abort ) { - measuredTimeStart = System.nanoTime(); - try { - queryListEntry = theQueryList.removeFirst(); - 
} catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... - // This can happen due to concurrency if two threads enter the while-loop and there is only one thread left - // Simply leaf the loop - break; - } - try { - executor.executeQuery( queryListEntry.query ); - } catch ( ExecutorException e ) { - log.error( "Caught exception while executing queries", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - measuredTime = System.nanoTime() - measuredTimeStart; - measuredTimes.add( measuredTime ); - measuredTimePerQueryType.get( queryListEntry.templateId ).add( measuredTime ); - if ( commitAfterEveryQuery ) { - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - } - } - - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - - executor.flushCsvWriter(); - } - - - public void abort() { - this.abort = true; - } - - - public void closeExecutor() { - commitAndCloseExecutor( executor ); - } - - } - - - private class EvaluationThreadMonitor { - - private final List threads; - @Getter - private Exception exception; - @Getter - private boolean aborted; - - - public EvaluationThreadMonitor( List threads ) { - this.threads = threads; - this.aborted = false; - } - - - public void abortAll() { - this.aborted = true; - threads.forEach( EvaluationThread::abort ); - } - - 
- public void notifyAboutError( Exception e ) { - exception = e; - abortAll(); - } - - } - - @Override public void analyze( Properties properties, File outputDirectory ) { - properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); - properties.put( "queryTypes_maxId", queryTypes.size() ); - properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); - properties.put( "numberOfQueries", measuredTimes.size() ); - properties.put( "throughput", (measuredTimes.size() / (executeRuntime / 1000000000.0)) ); + super.analyze( properties, outputDirectory ); properties.put( "numberOfFindQueries", measuredTimePerQueryType.get( 1 ).size() ); properties.put( "numberOfUpdateQueries", measuredTimePerQueryType.get( 2 ).size() ); properties.put( "numberOfPutQueries", measuredTimePerQueryType.get( 3 ).size() ); diff --git a/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java b/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java index 78d31c8..9b04d08 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/gavel/Gavel.java @@ -26,7 +26,6 @@ import java.io.BufferedReader; import java.io.File; -import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -36,11 +35,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Properties; +import java.util.Random; import java.util.Vector; -import java.util.concurrent.ConcurrentHashMap; import lombok.Getter; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.polypheny.simpleclient.QueryMode; import org.polypheny.simpleclient.executor.Executor; @@ -54,10 +51,11 @@ import org.polypheny.simpleclient.executor.PostgresExecutor.PostgresExecutorFactory; import 
org.polypheny.simpleclient.main.CsvWriter; import org.polypheny.simpleclient.main.ProgressReporter; +import org.polypheny.simpleclient.query.Query; import org.polypheny.simpleclient.query.QueryBuilder; import org.polypheny.simpleclient.query.QueryListEntry; import org.polypheny.simpleclient.query.RawQuery; -import org.polypheny.simpleclient.scenario.Scenario; +import org.polypheny.simpleclient.scenario.PolyphenyScenario; import org.polypheny.simpleclient.scenario.gavel.queryBuilder.ChangePasswordOfRandomUser; import org.polypheny.simpleclient.scenario.gavel.queryBuilder.ChangeRandomAuction; import org.polypheny.simpleclient.scenario.gavel.queryBuilder.CountAuction; @@ -82,22 +80,14 @@ @Slf4j -public class Gavel extends Scenario { +public class Gavel extends PolyphenyScenario { private final GavelConfig config; - private final List measuredTimes; - private long executeRuntime; - private final Map queryTypes; - private final Map> measuredTimePerQueryType; public Gavel( JdbcExecutor.ExecutorFactory executorFactory, GavelConfig config, boolean commitAfterEveryQuery, boolean dumpQueryList, QueryMode queryMode ) { super( executorFactory, commitAfterEveryQuery, dumpQueryList, queryMode ); this.config = config; - measuredTimes = Collections.synchronizedList( new LinkedList<>() ); - - queryTypes = new HashMap<>(); - measuredTimePerQueryType = new ConcurrentHashMap<>(); } @@ -130,63 +120,7 @@ public long execute( ProgressReporter progressReporter, CsvWriter csvWriter, Fil addNumberOfTimes( queryList, new SelectTopHundredSellerByNumberOfAuctions( queryMode ), config.totalNumOfTopHundredSellerByNumberOfAuctionsQueries ); addNumberOfTimes( queryList, new SelectPriceBetweenAndNotInCategory( queryMode ), config.totalNumOfPriceBetweenAndNotInCategoryQueries ); - Collections.shuffle( queryList ); - - // This dumps the sql queries independent of the selected interface - if ( outputDirectory != null && dumpQueryList ) { - log.info( "Dump query list..." 
); - try { - FileWriter fw = new FileWriter( outputDirectory.getPath() + File.separator + "queryList" ); - queryList.forEach( query -> { - try { - fw.append( query.query.getSql() ).append( "\n" ); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } ); - fw.close(); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } - - log.info( "Executing benchmark..." ); - (new Thread( new ProgressReporter.ReportQueryListProgress( queryList, progressReporter ) )).start(); - long startTime = System.nanoTime(); - - ArrayList threads = new ArrayList<>(); - for ( int i = 0; i < numberOfThreads; i++ ) { - threads.add( new EvaluationThread( queryList, executorFactory.createExecutorInstance( csvWriter ) ) ); - } - - EvaluationThreadMonitor threadMonitor = new EvaluationThreadMonitor( threads ); - threads.forEach( t -> t.setThreadMonitor( threadMonitor ) ); - - for ( EvaluationThread thread : threads ) { - thread.start(); - } - - for ( Thread thread : threads ) { - try { - thread.join(); - } catch ( InterruptedException e ) { - throw new RuntimeException( "Unexpected interrupt", e ); - } - } - - executeRuntime = System.nanoTime() - startTime; - - for ( EvaluationThread thread : threads ) { - thread.closeExecutor(); - } - - if ( threadMonitor.aborted ) { - throw new RuntimeException( "Exception while executing benchmark", threadMonitor.exception ); - } - - log.info( "run time: {} s", executeRuntime / 1000000000 ); - - return executeRuntime; + return commonExecute( queryList, progressReporter, outputDirectory, numberOfThreads, Query::getSql, () -> executorFactory.createExecutorInstance( csvWriter ), new Random() ); } @@ -291,126 +225,6 @@ public void warmUp( ProgressReporter progressReporter ) { } - private class EvaluationThread extends Thread { - - private final Executor executor; - private final List theQueryList; - private boolean abort = false; - @Setter - private EvaluationThreadMonitor threadMonitor; - - - 
EvaluationThread( List queryList, Executor executor ) { - super( "EvaluationThread" ); - this.executor = executor; - theQueryList = queryList; - } - - - @Override - public void run() { - long measuredTimeStart; - long measuredTime; - QueryListEntry queryListEntry; - - while ( !theQueryList.isEmpty() && !abort ) { - measuredTimeStart = System.nanoTime(); - try { - queryListEntry = theQueryList.removeFirst(); - } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... - // This can happen due to concurrency if two threads enter the while-loop and there is only one thread left - // Simply leaf the loop - break; - } - try { - executor.executeQuery( queryListEntry.query ); - } catch ( ExecutorException e ) { - log.error( "Caught exception while executing the following query: {}", queryListEntry.query.getClass().getName(), e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - measuredTime = System.nanoTime() - measuredTimeStart; - measuredTimes.add( measuredTime ); - measuredTimePerQueryType.get( queryListEntry.templateId ).add( measuredTime ); - if ( commitAfterEveryQuery ) { - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - } - } - - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - - executor.flushCsvWriter(); - } - - - public void abort() { - 
this.abort = true; - } - - - public void closeExecutor() { - commitAndCloseExecutor( executor ); - } - - } - - - private class EvaluationThreadMonitor { - - private final List threads; - @Getter - private Exception exception; - @Getter - private boolean aborted; - - - public EvaluationThreadMonitor( List threads ) { - this.threads = threads; - this.aborted = false; - } - - - public void abortAll() { - this.aborted = true; - threads.forEach( EvaluationThread::abort ); - } - - - public void notifyAboutError( Exception e ) { - exception = e; - abortAll(); - } - - } - - private long countNumberOfRecords( Executor executor, QueryBuilder queryBuilder ) throws ExecutorException { return executor.executeQueryAndGetNumber( queryBuilder.getNewQuery() ); } @@ -691,18 +505,6 @@ public void notifyAboutError( Exception e ) { } - @Override - public void analyze( Properties properties, File outputDirectory ) { - properties.put( "measuredTime", calculateMean( measuredTimes ) ); - - measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); - properties.put( "queryTypes_maxId", queryTypes.size() ); - properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); - properties.put( "numberOfQueries", measuredTimes.size() ); - properties.put( "throughput", measuredTimes.size() / (executeRuntime / 1000000000.0) ); - } - - @Override public int getNumberOfInsertThreads() { return config.numberOfUserGenerationThreads + config.numberOfAuctionGenerationThreads; diff --git a/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java b/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java index 43ecb80..a1a5056 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/graph/GraphBench.java @@ -25,20 +25,15 @@ package org.polypheny.simpleclient.scenario.graph; import java.io.File; -import java.io.FileWriter; -import 
java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Properties; import java.util.Random; import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.simpleclient.QueryMode; import org.polypheny.simpleclient.executor.Executor; @@ -49,10 +44,10 @@ import org.polypheny.simpleclient.executor.PolyphenyDbExecutor; import org.polypheny.simpleclient.main.CsvWriter; import org.polypheny.simpleclient.main.ProgressReporter; +import org.polypheny.simpleclient.query.Query; import org.polypheny.simpleclient.query.QueryBuilder; import org.polypheny.simpleclient.query.QueryListEntry; -import org.polypheny.simpleclient.scenario.EvaluationThread; -import org.polypheny.simpleclient.scenario.Scenario; +import org.polypheny.simpleclient.scenario.PolyphenyScenario; import org.polypheny.simpleclient.scenario.graph.queryBuilder.CountNodePropertyBuilder; import org.polypheny.simpleclient.scenario.graph.queryBuilder.CreateGraphDatabase; import org.polypheny.simpleclient.scenario.graph.queryBuilder.DeleteNodeBuilder; @@ -67,7 +62,7 @@ @Slf4j -public class GraphBench extends Scenario { +public class GraphBench extends PolyphenyScenario { public static final String GRAPH_NAMESPACE = "test"; public static boolean EXPECTED_RESULT = true; @@ -161,70 +156,7 @@ public long execute( ProgressReporter progressReporter, CsvWriter csvWriter, Fil addNumberOfTimes( queryList, new RelatedInsertBuilder( config ), config.numberOfInsertQueries ); addNumberOfTimes( queryList, new DeleteNodeBuilder( config ), config.numberOfDeleteQueries ); - Collections.shuffle( queryList, new Random( config.seed ) ); - - // This dumps the cypher queries independent of the selected interface - if ( outputDirectory != null && dumpQueryList ) { - 
log.info( "Dump query list..." ); - try { - FileWriter fw = new FileWriter( outputDirectory.getPath() + File.separator + "queryList" ); - queryList.forEach( query -> { - try { - fw.append( query.query.getCypher() ).append( "\n" ); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } ); - fw.close(); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } - - log.info( "Executing benchmark..." ); - (new Thread( new ProgressReporter.ReportQueryListProgress( queryList, progressReporter ) )).start(); - long startTime = System.nanoTime(); - - ArrayList threads = new ArrayList<>(); - for ( int i = 0; i < numberOfThreads; i++ ) { - threads.add( new EvaluationThread( queryList, executorFactory.createExecutorInstance( csvWriter, GRAPH_NAMESPACE ), queryTypes.keySet(), commitAfterEveryQuery ) ); - } - - EvaluationThreadMonitor threadMonitor = new EvaluationThreadMonitor( threads ); - threads.forEach( t -> t.setThreadMonitor( threadMonitor ) ); - - for ( EvaluationThread thread : threads ) { - thread.start(); - } - - for ( EvaluationThread thread : threads ) { - try { - thread.join(); - this.measuredTimes.addAll( thread.getMeasuredTimes() ); - thread.getMeasuredTimePerQueryType().forEach( ( k, v ) -> { - if ( !this.measuredTimePerQueryType.containsKey( k ) ) { - this.measuredTimePerQueryType.put( k, new ArrayList<>() ); - } - this.measuredTimePerQueryType.get( k ).addAll( v ); - } ); - } catch ( InterruptedException e ) { - throw new RuntimeException( "Unexpected interrupt", e ); - } - } - - executeRuntime = System.nanoTime() - startTime; - - for ( EvaluationThread thread : threads ) { - thread.closeExecutor(); - } - - if ( threadMonitor.aborted ) { - throw new RuntimeException( "Exception while executing benchmark", threadMonitor.exception ); - } - - log.info( "run time: {} s", executeRuntime / 1000000000 ); - - return executeRuntime; + return commonExecute( queryList, progressReporter, outputDirectory, 
numberOfThreads, Query::getCypher, () -> executorFactory.createExecutorInstance( csvWriter, GRAPH_NAMESPACE ), new Random( config.seed ) ); } @@ -292,46 +224,6 @@ public void warmUp( ProgressReporter progressReporter ) { } - public static class EvaluationThreadMonitor { - - private final List threads; - @Getter - private Exception exception; - @Getter - private boolean aborted; - - - public EvaluationThreadMonitor( List threads ) { - this.threads = threads; - this.aborted = false; - } - - - public void abortAll() { - this.aborted = true; - threads.forEach( EvaluationThread::abort ); - } - - - public void notifyAboutError( Exception e ) { - exception = e; - abortAll(); - } - - } - - - @Override - public void analyze( Properties properties, File outputDirectory ) { - properties.put( "measuredTime", calculateMean( measuredTimes ) ); - measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); - properties.put( "queryTypes_maxId", queryTypes.size() ); - properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); - properties.put( "numberOfQueries", measuredTimes.size() ); - properties.put( "throughput", measuredTimes.size() / (executeRuntime / 1000000000.0) ); - } - - @Override public int getNumberOfInsertThreads() { return 1; diff --git a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java index cff89c5..555d0dd 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/knnbench/KnnBench.java @@ -26,19 +26,14 @@ package org.polypheny.simpleclient.scenario.knnbench; import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; -import 
java.util.Properties; +import java.util.Random; import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; -import lombok.Getter; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.polypheny.simpleclient.QueryMode; import org.polypheny.simpleclient.executor.Executor; @@ -46,9 +41,10 @@ import org.polypheny.simpleclient.executor.ExecutorException; import org.polypheny.simpleclient.main.CsvWriter; import org.polypheny.simpleclient.main.ProgressReporter; +import org.polypheny.simpleclient.query.Query; import org.polypheny.simpleclient.query.QueryBuilder; import org.polypheny.simpleclient.query.QueryListEntry; -import org.polypheny.simpleclient.scenario.Scenario; +import org.polypheny.simpleclient.scenario.PolyphenyScenario; import org.polypheny.simpleclient.scenario.knnbench.queryBuilder.CreateIntFeature; import org.polypheny.simpleclient.scenario.knnbench.queryBuilder.CreateMetadata; import org.polypheny.simpleclient.scenario.knnbench.queryBuilder.CreateRealFeature; @@ -61,7 +57,7 @@ @Slf4j -public class KnnBench extends Scenario { +public class KnnBench extends PolyphenyScenario { private final KnnBenchConfig config; @@ -141,63 +137,7 @@ public long execute( ProgressReporter progressReporter, CsvWriter csvWriter, Fil addNumberOfTimes( queryList, new MetadataKnnIntFeature( config.randomSeedQuery, config.dimensionFeatureVectors, config.limitKnnQueries, config.distanceNorm ), config.numberOfMetadataKnnIntFeatureQueries ); addNumberOfTimes( queryList, new MetadataKnnRealFeature( config.randomSeedQuery, config.dimensionFeatureVectors, config.limitKnnQueries, config.distanceNorm ), config.numberOfMetadataKnnRealFeatureQueries ); - Collections.shuffle( queryList ); - - // This dumps the sql queries independent of the selected interface - if ( outputDirectory != null && dumpQueryList ) { - log.info( "Dump query list..." 
); - try { - FileWriter fw = new FileWriter( outputDirectory.getPath() + File.separator + "queryList" ); - queryList.forEach( query -> { - try { - fw.append( query.query.getSql() ).append( "\n" ); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } ); - fw.close(); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } - - log.info( "Executing benchmark..." ); - (new Thread( new ProgressReporter.ReportQueryListProgress( queryList, progressReporter ) )).start(); - long startTime = System.nanoTime(); - - ArrayList threads = new ArrayList<>(); - for ( int i = 0; i < numberOfThreads; i++ ) { - threads.add( new EvaluationThread( queryList, executorFactory.createExecutorInstance( csvWriter ) ) ); - } - - EvaluationThreadMonitor threadMonitor = new EvaluationThreadMonitor( threads ); - threads.forEach( t -> t.setThreadMonitor( threadMonitor ) ); - - for ( EvaluationThread thread : threads ) { - thread.start(); - } - - for ( Thread thread : threads ) { - try { - thread.join(); - } catch ( InterruptedException e ) { - throw new RuntimeException( "Unexpected interrupt", e ); - } - } - - executeRuntime = System.nanoTime() - startTime; - - for ( EvaluationThread thread : threads ) { - thread.closeExecutor(); - } - - if ( threadMonitor.aborted ) { - throw new RuntimeException( "Exception while executing benchmark", threadMonitor.exception ); - } - - log.info( "run time: {} s", executeRuntime / 1000000000 ); - - return executeRuntime; + return commonExecute( queryList, progressReporter, outputDirectory, numberOfThreads, Query::getSql, () -> executorFactory.createExecutorInstance( csvWriter ), new Random() ); } @@ -254,138 +194,6 @@ public void warmUp( ProgressReporter progressReporter ) { } - private class EvaluationThread extends Thread { - - private final Executor executor; - private final List theQueryList; - private boolean abort = false; - @Setter - private EvaluationThreadMonitor threadMonitor; - - - 
EvaluationThread( List queryList, Executor executor ) { - super( "EvaluationThread" ); - this.executor = executor; - theQueryList = queryList; - } - - - @Override - public void run() { - long measuredTimeStart; - long measuredTime; - QueryListEntry queryListEntry; - - while ( !theQueryList.isEmpty() && !abort ) { - measuredTimeStart = System.nanoTime(); - try { - queryListEntry = theQueryList.removeFirst(); - } catch ( IndexOutOfBoundsException e ) { // This is neither nice nor efficient... - // This can happen due to concurrency if two threads enter the while-loop and there is only one thread left - // Simply leaf the loop - break; - } - try { - executor.executeQuery( queryListEntry.query ); - } catch ( ExecutorException e ) { - log.error( "Caught exception while executing queries", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - measuredTime = System.nanoTime() - measuredTimeStart; - measuredTimes.add( measuredTime ); - measuredTimePerQueryType.get( queryListEntry.templateId ).add( measuredTime ); - if ( commitAfterEveryQuery ) { - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - } - } - - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - - executor.flushCsvWriter(); - } - - - public void abort() { - this.abort = true; - } - - - public void closeExecutor() { - 
commitAndCloseExecutor( executor ); - } - - } - - - private class EvaluationThreadMonitor { - - private final List threads; - @Getter - private Exception exception; - @Getter - private boolean aborted; - - - public EvaluationThreadMonitor( List threads ) { - this.threads = threads; - this.aborted = false; - } - - - public void abortAll() { - this.aborted = true; - threads.forEach( EvaluationThread::abort ); - } - - - public void notifyAboutError( Exception e ) { - exception = e; - abortAll(); - } - - } - - - @Override - public void analyze( Properties properties, File outputDirectory ) { - properties.put( "measuredTime", calculateMean( measuredTimes ) ); - - measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); - properties.put( "queryTypes_maxId", queryTypes.size() ); - properties.put( "executeRuntime", executeRuntime / 1000000000.0 ); - properties.put( "numberOfQueries", measuredTimes.size() ); - properties.put( "throughput", measuredTimes.size() / (executeRuntime / 1000000000.0) ); - } - - @Override public int getNumberOfInsertThreads() { return 1; diff --git a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java index 8c5bcd6..509405b 100644 --- a/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java +++ b/src/main/java/org/polypheny/simpleclient/scenario/multimedia/MultimediaBench.java @@ -27,20 +27,15 @@ import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.Properties; +import java.util.Random; import java.util.Vector; import java.util.concurrent.ConcurrentHashMap; import kong.unirest.core.Unirest; -import lombok.Getter; -import lombok.Setter; import 
lombok.extern.slf4j.Slf4j; import org.polypheny.simpleclient.QueryMode; import org.polypheny.simpleclient.executor.Executor; @@ -48,9 +43,10 @@ import org.polypheny.simpleclient.executor.ExecutorException; import org.polypheny.simpleclient.main.CsvWriter; import org.polypheny.simpleclient.main.ProgressReporter; +import org.polypheny.simpleclient.query.Query; import org.polypheny.simpleclient.query.QueryBuilder; import org.polypheny.simpleclient.query.QueryListEntry; -import org.polypheny.simpleclient.scenario.Scenario; +import org.polypheny.simpleclient.scenario.PolyphenyScenario; import org.polypheny.simpleclient.scenario.multimedia.queryBuilder.CreateTable; import org.polypheny.simpleclient.scenario.multimedia.queryBuilder.DeleteRandomTimeline; import org.polypheny.simpleclient.scenario.multimedia.queryBuilder.InsertRandomTimeline; @@ -62,7 +58,7 @@ @Slf4j -public class MultimediaBench extends Scenario { +public class MultimediaBench extends PolyphenyScenario { private final MultimediaConfig config; @@ -168,7 +164,6 @@ public void generateData( DatabaseInstance databaseInstance, ProgressReporter pr @Override public long execute( ProgressReporter progressReporter, CsvWriter csvWriter, File outputDirectory, int numberOfThreads ) { - log.info( "Preparing query list for the benchmark..." 
); List queryList = new Vector<>(); addNumberOfTimes( queryList, new SelectRandomUser( config.numberOfUsers ), config.numberOfSelectUserQueries ); @@ -179,64 +174,7 @@ public long execute( ProgressReporter progressReporter, CsvWriter csvWriter, Fil addNumberOfTimes( queryList, new DeleteRandomTimeline( config.numberOfUsers * config.postsPerUser ), config.numberOfDeleteTimelineQueries ); addNumberOfTimes( queryList, new InsertRandomTimeline( config.numberOfUsers, config.postsPerUser, config.imgSize, config.numberOfFrames, config.fileSizeKB, false ), config.numberOfInsertTimelineQueries ); - Collections.shuffle( queryList ); - - // This dumps the sql queries independent of the selected interface - // always false for the MultimediaBench - if ( outputDirectory != null && dumpQueryList ) { - log.info( "Dump query list..." ); - try { - FileWriter fw = new FileWriter( outputDirectory.getPath() + File.separator + "queryList" ); - queryList.forEach( query -> { - try { - fw.append( query.query.getSql() ).append( "\n" ); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } ); - fw.close(); - } catch ( IOException e ) { - log.error( "Error while dumping query list", e ); - } - } - - log.info( "Executing benchmark..." 
); - (new Thread( new ProgressReporter.ReportQueryListProgress( queryList, progressReporter ) )).start(); - long startTime = System.nanoTime(); - - ArrayList threads = new ArrayList<>(); - for ( int i = 0; i < numberOfThreads; i++ ) { - threads.add( new EvaluationThread( queryList, executorFactory.createExecutorInstance( csvWriter ) ) ); - } - - EvaluationThreadMonitor threadMonitor = new EvaluationThreadMonitor( threads ); - threads.forEach( t -> t.setThreadMonitor( threadMonitor ) ); - - for ( EvaluationThread thread : threads ) { - thread.start(); - } - - for ( Thread thread : threads ) { - try { - thread.join(); - } catch ( InterruptedException e ) { - throw new RuntimeException( "Unexpected interrupt", e ); - } - } - - long runTime = System.nanoTime() - startTime; - - for ( EvaluationThread thread : threads ) { - thread.closeExecutor(); - } - - if ( threadMonitor.aborted ) { - throw new RuntimeException( "Exception while executing benchmark", threadMonitor.exception ); - } - - log.info( "run time: {} s", runTime / 1000000000 ); - - return runTime; + return commonExecute( queryList, progressReporter, outputDirectory, numberOfThreads, Query::getSql, () -> executorFactory.createExecutorInstance( csvWriter ), new Random() ); } @@ -272,135 +210,6 @@ public void warmUp( ProgressReporter progressReporter ) { } - private class EvaluationThread extends Thread { - - private final Executor executor; - private final List theQueryList; - private boolean abort = false; - @Setter - private EvaluationThreadMonitor threadMonitor; - - - EvaluationThread( List queryList, Executor executor ) { - super( "EvaluationThread" ); - this.executor = executor; - theQueryList = queryList; - } - - - @Override - public void run() { - long measuredTimeStart; - long measuredTime; - QueryListEntry queryListEntry; - - while ( !theQueryList.isEmpty() && !abort ) { - measuredTimeStart = System.nanoTime(); - try { - queryListEntry = theQueryList.removeFirst(); - } catch ( IndexOutOfBoundsException 
e ) { // This is neither nice nor efficient... - // This can happen due to concurrency if two threads enter the while-loop and there is only one thread left - // Simply leaf the loop - break; - } - try { - executor.executeQuery( queryListEntry.query ); - } catch ( ExecutorException | RuntimeException e ) { - log.error( "Caught exception while executing queries", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - measuredTime = System.nanoTime() - measuredTimeStart; - measuredTimes.add( measuredTime ); - measuredTimePerQueryType.get( queryListEntry.templateId ).add( measuredTime ); - if ( commitAfterEveryQuery ) { - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - } - } - - try { - executor.executeCommit(); - } catch ( ExecutorException e ) { - log.error( "Caught exception while committing", e ); - threadMonitor.notifyAboutError( e ); - try { - executor.executeRollback(); - } catch ( ExecutorException ex ) { - log.error( "Error while rollback", e ); - } - throw new RuntimeException( e ); - } - - executor.flushCsvWriter(); - } - - - public void abort() { - this.abort = true; - } - - - public void closeExecutor() { - commitAndCloseExecutor( executor ); - } - - } - - - private class EvaluationThreadMonitor { - - private final List threads; - @Getter - private Exception exception; - @Getter - private boolean aborted; - - - public EvaluationThreadMonitor( List threads ) { - this.threads = threads; - this.aborted = false; - } - - - public void abortAll() { - this.aborted = true; - threads.forEach( EvaluationThread::abort ); - } - - - public void 
notifyAboutError( Exception e ) { - exception = e; - abortAll(); - } - - } - - - @Override - public void analyze( Properties properties, File outputDirectory ) { - properties.put( "measuredTime", calculateMean( measuredTimes ) ); - - measuredTimePerQueryType.forEach( ( templateId, time ) -> calculateResults( queryTypes, properties, templateId, time ) ); - properties.put( "queryTypes_maxId", queryTypes.size() ); - } - - @Override public int getNumberOfInsertThreads() { return 1;