10 changes: 5 additions & 5 deletions pom.xml
@@ -130,7 +130,7 @@
<spring.datasource.hikari.connection-test-query-timeout>2000</spring.datasource.hikari.connection-test-query-timeout>
<spring.datasource.hikari.maximum-pool-size>5</spring.datasource.hikari.maximum-pool-size>
<spring.datasource.hikari.minimum-idle>1</spring.datasource.hikari.minimum-idle>
<spring.datasource.hikari.connection-timeout>5000</spring.datasource.hikari.connection-timeout>
<spring.datasource.hikari.connection-timeout>30000</spring.datasource.hikari.connection-timeout>
<spring.datasource.hikari.register-mbeans>true</spring.datasource.hikari.register-mbeans>
<spring.datasource.hikari.mbean-name>authDataSource</spring.datasource.hikari.mbean-name>

@@ -140,7 +140,7 @@
<server.ssl.key-store-password></server.ssl.key-store-password>
<server.ssl.key-password></server.ssl.key-password>

<arachne.version>1.14.1-alpha2</arachne.version>
<arachne.version>1.16.0-SNAPSHOT</arachne.version>
<jersey-media-multipart.version>2.25.1</jersey-media-multipart.version>
<execution.invalidation.period>600000</execution.invalidation.period>
<execution.invalidation.maxage.hours>12</execution.invalidation.maxage.hours>
@@ -495,7 +495,7 @@
<dependency>
<groupId>org.ohdsi.sql</groupId>
<artifactId>SqlRender</artifactId>
<version>1.6.3-SNAPSHOT</version>
<version>1.6.4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>commons-dbutils</groupId>
@@ -521,7 +521,7 @@
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.3.2</version>
<version>3.6</version>
</dependency>
<dependency>
<groupId>org.flywaydb</groupId>
@@ -634,7 +634,7 @@
<dependency>
<groupId>org.ohdsi</groupId>
<artifactId>circe</artifactId>
<version>1.8.1</version>
<version>1.9.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.ohdsi</groupId>
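Most of the pom changes above are version bumps (Arachne, SqlRender, commons-lang3, circe); the Hikari edit is behavioral. `connection-timeout` is the number of milliseconds a caller blocks in `getConnection()` before HikariCP throws `SQLTransientConnectionException`, so raising it from 5000 to 30000 restores HikariCP's 30-second default and tolerates slower databases. A minimal sketch of what these properties configure at runtime (the JDBC URL is illustrative):

```java
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

class AuthPoolSketch {
    static HikariDataSource buildPool() {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:postgresql://localhost:5432/ohdsi"); // illustrative URL
        config.setMaximumPoolSize(5);        // ...hikari.maximum-pool-size
        config.setMinimumIdle(1);            // ...hikari.minimum-idle
        config.setConnectionTimeout(30_000); // ...hikari.connection-timeout, in ms
        config.setRegisterMbeans(true);      // ...hikari.register-mbeans, for JMX metrics
        return new HikariDataSource(config); // pool starts; getConnection() now waits
    }                                        // up to 30 s before timing out
}
```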
2 changes: 1 addition & 1 deletion src/main/extras/bigquery/README.md
@@ -1,6 +1,6 @@
To build WebAPI with Google BigQuery support do the following:
1. Go to https://cloud.google.com/bigquery/partners/simba-drivers/
2. Download the JDBC 4.2 driver
3. Unpack archive and and set the bigquery.classpath property in your settings.xml to the unpacked archive location (ie: C://downloads/bigquery) inside the webapi-bigquery profile.
3. Unpack archive and set the bigquery.classpath property in your settings.xml to the unpacked archive location (ie: C://downloads/bigquery) inside the webapi-bigquery profile.
4. Build WebAPI with webapi-bigquery profile.
* mvn -Pwebapi-postgresql,webapi-bigquery clean package
4 changes: 2 additions & 2 deletions src/main/extras/impala/README.md
@@ -1,7 +1,7 @@
To build WebAPI with Impala support do the following:
1. Go to https://www.cloudera.com/downloads/connectors/impala/jdbc/2-5-43.html
2. Register to clouder if you did not registered earlier or sign in to your Cloudera account
2. Register with Cloudera if you have not registered earlier, or sign in to your Cloudera account
3. Download the latest Impala JDBC drivers
4. Unpack archive and and set the impala.classpath property in your settings.xml to the unpacked archive location (ie: C://downloads/impalaJDBC) inside the webapi-impala profile.
4. Unpack archive and set the impala.classpath property in your settings.xml to the unpacked archive location (ie: C://downloads/impalaJDBC) inside the webapi-impala profile.
5. Build WebAPI with webapi-impala profile.
* mvn -Pwebapi-postgresql,webapi-impala clean package
10 changes: 9 additions & 1 deletion src/main/java/org/ohdsi/webapi/DataAccessConfig.java
@@ -77,7 +77,15 @@ public DataSource primaryDataSource() {
//note autocommit defaults vary across vendors. use provided @Autowired TransactionTemplate

String[] supportedDrivers;
supportedDrivers = new String[]{"org.postgresql.Driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver", "oracle.jdbc.driver.OracleDriver", "com.amazon.redshift.jdbc.Driver", "com.cloudera.impala.jdbc41.Driver", "net.starschema.clouddb.jdbc.BQDriver", "org.netezza.Driver", "com.simba.googlebigquery.jdbc42.Driver"};
supportedDrivers = new String[]{"org.postgresql.Driver",
"com.microsoft.sqlserver.jdbc.SQLServerDriver",
"oracle.jdbc.driver.OracleDriver",
"com.amazon.redshift.jdbc.Driver",
"com.cloudera.impala.jdbc41.Driver",
"net.starschema.clouddb.jdbc.BQDriver",
"org.netezza.Driver",
"com.simba.googlebigquery.jdbc42.Driver",
"org.apache.hive.jdbc.HiveDriver"};
for (String driverName : supportedDrivers) {
try {
Class.forName(driverName);
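The hunk above reflows the driver list and appends `org.apache.hive.jdbc.HiveDriver`. The surrounding loop, visible in the context lines, is the standard optional-registration pattern: `Class.forName` runs each driver's static initializer, which self-registers with `java.sql.DriverManager`, and drivers absent from the classpath are skipped instead of failing startup. A minimal sketch of that pattern:

```java
class DriverRegistrationSketch {
    static void registerAvailableDrivers() {
        String[] supportedDrivers = {
                "org.postgresql.Driver",
                "org.apache.hive.jdbc.HiveDriver" // the driver this PR adds
        };
        for (String driverName : supportedDrivers) {
            try {
                // Loading the class runs its static initializer, which calls
                // DriverManager.registerDriver(...) on itself.
                Class.forName(driverName);
            } catch (ClassNotFoundException notOnClasspath) {
                // Optional driver absent: skip it instead of aborting startup.
            }
        }
    }
}
```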
@@ -1,5 +1,8 @@
package org.ohdsi.webapi.cohortcharacterization;

import com.odysseusinc.arachne.commons.types.DBMSType;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.service.SourceService;
@@ -44,8 +47,12 @@ private Object doTask(JobParameters parameters) {
final Source source = sourceService.findBySourceId(sourceId);
final String resultsQualifier = SourceUtils.getResultsQualifier(source);
final String tempQualifier = SourceUtils.getTempQualifier(source, resultsQualifier);
jdbcTemplate.execute(SqlTranslate.translateSql(sql, source.getSourceDialect(), null, tempQualifier));
String toRemove = SqlTranslate.translateSql(sql, source.getSourceDialect(), null, tempQualifier);

if (Objects.equals(DBMSType.HIVE.getOhdsiDB(), source.getSourceDialect())){
toRemove = StringUtils.remove(toRemove, ';');
}
jdbcTemplate.execute(toRemove);
return null;
}

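This is the first of the PR's Hive accommodations: Hive's JDBC driver rejects trailing semicolons inside a statement, so the tasklet strips them from the translated SQL before execution. A minimal sketch of the same guard; that `DBMSType.HIVE.getOhdsiDB()` renders as the string `hive` is an assumption here:

```java
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.sql.SqlTranslate;
import org.springframework.jdbc.core.JdbcTemplate;

class HiveSemicolonSketch {
    static void execute(JdbcTemplate jdbcTemplate, String sql,
                        String sourceDialect, String tempQualifier) {
        String translated = SqlTranslate.translateSql(sql, sourceDialect, null, tempQualifier);
        if ("hive".equalsIgnoreCase(sourceDialect)) {         // assumed dialect key
            translated = StringUtils.remove(translated, ';'); // Hive JDBC rejects ';'
        }
        jdbcTemplate.execute(translated);
    }
}
```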
@@ -91,7 +91,7 @@ protected String[] prepareQueries(ChunkContext chunkContext, CancelableJdbcTempl
*
* Therefore, there are two ways:
* - either precisely group SQLs into statements so that temp tables aren't re-used in a single statement,
* - or use ‘permenant temporary tables’
* - or use ‘permanent temporary tables’
*
* The second option looks better since such SQL could be exported and executed manually,
* which is not the case with the first option.
@@ -164,11 +164,15 @@ public SimpleJobBuilder buildJobForExecutionEngineBasedAnalysisTasklet(String an
Step waitCallbackStep = stepBuilderFactory.get(analysisTypeName + ".waitForCallback")
.tasklet(callbackTasklet)
.build();

DropCohortTableListener dropCohortTableListener = new DropCohortTableListener(getSourceJdbcTemplate(source),
transactionTemplate, sourceService, sourceAwareSqlRender);

return jobBuilders.get(analysisTypeName)
.start(createAnalysisExecutionStep)
.next(runExecutionStep)
.next(waitCallbackStep)
.listener(dropCohortTableListener)
.listener(new AutoremoveJobListener(jobService));
}
}
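Attaching `DropCohortTableListener` to the job, rather than appending a cleanup step, means the drop logic runs in `afterJob()` regardless of whether the execution-engine run completed or failed. A hypothetical sketch of that listener shape; the real class also takes the transaction template and SQL renderer shown in the diff, and `dropSql` here is a placeholder:

```java
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
import org.springframework.jdbc.core.JdbcTemplate;

class DropTableListenerSketch extends JobExecutionListenerSupport {
    private final JdbcTemplate jdbcTemplate;
    private final String dropSql; // hypothetical, e.g. a rendered "DROP TABLE ..." statement

    DropTableListenerSketch(JdbcTemplate jdbcTemplate, String dropSql) {
        this.jdbcTemplate = jdbcTemplate;
        this.dropSql = dropSql;
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        // afterJob fires for COMPLETED and FAILED alike, so the temporary
        // cohort table is cleaned up even when the analysis errors out.
        jdbcTemplate.execute(dropSql);
    }
}
```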
@@ -1,7 +1,7 @@
package org.ohdsi.webapi.executionengine.service;

import com.odysseusinc.arachne.execution_engine_common.util.CommonFileUtils;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.io.FileUtils;
@@ -156,9 +156,8 @@ private void processArchive(Path zipPath, Map<String, Object> variables) {

// Delete archive volumes
ZipFile zipFile = new ZipFile(zipPath.toFile());
List<String> filenames = zipFile.getSplitZipFiles();
filenames.forEach(filename -> {
File file = new File(filename);
List<File> filenames = zipFile.getSplitZipFiles();
filenames.forEach(file -> {
file.delete();
});

@@ -170,10 +169,10 @@ private void processArchive(Path zipPath, Map<String, Object> variables) {
}
});
CommonFileUtils.compressAndSplit(temporaryDir, zipPath.toFile(), null);
} catch (IOException e) {
LOGGER.error("File writing error", e);
} catch (ZipException e) {
LOGGER.error("Error unzipping file", e);
} catch (IOException e) {
LOGGER.error("File writing error", e);
} finally {
FileUtils.deleteQuietly(temporaryDir);
}
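The import swap from `net.lingala.zip4j.core.ZipFile` to `net.lingala.zip4j.ZipFile` is the zip4j 1.x to 2.x migration. In 2.x, `getSplitZipFiles()` returns `List<File>` rather than `List<String>`, which removes the `new File(filename)` wrapping, and `ZipException` now extends `IOException`, which is why the catch blocks above are reordered and why the next file drops `ZipException` from a multi-catch. A minimal sketch of the 2.x call:

```java
import java.io.File;
import java.util.List;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.exception.ZipException;

class SplitVolumeCleanupSketch {
    static void deleteVolumes(File archive) throws ZipException {
        ZipFile zipFile = new ZipFile(archive);
        List<File> volumes = zipFile.getSplitZipFiles(); // 2.x returns File objects
        volumes.forEach(File::delete);                   // no String -> File conversion
    }
}
```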
@@ -145,7 +145,7 @@ public void runScript(Long executionId, Source source, List<AnalysisFile> files,
} finally {
FileUtils.deleteQuietly(tempDir);
}
}catch (ZipException | IOException e) {
}catch (IOException e) {
log.error("Failed to compress request files", e);
throw new InternalServerErrorException(e);
}
@@ -2,6 +2,7 @@

import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.google.common.base.MoreObjects;
import com.odysseusinc.arachne.commons.types.DBMSType;
import org.hibernate.Hibernate;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlRender;
@@ -324,7 +325,8 @@ public String buildAnalysisSql(Long generationId, PathwayAnalysisEntity pathwayA
"pathway_target_cohort_id",
"max_depth",
"combo_window",
"allow_repeats"
"allow_repeats",
"isHive"
};
String[] values = new String[]{
generationId.toString(),
@@ -335,7 +337,8 @@ public String buildAnalysisSql(Long generationId, PathwayAnalysisEntity pathwayA
tc.getCohortDefinition().getId().toString(),
pathwayAnalysis.getMaxDepth().toString(),
MoreObjects.firstNonNull(pathwayAnalysis.getCombinationWindow(), 1).toString(),
String.valueOf(pathwayAnalysis.isAllowRepeats())
String.valueOf(pathwayAnalysis.isAllowRepeats()),
String.valueOf(Objects.equals(DBMSType.HIVE.getOhdsiDB(), source.getSourceDialect()))
};

String renderedSql = SqlRender.renderSql(analysisSql, params, values);
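The new `isHive` parameter threads the source dialect into the pathway-analysis SQL template, where SqlRender's conditional syntax can branch on it. A minimal sketch of the rendering call; the template text is illustrative, not the real pathway SQL:

```java
import org.ohdsi.sql.SqlRender;

class IsHiveRenderSketch {
    static String render(boolean isHive) {
        // renderSql substitutes @isHive positionally, and the {...} ? {...} : {...}
        // conditional keeps only the matching branch in the rendered output.
        String template =
                "{@isHive == 'true'} ? {SELECT /* hive branch */ 1} : {SELECT 2}";
        return SqlRender.renderSql(
                template,
                new String[]{"isHive"},
                new String[]{String.valueOf(isHive)});
    }
}
```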
21 changes: 18 additions & 3 deletions src/main/java/org/ohdsi/webapi/service/DDLService.java
@@ -20,11 +20,13 @@

import static org.ohdsi.webapi.service.SqlRenderService.translateSQL;

import com.odysseusinc.arachne.commons.types.DBMSType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
@@ -76,9 +78,14 @@ public class DDLService {
"/ddl/results/pathway_analysis_stats.sql"
);

private static final String INIT_HERACLES_PERIODS = "/ddl/results/init_heracles_periods.sql";

public static final Collection<String> RESULT_INIT_FILE_PATHS = Arrays.asList(
"/ddl/results/init_heracles_analysis.sql",
"/ddl/results/init_heracles_periods.sql"
"/ddl/results/init_heracles_analysis.sql", INIT_HERACLES_PERIODS
);

public static final Collection<String> HIVE_RESULT_INIT_FILE_PATHS = Arrays.asList(
"/ddl/results/init_hive_heracles_analysis.sql", INIT_HERACLES_PERIODS
);

public static final Collection<String> INIT_CONCEPT_HIERARCHY_FILE_PATHS = Arrays.asList(
@@ -123,7 +130,15 @@ public String generateResultSQL(
put(TEMP_SCHEMA, oracleTempSchema);
}};

return generateSQL(dialect, params, resultDDLFilePaths, RESULT_INIT_FILE_PATHS, RESULT_INDEX_FILE_PATHS);
return generateSQL(dialect, params, resultDDLFilePaths, getResultInitFilePaths(dialect), RESULT_INDEX_FILE_PATHS);
}

private Collection<String> getResultInitFilePaths(String dialect) {
if (Objects.equals(DBMSType.HIVE.getOhdsiDB(), dialect)) {
return HIVE_RESULT_INIT_FILE_PATHS;
} else {
return RESULT_INIT_FILE_PATHS;
}
}

@GET
10 changes: 6 additions & 4 deletions src/main/java/org/ohdsi/webapi/source/Source.java
@@ -117,12 +117,14 @@ public String getTableQualifier(DaimonType daimonType) {
}

public String getTableQualifierOrNull(DaimonType daimonType) {
for (SourceDaimon sourceDaimon : this.getDaimons()) {
if (sourceDaimon.getDaimonType() == daimonType) {
return sourceDaimon.getTableQualifier();
if (this.getDaimons() != null){
for (SourceDaimon sourceDaimon : this.getDaimons()) {
if (sourceDaimon.getDaimonType() == daimonType) {
return sourceDaimon.getTableQualifier();
}
}
}
return null;
return null;
}

public String getSourceKey() {
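The null guard protects `getTableQualifierOrNull` when a source's daimon collection was never populated, returning null instead of throwing a `NullPointerException`. The same lookup could also be written null-safely with streams; a sketch assuming `getDaimons()` returns a collection of daimons exposing the two getters used here (the nested types stand in for the project's classes):

```java
import java.util.Collection;
import java.util.Collections;
import java.util.Optional;

class SourceSketch {
    Collection<SourceDaimon> daimons; // may be null when never loaded

    String getTableQualifierOrNull(DaimonType daimonType) {
        return Optional.ofNullable(daimons)
                .orElseGet(Collections::emptyList) // treat null as "no daimons"
                .stream()
                .filter(d -> d.getDaimonType() == daimonType)
                .map(SourceDaimon::getTableQualifier)
                .findFirst()
                .orElse(null);
    }

    interface SourceDaimon {
        DaimonType getDaimonType();
        String getTableQualifier();
    }

    enum DaimonType { CDM, Vocabulary, Results } // illustrative subset
}
```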
2 changes: 2 additions & 0 deletions src/main/resources/ddl/results/heracles_results.sql
@@ -1,3 +1,5 @@
--HINT PARTITION(cohort_definition_id int)
--HINT BUCKET(analysis_id, 64)
IF OBJECT_ID('@results_schema.heracles_results', 'U') IS NULL
create table @results_schema.heracles_results
(
2 changes: 2 additions & 0 deletions src/main/resources/ddl/results/heracles_results_dist.sql
@@ -1,3 +1,5 @@
--HINT PARTITION(cohort_definition_id int)
--HINT BUCKET(analysis_id, 64)
IF OBJECT_ID('@results_schema.heracles_results_dist', 'U') IS NULL
create table @results_schema.heracles_results_dist
(
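The two `--HINT` comments added to both heracles results tables are SqlRender translation hints: a dialect whose DDL supports them (the Hive target of this PR and similar engines) can emit partitioning and bucketing clauses, while other dialects ignore the comments and keep the plain `CREATE TABLE`. A sketch of pushing a hinted statement through translation; the two-argument `translateSql` overload and the `hive` dialect key are assumptions, and the DDL is abbreviated from the diff:

```java
import org.ohdsi.sql.SqlTranslate;

class HintTranslationSketch {
    static String toHive() {
        String ddl =
                "--HINT PARTITION(cohort_definition_id int)\n" +
                "--HINT BUCKET(analysis_id, 64)\n" +
                "CREATE TABLE @results_schema.heracles_results (\n" +
                "  cohort_definition_id INT,\n" +
                "  analysis_id INT\n" +
                ");";
        // The hint comments survive translation and steer dialect-specific DDL.
        return SqlTranslate.translateSql(ddl, "hive");
    }
}
```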