diff --git a/.github/workflows/sonar-build.yml b/.github/workflows/sonar-build.yml
index 8b7d729e..d9b7760c 100644
--- a/.github/workflows/sonar-build.yml
+++ b/.github/workflows/sonar-build.yml
@@ -1,25 +1,25 @@
-name: Analyze project with Sonar Cloud
+name: SonarQube
on:
push:
branches:
- - master
+ - main
- develop
pull_request:
types: [opened, synchronize, reopened]
jobs:
- Analyze-project-with-Sonar-Cloud:
- name: Analyze project with Sonar Cloud
+ build:
+ name: Build and analyze
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
- - name: Set up JDK 11
+ - name: Set up JDK 17
uses: actions/setup-java@v4
with:
- distribution: 'temurin'
- java-version: 11
- - name: Cache SonarCloud packages
+ java-version: 17
+ distribution: 'temurin' # Alternative distribution options are available.
+ - name: Cache SonarQube packages
uses: actions/cache@v4
with:
path: ~/.sonar/cache
@@ -31,18 +31,7 @@ jobs:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
restore-keys: ${{ runner.os }}-m2
- - name: Build
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- run: mvn -B -Pjacoco verify
- - name: Set up JDK 17
- uses: actions/setup-java@v4
- with:
- distribution: 'temurin'
- java-version: 17
- - name: Sonar
+ - name: Build and analyze
env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- run: mvn -B org.sonarsource.scanner.maven:sonar-maven-plugin:sonar -Dsonar.projectKey=ontimize_ontimize-jee -Dsonar.coverage.jacoco.xmlReportPaths="$GITHUB_WORKSPACE/ontimize-jee-jacoco/target/site/jacoco-aggregate/jacoco.xml"
\ No newline at end of file
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN_EE }}
+ run: mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar -Dsonar.projectKey=ontimize_ontimize-jee
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c3176cba..2ef118ed 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,30 @@
## [Unreleased]
+## [5.14.0] - 2025-11-03
+### Added ✔️
+- **OpenAI Client**
+
+ An OpenAI client has been implemented with a method to process images. This method accepts a parameter of type ProcessRequest, which contains the following:
+ - The image file to be processed.
+  - An optional prompt to provide additional instructions (the default behavior is to analyze the image and return the result in a specific format).
+ - The number of retries for the request.
+ - The output class type (T) expected as a result.
+
+ The client is initialized via a constructor that receives an OpenAiClientConfig object. This configuration object includes:
+ - The user's API key.
+ - The model to be used.
+ - The maximum number of tokens allowed per request.
+ - The temperature setting (which controls how creative the responses should be).
+### Changed
+- **ApplicationContextUtils**: Simplified the getBeanForName method for more streamlined service retrieval.
+- **DefaultDaoExtensionHelper**: Improved property placeholder handling with Java streams for type safety and clarity, addressing SonarQube findings.
+- **Log4j2LoggerHelper**: Clarified variable names, added null checks, enhanced type safety, and improved Javadoc documentation.
+- **LogbackLoggerHelper**: Improved Javadoc documentation and suppressed false positives on Sonar.
+- **OntimizeJdbcDaoSupport**: Refactored SQL placeholder and property handling, introduced a helper method, corrected key retrieval logic, and improved type safety using Java streams to resolve SonarQube issues.
+- **OntimizeJdbcDaoSupportTest**: Updated mocks to match the modified method signatures.
+- **SharePreferencesBeanDefinitionParser**: Added null check and refactored engine parsing logic for safer and clearer configuration handling.
+- **XMLClientUtilities**: Refactored string concatenation to use StringBuilder for improved readability and performance.
## [5.13.0] - 2025-09-11
### Added ✔️
* **OntimizeJdbcDaoSupport**: Created executeSQLStatement() to use DDL statements. #175
@@ -67,7 +91,8 @@
* **POM**: Sorted pom alphabetically and sorted, extracted version to properties and put all dependencies into dependency manager.
* **Sonar**: Fix some sonar code smells.
-[unreleased]: https://github.com/ontimize/ontimize-jee/compare/5.13.0...HEAD
+[unreleased]: https://github.com/ontimize/ontimize-jee/compare/5.14.0...HEAD
+[5.14.0]: https://github.com/ontimize/ontimize-jee/compare/5.13.0...5.14.0
[5.13.0]: https://github.com/ontimize/ontimize-jee/compare/5.12.1...5.13.0
[5.12.1]: https://github.com/ontimize/ontimize-jee/compare/5.12.0...5.12.1
[5.12.0]: https://github.com/ontimize/ontimize-jee/compare/5.11.0...5.12.0
diff --git a/ontimize-jee-common/pom.xml b/ontimize-jee-common/pom.xml
index 816e6d8d..3f2cb3d4 100644
--- a/ontimize-jee-common/pom.xml
+++ b/ontimize-jee-common/pom.xml
@@ -4,7 +4,7 @@
com.ontimize.jeeontimize-jee
- 5.13.0
+ 5.14.0ontimize-jee-common
diff --git a/ontimize-jee-common/src/main/java/com/ontimize/jee/common/security/XMLClientUtilities.java b/ontimize-jee-common/src/main/java/com/ontimize/jee/common/security/XMLClientUtilities.java
index 4647ee14..e64ed76c 100644
--- a/ontimize-jee-common/src/main/java/com/ontimize/jee/common/security/XMLClientUtilities.java
+++ b/ontimize-jee-common/src/main/java/com/ontimize/jee/common/security/XMLClientUtilities.java
@@ -696,7 +696,7 @@ private static StringBuffer dom2StringInternal(Node node) {
if (namedNodeAttributes != null) {
for (int i = 0; i < namedNodeAttributes.getLength(); i++) {
Node n = namedNodeAttributes.item(i);
- sbAttributes.append(n.getNodeName() + "=\"" + n.getNodeValue() + "\" ");
+ sbAttributes.append(n.getNodeName()).append("=\"").append(n.getNodeValue()).append("\" ");
}
}
if (node.getNodeType() == Node.ELEMENT_NODE) {
diff --git a/ontimize-jee-server-jdbc/pom.xml b/ontimize-jee-server-jdbc/pom.xml
index 4d42295f..1bc8a38a 100644
--- a/ontimize-jee-server-jdbc/pom.xml
+++ b/ontimize-jee-server-jdbc/pom.xml
@@ -4,7 +4,7 @@
com.ontimize.jeeontimize-jee
- 5.13.0
+ 5.14.0ontimize-jee-server-jdbc
diff --git a/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupport.java b/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupport.java
index 67e2afec..a56ad74f 100644
--- a/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupport.java
+++ b/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupport.java
@@ -1,71 +1,9 @@
-/**
- *
- */
package com.ontimize.jee.server.dao.jdbc;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.SQLWarning;
-import java.sql.Statement;
-import java.sql.Types;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-
-import javax.sql.DataSource;
-import javax.xml.bind.JAXB;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.BeansException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.ApplicationContext;
-import org.springframework.context.ApplicationContextAware;
-import org.springframework.dao.DataAccessException;
-import org.springframework.dao.DataIntegrityViolationException;
-import org.springframework.dao.DataRetrievalFailureException;
-import org.springframework.dao.InvalidDataAccessApiUsageException;
-import org.springframework.dao.InvalidDataAccessResourceUsageException;
-import org.springframework.jdbc.SQLWarningException;
-import org.springframework.jdbc.core.ArgumentPreparedStatementSetter;
-import org.springframework.jdbc.core.BatchPreparedStatementSetter;
-import org.springframework.jdbc.core.ConnectionCallback;
-import org.springframework.jdbc.core.JdbcTemplate;
-import org.springframework.jdbc.core.PreparedStatementCreator;
-import org.springframework.jdbc.core.SqlParameterValue;
-import org.springframework.jdbc.core.SqlProvider;
-import org.springframework.jdbc.core.SqlTypeValue;
-import org.springframework.jdbc.core.StatementCreatorUtils;
-import org.springframework.jdbc.core.metadata.TableParameterMetaData;
-import org.springframework.jdbc.core.namedparam.SqlParameterSource;
-import org.springframework.jdbc.core.support.JdbcDaoSupport;
-import org.springframework.jdbc.support.GeneratedKeyHolder;
-import org.springframework.jdbc.support.JdbcUtils;
-import org.springframework.jdbc.support.KeyHolder;
-import org.springframework.util.Assert;
-
import com.ontimize.jee.common.db.AdvancedEntityResult;
import com.ontimize.jee.common.db.AdvancedEntityResultMapImpl;
import com.ontimize.jee.common.db.NullValue;
import com.ontimize.jee.common.db.SQLStatementBuilder;
-import com.ontimize.jee.common.db.SQLStatementBuilder.BasicExpression;
-import com.ontimize.jee.common.db.SQLStatementBuilder.BasicField;
-import com.ontimize.jee.common.db.SQLStatementBuilder.ExtendedSQLConditionValuesProcessor;
-import com.ontimize.jee.common.db.SQLStatementBuilder.Operator;
-import com.ontimize.jee.common.db.SQLStatementBuilder.SQLOrder;
-import com.ontimize.jee.common.db.SQLStatementBuilder.SQLStatement;
import com.ontimize.jee.common.db.handler.SQLStatementHandler;
import com.ontimize.jee.common.db.util.DBFunctionName;
import com.ontimize.jee.common.dto.EntityResult;
@@ -74,11 +12,7 @@
import com.ontimize.jee.common.gui.field.MultipleTableAttribute;
import com.ontimize.jee.common.gui.field.ReferenceFieldAttribute;
import com.ontimize.jee.common.naming.I18NNaming;
-import com.ontimize.jee.common.tools.CheckingTools;
-import com.ontimize.jee.common.tools.Chronometer;
-import com.ontimize.jee.common.tools.ObjectTools;
-import com.ontimize.jee.common.tools.Pair;
-import com.ontimize.jee.common.tools.StringTools;
+import com.ontimize.jee.common.tools.*;
import com.ontimize.jee.common.tools.streamfilter.ReplaceTokensFilterReader;
import com.ontimize.jee.server.dao.DaoProperty;
import com.ontimize.jee.server.dao.IOntimizeDaoSupport;
@@ -87,2109 +21,2190 @@
import com.ontimize.jee.server.dao.common.INameConvention;
import com.ontimize.jee.server.dao.common.INameConverter;
import com.ontimize.jee.server.dao.jdbc.extension.IDaoExtensionHelper;
-import com.ontimize.jee.server.dao.jdbc.setup.AmbiguousColumnType;
-import com.ontimize.jee.server.dao.jdbc.setup.FunctionColumnType;
-import com.ontimize.jee.server.dao.jdbc.setup.JdbcEntitySetupType;
-import com.ontimize.jee.server.dao.jdbc.setup.OrderColumnType;
-import com.ontimize.jee.server.dao.jdbc.setup.QueryType;
+import com.ontimize.jee.server.dao.jdbc.setup.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.ApplicationContextAware;
+import org.springframework.dao.*;
+import org.springframework.jdbc.SQLWarningException;
+import org.springframework.jdbc.core.*;
+import org.springframework.jdbc.core.metadata.TableParameterMetaData;
+import org.springframework.jdbc.core.namedparam.SqlParameterSource;
+import org.springframework.jdbc.core.support.JdbcDaoSupport;
+import org.springframework.jdbc.support.GeneratedKeyHolder;
+import org.springframework.jdbc.support.JdbcUtils;
+import org.springframework.jdbc.support.KeyHolder;
+import org.springframework.util.Assert;
+
+import javax.sql.DataSource;
+import javax.xml.bind.JAXB;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.sql.*;
+import java.util.*;
+import java.util.function.Function;
+import java.util.stream.Collectors;
/**
* The Class OntimizeJdbcDaoSupport.
*/
public class OntimizeJdbcDaoSupport extends JdbcDaoSupport implements ApplicationContextAware, IOntimizeDaoSupport {
- /** The logger. */
- protected static final Logger logger = LoggerFactory.getLogger(OntimizeJdbcDaoSupport.class);
-
- /** The Constant PLACEHOLDER_ORDER. */
- protected static final String PLACEHOLDER_ORDER = "#ORDER#";
-
- /** The Constant PLACEHOLDER_ORDER_CONCAT. */
- protected static final String PLACEHOLDER_ORDER_CONCAT = "#ORDER_CONCAT#";
+ /**
+ * The logger.
+ */
+ protected static final Logger logger = LoggerFactory.getLogger(OntimizeJdbcDaoSupport.class);
+
+ /**
+ * The Constant PLACEHOLDER_ORDER.
+ */
+ protected static final String PLACEHOLDER_ORDER = "#ORDER#";
+
+ /**
+ * The Constant PLACEHOLDER_ORDER_CONCAT.
+ */
+ protected static final String PLACEHOLDER_ORDER_CONCAT = "#ORDER_CONCAT#";
+
+ /**
+ * The Constant PLACEHOLDER_WHERE.
+ */
+ protected static final String PLACEHOLDER_WHERE = "#WHERE#";
+
+ /**
+ * The Constant PLACEHOLDER_WHERE_CONCAT.
+ */
+ protected static final String PLACEHOLDER_WHERE_CONCAT = "#WHERE_CONCAT#";
+
+ /**
+ * The Constant PLACEHOLDER_COLUMNS.
+ */
+ protected static final String PLACEHOLDER_COLUMNS = "#COLUMNS#";
+
+ /**
+ * The Constant PLACEHOLDER_SCHEMA.
+ */
+ protected static final String PLACEHOLDER_SCHEMA = "#SCHEMA#";
+
+ /**
+ * Context used to retrieve and manage database metadata.
+ */
+ protected final OntimizeTableMetaDataContext tableMetaDataContext;
+
+ /**
+ * List of columns objects to be used in insert statement.
+ */
+ protected final List declaredColumns = new ArrayList<>();
+ /**
+ * Queries.
+ */
+ protected final Map sqlQueries = new HashMap<>();
+ /**
+ * Has this operation been compiled? Compilation means at least checking that a DataSource or
+ * JdbcTemplate has been provided, but subclasses may also implement their own custom validation.
+ */
+ private boolean compiled = false;
+ private String[] generatedKeyNames = {};
+ /**
+ * The statement builder.
+ */
+ private SQLStatementHandler statementHandler;
+ /**
+ * The bean property converter.
+ */
+ private INameConverter nameConverter;
+ /**
+ * Mandatory delete keys.
+ */
+ private List deleteKeys;
+ /**
+ * Mandatory update keys.
+ */
+ private List updateKeys;
+ /**
+ * The application context.
+ */
+ private ApplicationContext applicationContext;
+
+ /**
+ * Configuration file
+ */
+ private String configurationFile = null;
+
+ /**
+ * Configuration file placeholder
+ */
+ private String configurationFilePlaceholder = null;
+
+ /**
+ * Name convention
+ */
+ @Autowired
+ private INameConvention nameConvention;
+
+ /**
+ * Dao Extension helper.
+ */
+ @Autowired(required = false)
+ private IDaoExtensionHelper daoExtensionHelper;
+
+ /**
+ * Instantiates a new ontimize jdbc dao support.
+ */
+ public OntimizeJdbcDaoSupport() {
+ super();
+ this.tableMetaDataContext = this.createTableMetadataContext();
+ }
+
+ public OntimizeJdbcDaoSupport(final String configurationFile, final String configurationFilePlaceholder) {
+ this();
+ this.configurationFile = configurationFile;
+ this.configurationFilePlaceholder = configurationFilePlaceholder;
+ }
+
+ @Override
+ public EntityResult query(Map, ?> keysValues, List> attributes, List> sort, String queryId) {
+ return this.query(keysValues, attributes, sort, queryId, null);
+ }
+
+ /**
+ * Query.
+ *
+ * @param keysValues the keys values
+ * @param attributes the attributes
+ * @param sort the sort
+ * @param queryId the query id
+ * @return the entity result
+ */
+ @Override
+ public EntityResult query(final Map, ?> keysValues, final List> attributes, final List> sort, final String queryId, ISQLQueryAdapter queryAdapter) {
+ this.checkCompiled();
+ final QueryTemplateInformation queryTemplateInformation = this.getQueryTemplateInformation(queryId);
+
+ final SQLStatementBuilder.SQLStatement stSQL = this.composeQuerySql(queryId, attributes, keysValues, sort, null, queryAdapter);
+
+ final String sqlQuery = stSQL.getSQLStatement();
+ final List> vValues = stSQL.getValues();
+ // TODO los atributos que se pasan al entityresultsetextractor tienen que ir "desambiguados" porque
+ // cuando el DefaultSQLStatementHandler busca
+ // las columnas toUpperCase y toLowerCase no tiene en cuenta el '.'
+ Chronometer chrono = new Chronometer().start();
+ try {
+
+ JdbcTemplate jdbcTemplate = this.getJdbcTemplate();
+
+ if (jdbcTemplate != null) {
+
+ ArgumentPreparedStatementSetter pss = new ArgumentPreparedStatementSetter(vValues.toArray());
- /** The Constant PLACEHOLDER_WHERE. */
- protected static final String PLACEHOLDER_WHERE = "#WHERE#";
-
- /** The Constant PLACEHOLDER_WHERE_CONCAT. */
- protected static final String PLACEHOLDER_WHERE_CONCAT = "#WHERE_CONCAT#";
-
- /** The Constant PLACEHOLDER_COLUMNS. */
- protected static final String PLACEHOLDER_COLUMNS = "#COLUMNS#";
-
- /** The Constant PLACEHOLDER_SCHEMA. */
- protected static final String PLACEHOLDER_SCHEMA = "#SCHEMA#";
+ return jdbcTemplate.query(sqlQuery, pss, new EntityResultResultSetExtractor(this.getStatementHandler(), queryTemplateInformation, attributes));
+ }
- /** Context used to retrieve and manage database metadata. */
- protected final OntimizeTableMetaDataContext tableMetaDataContext;
+ return new EntityResultMapImpl(EntityResult.OPERATION_WRONG, EntityResult.NODATA_RESULT);
- /** List of columns objects to be used in insert statement. */
- protected final List declaredColumns = new ArrayList<>();
-
- /**
- * Has this operation been compiled? Compilation means at least checking that a DataSource or
- * JdbcTemplate has been provided, but subclasses may also implement their own custom validation.
- */
- private boolean compiled = false;
+ } finally {
+ OntimizeJdbcDaoSupport.logger.trace("Time consumed in query+result= {} ms", chrono.stopMs());
+ }
+ }
+
+ @Override
+ public AdvancedEntityResult paginationQuery(Map, ?> keysValues, List> attributes, int recordNumber, int startIndex, List> orderBy, String queryId) {
+ return this.paginationQuery(keysValues, attributes, recordNumber, startIndex, orderBy, queryId, null);
+ }
+
+ /**
+ * Pageable query.
+ *
+ * @param keysValues the keys values
+ * @param attributes the attributes
+ * @param recordNumber number of records to query
+ * @param startIndex number of first row
+ * @param orderBy list of columns to establish the order
+ * @param queryId the query id
+ * @return the entity result
+ */
+ @Override
+ public AdvancedEntityResult paginationQuery(Map, ?> keysValues, List> attributes, int recordNumber, int startIndex, List> orderBy, String queryId, ISQLQueryAdapter queryAdapter) {
+ this.checkCompiled();
+ final QueryTemplateInformation queryTemplateInformation = this.getQueryTemplateInformation(queryId);
+ final SQLStatementBuilder.SQLStatement stSQL = this.composeQuerySql(queryId, attributes, keysValues, orderBy, new PageableInfo(recordNumber, startIndex), queryAdapter);
+ final String sqlQuery = stSQL.getSQLStatement();
+ final List> vValues = stSQL.getValues();
+
+ ArgumentPreparedStatementSetter pss = new ArgumentPreparedStatementSetter(vValues.toArray());
- private String[] generatedKeyNames = {};
+ JdbcTemplate jdbcTemplate = this.getJdbcTemplate();
- /** The statement builder. */
- private SQLStatementHandler statementHandler;
+ if (jdbcTemplate != null) {
+ AdvancedEntityResult advancedER = jdbcTemplate.query(new SimpleScrollablePreparedStatementCreator(sqlQuery), pss, new AdvancedEntityResultResultSetExtractor(this.getStatementHandler(), queryTemplateInformation, attributes, recordNumber, startIndex));
+
+ advancedER.setTotalRecordCount(this.getQueryRecordNumber(keysValues, queryId));
+ return advancedER;
+
+ }
+
+ return new AdvancedEntityResultMapImpl(EntityResult.OPERATION_WRONG, EntityResult.NODATA_RESULT);
+
+ }
- /** The bean property converter. */
- private INameConverter nameConverter;
+ protected int getQueryRecordNumber(Map, ?> keysValues, final String queryId) {
+ final QueryTemplateInformation queryTemplateInformation = this.getQueryTemplateInformation(queryId);
+ final Map, ?> kvWithoutReferenceAttributes = this.processReferenceDataFieldAttributes(keysValues);
+ Map
- *
- * @param sqlStatement The SQL statement to execute.
- * @param vValues The list of values for the prepared statement.
- */
- public boolean executeComposeSQLStatement(String sqlStatement, List> vValues) {
- Chronometer chrono = new Chronometer().start();
- try {
- JdbcTemplate jdbcTemplate = this.getJdbcTemplate();
- if (jdbcTemplate != null) {
- jdbcTemplate.execute((ConnectionCallback) con -> {
- PreparedStatement ps = con.prepareStatement(sqlStatement);
- ArgumentPreparedStatementSetter pss = new ArgumentPreparedStatementSetter(vValues.toArray());
- pss.setValues(ps);
- return ps.execute();
- });
- }
- return false;
- } finally {
- logger.trace("Time consumed in statement= {} ms", chrono.stopMs());
- }
- }
-
- /**
- * Creates the row mapper.
- * @param the generic type
- * @param clazz the clazz
- * @return the bean property row mapper
- */
- protected BeanPropertyRowMapper createRowMapper(final Class clazz) {
- return new BeanPropertyRowMapper<>(this.getNameConverter(), this.getDataSource(), clazz);
- }
-
- /**
- * Apply template prefix.
- * @param templateInformation the template information
- * @param vValidAttributes the v valid attributes
- * @return the list
- */
- protected List> applyTransformations(final QueryTemplateInformation templateInformation,
- final List> vValidAttributes) {
- final List ambiguousColumns = templateInformation.getAmbiguousColumns();
- final List functionColumns = templateInformation.getFunctionColumns();
-
- final List res = new ArrayList<>(vValidAttributes.size());
- for (final Object ob : vValidAttributes) {
- boolean transformed = false;
- if (ambiguousColumns != null) {
- for (final AmbiguousColumnType ambiguosColumn : ambiguousColumns) {
- if (ob.toString().toUpperCase().equals(ambiguosColumn.getName().toUpperCase())) {
- final String dbName = ambiguosColumn.getDatabaseName() == null ? ambiguosColumn.getName()
- : ambiguosColumn.getDatabaseName();
- final StringBuilder sb = new StringBuilder();
- sb.append(ambiguosColumn.getPrefix());
- sb.append(".");
- sb.append(dbName);
- sb.append(SQLStatementBuilder.AS);
- sb.append(ambiguosColumn.getName());
- res.add(sb.toString());
- transformed = true;
- break;
- }
- }
- }
- if (!transformed && (functionColumns != null)) {
- for (final FunctionColumnType functionColumn : functionColumns) {
- if (ob.toString().toUpperCase().equals(functionColumn.getName().toUpperCase())) {
- final StringBuilder sb = new StringBuilder();
- sb.append(SQLStatementBuilder.OPEN_PARENTHESIS);
- sb.append(functionColumn.getValue());
- sb.append(SQLStatementBuilder.CLOSE_PARENTHESIS);
- sb.append(SQLStatementBuilder.AS);
- sb.append(functionColumn.getName());
- res.add(sb.toString());
- transformed = true;
- break;
- }
- }
- }
- if (!transformed) {
- res.add(ob);
- }
- }
- return res;
- }
-
- /**
- * Apply template prefix.
- * @param templateInformation the template information
- * @param kvValidKeysValues the kv valid keys values
- * @return the Map
- */
- protected Map applyTransformations(final QueryTemplateInformation templateInformation,
- final Map kvValidKeysValues) {
- final List ambiguousColumns = templateInformation.getAmbiguousColumns();
- final List functionColumns = templateInformation.getFunctionColumns();
-
- final Map res = new HashMap<>();
- for (final Entry kvEntry : kvValidKeysValues.entrySet()) {
- if (kvEntry.getKey() instanceof String) {
- String key = (String) kvEntry.getKey();
- boolean transformed = false;
- if ( (ExtendedSQLConditionValuesProcessor.EXPRESSION_KEY.equals(key) || ExtendedSQLConditionValuesProcessor.FILTER_KEY.equals(key))
- && (kvEntry.getValue() instanceof BasicExpression)) {
- res.put(key, this.applyTransformationsToBasicExpression((BasicExpression) kvEntry.getValue(),
- ambiguousColumns, functionColumns));
- transformed = true;
- } else {
- String resolvedAmbiguousColumn = this.resolveAmbiguousColumn(key, ambiguousColumns);
- if (resolvedAmbiguousColumn != null) {
- res.put(resolvedAmbiguousColumn, kvEntry.getValue());
- transformed = true;
- } else {
- String resolvedFunctionColumn = this.resolveFunctionColumn(key, functionColumns);
- if (resolvedFunctionColumn != null) {
- res.put(resolvedFunctionColumn, kvEntry.getValue());
- transformed = true;
- }
- }
- }
- if (!transformed) {
- res.put(key, kvEntry.getValue());
- }
- } else {
- res.put(kvEntry.getKey(), kvEntry.getValue());
- }
- }
- return res;
- }
-
- protected List applyOrderColumns(final List> sort, final List orderColumns) {
- List vResult = new ArrayList<>();
- if ((sort != null) && (sort.size() > 0)) {
- vResult.addAll(sort);
- }
-
- if ((orderColumns != null) && (orderColumns.size() > 0)) {
- for (OrderColumnType orderColumnType : orderColumns) {
- SQLOrder sqlOrder = new SQLOrder(orderColumnType.getName(), "ASC".equals(orderColumnType.getType()));
- vResult.add(sqlOrder);
- }
- }
-
- return vResult;
- }
-
- /**
- * Resolve function column.
- * @param key the key
- * @param functionColumns the function columns
- * @return the string
- */
- protected String resolveFunctionColumn(String key, List functionColumns) {
- if (functionColumns != null) {
- for (final FunctionColumnType functionColumn : functionColumns) {
- if (key.toString().toUpperCase().equals(functionColumn.getName().toUpperCase())) {
- return functionColumn.getValue();
- }
- }
- }
- return null;
- }
-
- /**
- * Resolve ambiguous column.
- * @param key the key
- * @param ambiguousColumns the ambiguous columns
- * @return the string
- */
- protected String resolveAmbiguousColumn(String key, List ambiguousColumns) {
- if (ambiguousColumns != null) {
- for (final AmbiguousColumnType ambiguosColumn : ambiguousColumns) {
- if (key.toUpperCase().equals(ambiguosColumn.getName().toUpperCase())) {
- final String dbName = ambiguosColumn.getDatabaseName() == null ? key
- : ambiguosColumn.getDatabaseName();
- return ambiguosColumn.getPrefix() + "." + dbName;
- }
- }
- }
- return null;
- }
-
- /**
- * Apply transformations to basic expression.
- * @param functionColumns
- * @param ambiguousColumns
- * @param value the value
- * @return the object
- */
- protected BasicExpression applyTransformationsToBasicExpression(final BasicExpression original,
- List ambiguousColumns,
- List functionColumns) {
- Object originalLeftOperand = original.getLeftOperand();
- Operator originalOperator = original.getOperator();
- Object originalRightOperand = original.getRightOperand();
- Object transformedLeftOperand = null;
- Operator transformedOperator = originalOperator;
- Object transformedRightOperand = null;
- if (originalLeftOperand instanceof BasicField) {
- transformedLeftOperand = this.applyTransformationsToBasicField((BasicField) originalLeftOperand,
- ambiguousColumns, functionColumns);
- } else if (originalLeftOperand instanceof BasicExpression) {
- transformedLeftOperand = this.applyTransformationsToBasicExpression((BasicExpression) originalLeftOperand,
- ambiguousColumns, functionColumns);
- } else {
- transformedLeftOperand = originalLeftOperand;
- }
-
- if (originalRightOperand instanceof BasicField) {
- transformedRightOperand = this.applyTransformationsToBasicField((BasicField) originalRightOperand,
- ambiguousColumns, functionColumns);
- } else if (originalRightOperand instanceof BasicExpression) {
- transformedRightOperand = this.applyTransformationsToBasicExpression((BasicExpression) originalRightOperand,
- ambiguousColumns, functionColumns);
- } else {
- transformedRightOperand = originalRightOperand;
- }
-
- return new BasicExpression(transformedLeftOperand, transformedOperator, transformedRightOperand);
- }
-
- /**
- * Apply transformations to basic field.
- * @param originalField the original field
- * @param ambiguousColumns the ambiguous columns
- * @param functionColumns the function columns
- * @return the basic field
- */
- protected BasicField applyTransformationsToBasicField(BasicField originalField,
- List ambiguousColumns, List functionColumns) {
- String columnName = originalField.toString();
- Integer columnType = originalField.getSqlType();
- if (columnType == null) columnType = this.getColumnSQLType(columnName);
- String resolvedAmbiguousColumn = this.resolveAmbiguousColumn(columnName, ambiguousColumns);
- if (resolvedAmbiguousColumn != null) {
- return new BasicField(resolvedAmbiguousColumn, columnType);
- }
- String resolvedFunctionColumn = this.resolveFunctionColumn(columnName, functionColumns);
- if (resolvedFunctionColumn != null) {
- return new BasicField(resolvedFunctionColumn, columnType);
- }
- return new BasicField(columnName, columnType);
- }
-
- /*
- * (non-Javadoc)
- *
- * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#insert(java.util.Map)
- */
- /**
- * Insert.
- * @param attributesValues the attributes values
- * @return the entity result
- */
- @Override
- public EntityResult insert(final Map, ?> attributesValues) {
- this.checkCompiled();
- final EntityResult erResult = new EntityResultMapImpl();
-
- final Map, ?> avWithoutMultipleTableAttributes = this.processMultipleTableAttribute(attributesValues);
- final Map, ?> avWithoutReferenceAttributes = this
- .processReferenceDataFieldAttributes(avWithoutMultipleTableAttributes);
- final Map, ?> avWithoutMultipleValueAttributes = this
- .processMultipleValueAttributes(avWithoutReferenceAttributes);
- final Map avValidPre = this
- .getValidAttributes(this.processStringKeys(avWithoutMultipleValueAttributes));
- final Map avValid = this.removeNullValues(avValidPre);
- if (avValid.isEmpty()) {
- // TODO se deber�a lanzar excepci�n, pero puede tener colaterales con la one-2-one
- OntimizeJdbcDaoSupport.logger.warn("Insert: Attributes does not contain any pair key-value valid");
- return erResult;
- }
-
- if (this.getGeneratedKeyNames().length < 1) {
- final int res = this.doExecuteInsert(avValid);
- if (res != 1) {
- throw new SQLWarningException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD, null);
- }
- } else if (this.getGeneratedKeyNames().length == 1) {
- final Object res = this.doExecuteInsertAndReturnKey(avValid);
- if (res == null) {
- throw new DataRetrievalFailureException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD);
- }
- erResult.put(this.nameConvention.convertName(this.getGeneratedKeyNames()[0]), res);
- }
- return erResult;
- }
-
- /**
- * Removes the null values.
- * @param inputAttributesValues the input attributes values
- * @return the map
- */
- protected Map removeNullValues(Map inputAttributesValues) {
- final Map hValidKeysValues = new HashMap<>();
- for (final Entry entry : inputAttributesValues.entrySet()) {
- final String oKey = entry.getKey();
- final Object oValue = entry.getValue();
- if ((oValue != null) && !(oValue instanceof NullValue)) {
- hValidKeysValues.put(oKey, oValue);
- }
- }
- return hValidKeysValues;
- }
-
- /*
- * (non-Javadoc)
- *
- * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#unsafeUpdate(java.util .Map,
- * java.util.Map)
- */
- /**
- * Unsafe update.
- * @param attributesValues the attributes values
- * @param keysValues the keys values
- * @return the entity result
- */
- @Override
- public EntityResult unsafeUpdate(final Map, ?> attributesValues, final Map, ?> keysValues) {
- return this.innerUpdate(attributesValues, keysValues, false);
- }
-
- /*
- * (non-Javadoc)
- *
- * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#update(java.util.Map, java.util.Map)
- */
- /**
- * Update.
- * @param attributesValues the attributes values
- * @param keysValues the keys values
- * @return the entity result
- */
- @Override
- public EntityResult update(final Map, ?> attributesValues, final Map, ?> keysValues) {
- return this.innerUpdate(attributesValues, keysValues, true);
- }
-
- /**
- * Inner update.
- * @param attributesValues the attributes values
- * @param keysValues the keys values
- * @param safe the safe
- * @return the entity result
- */
- protected EntityResult innerUpdate(final Map, ?> attributesValues, final Map, ?> keysValues,
- final boolean safe) {
- this.checkCompiled();
- final EntityResult erResult = new EntityResultMapImpl();
-
- // Check the primary keys
- final Map, ?> avWithoutMultipleTableAttributes = this.processMultipleTableAttribute(attributesValues);
- final Map, ?> avWithoutReferenceAttributes = this
- .processReferenceDataFieldAttributes(avWithoutMultipleTableAttributes);
- final Map, ?> avWithoutMultipleValue = this.processMultipleValueAttributes(avWithoutReferenceAttributes);
-
- final Map, ?> kvWithoutMulpleTableAttributes = this.processMultipleTableAttribute(keysValues);
- final Map, ?> kvWithoutReferenceAttributessRef = this
- .processReferenceDataFieldAttributes(kvWithoutMulpleTableAttributes);
-
- final Map hValidAttributesValues = this.getValidAttributes(avWithoutMultipleValue);
- Map, ?> hValidKeysValues = null;
- if (safe) {
- hValidKeysValues = this.getValidUpdatingKeysValues(kvWithoutReferenceAttributessRef);
- this.checkUpdateKeys(hValidKeysValues);
- } else {
- hValidKeysValues = kvWithoutReferenceAttributessRef;
- }
-
- if (hValidAttributesValues.isEmpty() || hValidKeysValues.isEmpty()) {
- OntimizeJdbcDaoSupport.logger.debug("Update: Attributes or Keys do not contain any pair key-value valid");
- throw new SQLWarningException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD, null);
- }
- final SQLStatement stSQL = this.getStatementHandler()
- .createUpdateQuery(this.getSchemaTable(), new HashMap<>(hValidAttributesValues),
- new HashMap<>(hValidKeysValues));
- final String sqlQuery = stSQL.getSQLStatement();
- final List> vValues = this.processNullValues(stSQL.getValues());
- final int update = this.getJdbcTemplate().update(sqlQuery, vValues.toArray());
- if (update == 0) {
- erResult.setCode(EntityResult.OPERATION_SUCCESSFUL_SHOW_MESSAGE);
- erResult.setMessage(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD);
- }
- return erResult;
- }
-
- /**
- * Process null values.
- * @param values the values
- * @return the list
- */
- protected List> processNullValues(final List> values) {
- for (int i = 0; i < values.size(); i++) {
- final Object ob = values.get(i);
- if (ob instanceof NullValue) {
- ((List) values).set(i, new SqlParameterValue(((NullValue) ob).getSQLDataType(), null));
- }
- }
- return values;
- }
-
- /*
- * (non-Javadoc)
- *
- * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#unsafeDelete(java.util .Map)
- */
- /**
- * Unsafe delete.
- * @param keysValues the keys values
- * @return the entity result
- */
- @Override
- public EntityResult unsafeDelete(final Map, ?> keysValues) {
- return this.innerDelete(keysValues, false);
- }
-
- /*
- * (non-Javadoc)
- *
- * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#delete(java.util.Map)
- */
- /**
- * Delete.
- * @param keysValues the keys values
- * @return the entity result
- */
- @Override
- public EntityResult delete(final Map, ?> keysValues) {
- return this.innerDelete(keysValues, true);
- }
-
- /**
- * Inner delete.
- * @param keysValues the keys values
- * @param safe the safe
- * @return the entity result
- */
- public EntityResult innerDelete(final Map, ?> keysValues, final boolean safe) {
- this.checkCompiled();
- final EntityResult erResult = new EntityResultMapImpl();
- Map, ?> keysValuesChecked = keysValues;
- if (safe) {
- keysValuesChecked = this.checkDeleteKeys(keysValues);
- }
-
- if (keysValuesChecked.isEmpty()) {
- OntimizeJdbcDaoSupport.logger
- .debug("Delete: Keys does not contain any pair key-value valid:" + keysValues);
- throw new SQLWarningException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD, null);
- }
-
- final SQLStatement stSQL = this.getStatementHandler()
- .createDeleteQuery(this.getSchemaTable(), new HashMap<>(keysValuesChecked));
- final String sqlQuery = stSQL.getSQLStatement();
- final List> vValues = stSQL.getValues();
- this.getJdbcTemplate().update(sqlQuery, vValues.toArray());
-
- return erResult;
- }
-
- /**
- * Checks if keysValues contains a value for all columns defined in 'delete_keys'
- * parameter.
- *
- * @param keysValues the keys values
- */
- protected Map checkDeleteKeys(final Map, ?> keysValues) {
- Map res = new HashMap<>();
- for (String element : this.deleteKeys) {
- String mapKey = element;
- if (!keysValues.containsKey(mapKey)) {
- throw new SQLWarningException("M_NECESSARY_" + mapKey.toUpperCase(), null);
- }
- res.put(mapKey, keysValues.get(mapKey));
- }
- OntimizeJdbcDaoSupport.logger.debug(" Delete valid keys values: Input: {} -> Result: {}", keysValues, res);
- return res;
- }
-
- /**
- * Checks if keysValues contains a value for all columns defined in 'update_keys'
- * parameter.
- *
- * @param keysValues the keys values
- */
- protected void checkUpdateKeys(final Map, ?> keysValues) {
- for (String element : this.updateKeys) {
- if (!keysValues.containsKey(element)) {
- throw new SQLWarningException("M_NECESSARY_" + element.toUpperCase(), new SQLWarning());
- }
- }
- }
-
- /**
- * Returns a Map containing a list of valid key-value pairs from those contained in the
- * keysValues argument.
- *
- * A key-value pair is valid if the key is valid.
- *
- * Only keys matching (case-sensitive) any of the columns defined by the 'update_keys' parameter are
- * considered valid.
- *
- * @param keysValues the keys values
- * @return the valid updating keys values
- */
- public Map, ?> getValidUpdatingKeysValues(final Map, ?> keysValues) {
- final Map hValidKeysValues = new HashMap<>();
- for (String element : this.updateKeys) {
- if (keysValues.containsKey(element)) {
- hValidKeysValues.put(element, keysValues.get(element));
- }
- }
- OntimizeJdbcDaoSupport.logger
- .debug(" Update valid keys values: Input: " + keysValues + " -> Result: " + hValidKeysValues);
- return hValidKeysValues;
- }
-
- /**
- * Returns a Map containing a list of valid key-value pairs from those contained in the
- * attributesValues argument.
- *
- * A key-value pair is valid if the key is in the table column list.
- *
- * @param inputAttributesValues the attributes values
- * @return the valid attributes
- */
- public Map getValidAttributes(final Map, ?> inputAttributesValues) {
- final Map hValidKeysValues = new HashMap<>();
- List nameConventionTableColumns = this.tableMetaDataContext.getNameConventionTableColumns();
- for (final Entry, ?> entry : inputAttributesValues.entrySet()) {
- final Object oKey = entry.getKey();
- final Object oValue = entry.getValue();
- if (nameConventionTableColumns.contains(oKey)) {
- hValidKeysValues.put((String) oKey, oValue);
- }
- }
- OntimizeJdbcDaoSupport.logger.debug(
- " Update valid attributes values: Input: " + inputAttributesValues + " -> Result: " + hValidKeysValues);
- return hValidKeysValues;
- }
-
- /**
- * Processes all the MultipleTableAttribute contained as keys ih the Map av. All other
- * objects are added to the resulting List with no changes. The MultipleTableAttribute objects are
- * replaced by their attribute.
- * @param av the av
- * @return a new HashMap with the processed objects.
- */
- protected Map, ?> processMultipleTableAttribute(final Map, ?> av) {
- final Map res = new HashMap<>();
- for (final Entry, ?> entry : av.entrySet()) {
- final Object oKey = entry.getKey();
- final Object oValue = entry.getValue();
- if (oKey instanceof MultipleTableAttribute) {
- res.put(((MultipleTableAttribute) oKey).getAttribute(), oValue);
- } else {
- res.put(oKey, oValue);
- }
- }
- return res;
- }
-
- /**
- * Processes the ReferenceFieldAttribute objects contained in keysValues.
- *
- * Returns a Map containing all the objects contained in the argument keysValues except
- * in the case of keys that are ReferenceFieldAttribute objects, which are replaced by
- * ((ReferenceFieldAttribute)object).getAttr()
- *
- * @param keysValues the keysValues to process
- * @return a Map containing the processed objects
- */
- public Map, ?> processReferenceDataFieldAttributes(final Map, ?> keysValues) {
- if (keysValues == null) {
- return null;
- }
- final Map res = new HashMap<>();
- for (final Entry, ?> entry : keysValues.entrySet()) {
- final Object oKey = entry.getKey();
- final Object oValue = entry.getValue();
- if (oKey instanceof ReferenceFieldAttribute) {
- final String attr = ((ReferenceFieldAttribute) oKey).getAttr();
- res.put(attr, oValue);
- } else {
- res.put(oKey, oValue);
- }
- }
- return res;
- }
-
- /**
- * Processes the ReferenceFieldAttribute objects contained in list.
- *
- * Returns a List containing all the objects in the argument list except in the case of
- * keys that are ReferenceFieldAttribute objects, which are maintained but also
- * ((ReferenceFieldAttribute)object).getAttr() is added
- *
- * @param list the list to process
- * @return a List containing the processed objects
- */
- public List> processReferenceDataFieldAttributes(final List> list) {
- if (list == null) {
- return null;
- }
- final List res = new ArrayList<>();
- for (final Object ob : list) {
- // Add the attribute
- if (!res.contains(ob)) {
- res.add(ob);
- }
- // If the attribute is ReferenceFieldAttribute add the string to
- if ((ob instanceof ReferenceFieldAttribute) && !res.contains(((ReferenceFieldAttribute) ob).getAttr())) {
- res.add(((ReferenceFieldAttribute) ob).getAttr());
- }
- }
- return res;
- }
-
- /**
- * Returns a list containing the valid attributes of those included in the List
- * attributes
- *
- * If valid column names have been specified for this entity, only attributes matching
- * (case-sensitive) any of this column names are considered valid.
- *
- * If no columns have been defined, all attributes will be considered valid.
- * @param attributes the attributes
- * @param validColumns the valid columns
- * @return a List with the valid attributes
- */
- public List> getValidAttributes(final List> attributes, List validColumns) {
- List inputValidColumns = validColumns == null ? (List) Collections.EMPTY_LIST : validColumns;
- final List validAttributes = new ArrayList<>();
- for (final Object ob : attributes) {
- if ((ob instanceof String) || (ob instanceof DBFunctionName)) {
- boolean isValid = true;
- if (ob instanceof String) {
- if (ExtendedSQLConditionValuesProcessor.EXPRESSION_KEY.equals(ob)) {
- isValid = true;
- } else if (!inputValidColumns.isEmpty() && !inputValidColumns.contains(ob)) {
- isValid = false;
- } else {
- isValid = this.isColumnNameValid((String) ob);
- }
- }
- if (isValid) {
- validAttributes.add(ob);
- }
- }
- }
- return validAttributes;
- }
-
- /**
- * Checks if is column name valid.
- * @param ob the ob
- * @return true, if is column name valid
- */
- protected boolean isColumnNameValid(String ob) {
- boolean notValid = ob.contains(" ") || ob.contains("*");
- return !notValid;
- }
-
- /**
- * Returns a cleaned map containing the valid pairs of those included in the map
- * inputKeysValues
- *
- * If valid column names have been specified for this entity/query, only attributes matching
- * (case-sensitive) any of this column names are considered valid.
- *
- * If no columns have been defined, all attributes will be considered valid.
- *
- * Returns cleaned keys values to do query according valid columns (if defined).
- * @param inputKeysValues
- * @param validColumns
- * @return
- */
- protected Map getValidQueryingKeysValues(Map inputKeysValues,
- List validColumns) {
- if ((validColumns == null) || validColumns.isEmpty()) {
- return inputKeysValues;
- }
- final Map hValidKeysValues = new HashMap<>();
- for (Entry entry : inputKeysValues.entrySet()) {
- if (ExtendedSQLConditionValuesProcessor.EXPRESSION_KEY.equals(entry.getKey())
- || validColumns.contains(entry.getKey())) {
- hValidKeysValues.put(entry.getKey(), entry.getValue());
- }
- }
- OntimizeJdbcDaoSupport.logger
- .debug(" Query valid keys values: Input: " + inputKeysValues + " -> Result: " + hValidKeysValues);
- return hValidKeysValues;
- }
-
- /**
- * Processes the MultipleValue objects contained in keysValues. Returns a new HashMap
- * with the same data as keysValues except that MultipleValue objects are deleted and
- * the key-value pairs of these objects are added to the new HashMap.
- * @param keysValues the keys values
- * @return a new HashMap with MultipleValue objects replaced by their key-value pairs
- */
- public Map, ?> processMultipleValueAttributes(final Map, ?> keysValues) {
- if (keysValues == null) {
- return null;
- }
- final Map res = new HashMap<>();
- for (final Entry, ?> entry : keysValues.entrySet()) {
- final Object oKey = entry.getKey();
- final Object oValue = entry.getValue();
- if (oValue instanceof MultipleValue) {
- final Enumeration> mvKeys = ((MultipleValue) oValue).keys();
- while (mvKeys.hasMoreElements()) {
- final Object iMvKeyM = mvKeys.nextElement();
- final Object oMvValue = ((MultipleValue) oValue).get(iMvKeyM);
- res.put(iMvKeyM, oMvValue);
- }
- } else {
- res.put(oKey, oValue);
- }
- }
- return res;
- }
-
- /**
- * Processes the keys in order to get String as column name.
- * @param keysValues the keys values
- * @return a new HashMap with MultipleValue objects replaced by their key-value pairs
- */
- public Map processStringKeys(final Map, ?> keysValues) {
- if (keysValues == null) {
- return null;
- }
- final Map res = new HashMap<>();
- for (final Entry, ?> entry : keysValues.entrySet()) {
- final Object oKey = entry.getKey();
- final Object oValue = entry.getValue();
- res.put(oKey.toString(), oValue);
- }
- return res;
- }
-
- // -------------------------------------------------------------------------
- // Methods dealing with configuration properties
- // -------------------------------------------------------------------------
-
- /**
- * Set the name of the table for this insert.
- * @param tableName the new table name
- */
- public void setTableName(final String tableName) {
- this.checkIfConfigurationModificationIsAllowed();
- this.tableMetaDataContext.setTableName(tableName);
- }
-
- /**
- * Get the name of the table for this insert.
- * @return the table name
- */
- public String getTableName() {
- this.checkCompiled();
- return this.tableMetaDataContext.getTableName();
- }
-
- /**
- * Set the name of the schema for this insert.
- * @param schemaName the new schema name
- */
- public void setSchemaName(final String schemaName) {
- this.checkIfConfigurationModificationIsAllowed();
- this.tableMetaDataContext.setSchemaName(StringTools.isEmpty(schemaName) ? null : schemaName);
- }
-
- /**
- * Get the name of the schema for this insert.
- * @return the schema name
- */
- public String getSchemaName() {
- this.checkCompiled();
- return this.tableMetaDataContext.getSchemaName();
- }
-
- /**
- * Set the name of the catalog for this insert.
- * @param catalogName the new catalog name
- */
- public void setCatalogName(final String catalogName) {
- this.checkIfConfigurationModificationIsAllowed();
- this.tableMetaDataContext.setCatalogName(StringTools.isEmpty(catalogName) ? null : catalogName);
- }
-
- /**
- * Get the name of the catalog for this insert.
- * @return the catalog name
- */
- public String getCatalogName() {
- this.checkCompiled();
- return this.tableMetaDataContext.getCatalogName();
- }
-
- /**
- * Set the names of the columns to be used.
- * @param columnNames the new column names
- */
- public void setColumnNames(final List columnNames) {
- this.checkIfConfigurationModificationIsAllowed();
- this.declaredColumns.clear();
- this.declaredColumns.addAll(columnNames);
- }
-
- /**
- * Get the names of the columns used.
- * @return the column names
- */
- public List getColumnNames() {
- this.checkCompiled();
- return Collections.unmodifiableList(this.declaredColumns);
- }
-
- /**
- * Get the names of any generated keys.
- * @return the generated key names
- */
- public String[] getGeneratedKeyNames() {
- this.checkCompiled();
- return this.generatedKeyNames;
- }
-
- /**
- * Set the names of any generated keys.
- * @param generatedKeyNames the new generated key names
- */
- public void setGeneratedKeyNames(final String[] generatedKeyNames) {
- this.checkIfConfigurationModificationIsAllowed();
- this.generatedKeyNames = generatedKeyNames;
- }
-
- /**
- * Specify the name of a single generated key column.
- * @param generatedKeyName the new generated key name
- */
- public void setGeneratedKeyName(final String generatedKeyName) {
- this.checkIfConfigurationModificationIsAllowed();
- if (generatedKeyName == null) {
- this.generatedKeyNames = new String[] {};
- } else {
- this.generatedKeyNames = new String[] { generatedKeyName };
- }
- }
-
- /**
- * Specify whether the parameter metadata for the call should be used. The default is true.
- * @param accessTableColumnMetaData the new access table column meta data
- */
- public void setAccessTableColumnMetaData(final boolean accessTableColumnMetaData) {
- this.tableMetaDataContext.setAccessTableColumnMetaData(accessTableColumnMetaData);
- }
-
- /**
- * Specify whether the default for including synonyms should be changed. The default is false.
- * @param override the new override include synonyms default
- */
- public void setOverrideIncludeSynonymsDefault(final boolean override) {
- this.tableMetaDataContext.setOverrideIncludeSynonymsDefault(override);
- }
-
-
- // -------------------------------------------------------------------------
- // Methods handling compilation issues
- // -------------------------------------------------------------------------
- /**
- * Compile this JdbcInsert using provided parameters and meta data plus other settings. This
- * finalizes the configuration for this object and subsequent attempts to compile are ignored. This
- * will be implicitly called the first time an un-compiled insert is executed.
- * @throws InvalidDataAccessApiUsageException if the object hasn't been correctly initialized, for
- * example if no DataSource has been provided
- */
- public synchronized final void compile() throws InvalidDataAccessApiUsageException {
- if (!this.isCompiled()) {
- final ConfigurationFile annotation = this.getClass().getAnnotation(ConfigurationFile.class);
- if (annotation != null) {
- this.configurationFile = annotation.configurationFile();
- this.configurationFilePlaceholder = annotation.configurationFilePlaceholder();
- }
- this.loadConfigurationFile(this.configurationFile, this.configurationFilePlaceholder);
-
- if (this.getJdbcTemplate() == null) {
- throw new IllegalArgumentException("'dataSource' or 'jdbcTemplate' is required");
- }
-
- if (this.tableMetaDataContext.getTableName() == null) {
- throw new InvalidDataAccessApiUsageException("Table name is required");
- }
- try {
- this.getJdbcTemplate().afterPropertiesSet();
- } catch (final IllegalArgumentException ex) {
- throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
- }
- this.compileInternal();
- this.compiled = true;
- if (OntimizeJdbcDaoSupport.logger.isDebugEnabled()) {
- OntimizeJdbcDaoSupport.logger.debug("JdbcInsert for table [{}] compiled", this.getTableName());
- }
- }
- }
-
- @Override
- public void reload() {
- OntimizeJdbcDaoSupport.logger.debug("dao {} - {} marked to recompile", this.getClass().getName(),
- this.getTableName());
- this.compiled = false;
- this.setTableName(null);
- this.setSchemaName(null);
- this.setCatalogName(null);
- this.setDeleteKeys(null);
- this.setUpdateKeys(null);
- this.sqlQueries.clear();
- this.setGeneratedKeyName(null);
- this.setStatementHandler(null);
- this.setNameConverter(null);
- }
-
- /**
- * Load the configuration file.
- * @param path the path
- * @param pathToPlaceHolder the path to place holder
- * @throws InvalidDataAccessApiUsageException the invalid data access api usage exception
- */
- protected void loadConfigurationFile(final String path, final String pathToPlaceHolder)
- throws InvalidDataAccessApiUsageException {
-
- try (InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);) {
- Reader reader = null;
- if (pathToPlaceHolder != null) {
- try (InputStream isPlaceHolder = Thread.currentThread()
- .getContextClassLoader()
- .getResourceAsStream(pathToPlaceHolder);) {
- final Properties prop = new Properties();
- if (isPlaceHolder != null) {
- prop.load(isPlaceHolder);
- }
- reader = new ReplaceTokensFilterReader(new InputStreamReader(is),
- new HashMap((Map) prop));
- }
- } else {
- reader = new InputStreamReader(is);
- }
-
- JdbcEntitySetupType baseSetup = JAXB.unmarshal(reader, JdbcEntitySetupType.class);
-
- // Support to Dao extensions
- JdbcEntitySetupType setupConfig = this.checkDaoExtensions(baseSetup, path, pathToPlaceHolder);
-
- // Process setup information to configure dao
- this.setTableName(setupConfig.getTable());
- this.setSchemaName(setupConfig.getSchema());
- this.setCatalogName(setupConfig.getCatalog());
- this.setDeleteKeys(setupConfig.getDeleteKeys().getColumn());
- this.setUpdateKeys(setupConfig.getUpdateKeys().getColumn());
- if (setupConfig.getQueries() != null) {
- for (final QueryType query : setupConfig.getQueries().getQuery()) {//
- this.addQueryTemplateInformation(query.getId(), query.getSentence().getValue(), //
- query.getAmbiguousColumns() == null ? null
- : query.getAmbiguousColumns().getAmbiguousColumn(), //
- query.getFunctionColumns() == null ? null : query.getFunctionColumns().getFunctionColumn(), //
- query.getValidColumns() != null ? query.getValidColumns().getColumn()
- : new ArrayList(), //
- query.getOrderColumns() == null ? null : query.getOrderColumns().getOrderColumn());
- }
- }
- this.setGeneratedKeyName(setupConfig.getGeneratedKey());
- this.setDataSource((DataSource) this.applicationContext.getBean(setupConfig.getDatasource()));
- this.setStatementHandler(
- (SQLStatementHandler) this.applicationContext.getBean(setupConfig.getSqlhandler()));
-
- final String nameConverter = setupConfig.getNameconverter();
- if (!CheckingTools.isStringEmpty(nameConverter)) {
- this.setNameConverter((INameConverter) this.applicationContext.getBean(nameConverter));
- }
- this.tableMetaDataContext.setNameConvention(this.nameConvention);
- } catch (final IOException e) {
- throw new InvalidDataAccessApiUsageException(I18NNaming.M_ERROR_LOADING_CONFIGURATION_FILE, e);
- }
-
- }
-
- protected JdbcEntitySetupType checkDaoExtensions(JdbcEntitySetupType baseSetup, final String path,
- final String pathToPlaceHolder) {
- if (this.daoExtensionHelper == null) {
- return baseSetup;
- }
- return this.daoExtensionHelper.checkDaoExtensions(baseSetup, path, pathToPlaceHolder);
- }
-
- /**
- * Sets the bean property converter.
- * @param converter the new bean property converter
- */
- protected void setNameConverter(final INameConverter converter) {
- this.nameConverter = converter;
- }
-
- /**
- * Gets the bean property converter.
- * @return the bean property converter
- */
- public INameConverter getNameConverter() {
- this.checkCompiled();
- return this.nameConverter;
- }
-
- /**
- * Sets the configuration file.
- * @param configurationFile the new configuration file
- */
- public synchronized void setConfigurationFile(final String configurationFile) {
- this.configurationFile = configurationFile;
- }
-
- /**
- * Gets the configuration file.
- * @return the configuration file
- */
- public String getConfigurationFile() {
- return this.configurationFile;
- }
-
- /**
- * Sets the configuration file placeholder.
- * @param configurationFilePlaceholder the new configuration file placeholder
- */
- public synchronized void setConfigurationFilePlaceholder(final String configurationFilePlaceholder) {
- this.configurationFilePlaceholder = configurationFilePlaceholder;
- }
-
- /**
- * Gets the configuration file placeholder.
- * @return the configuration file placeholder
- */
- public String getConfigurationFilePlaceholder() {
- return this.configurationFilePlaceholder;
- }
-
- public INameConvention getNameConvention() {
- return nameConvention;
- }
-
- public void setNameConvention(INameConvention nameConvention) {
- this.nameConvention = nameConvention;
- }
-
- /**
- * Check dao config.
- */
- @Override
- protected void checkDaoConfig() {
- // no need of jdbctemplate at this point
- }
-
- /**
- * Adds a query.
- * @param id the id
- * @param value the value
- * @param ambiguousColumns the ambiguous columns
- * @param functionColumns the function columns
- */
- public void addQueryTemplateInformation(final String id, final String value,
- final List ambiguousColumns, final List functionColumns,
- List orderColumns) {
- this.addQueryTemplateInformation(id, value, ambiguousColumns, functionColumns, new ArrayList(),
- orderColumns);
- }
-
- /**
- * Adds a query, allowing determine valid columns to query to DB.
- * @param id
- * @param value
- * @param ambiguousColumns
- * @param functionColumns
- * @param validColumns
- */
- public void addQueryTemplateInformation(final String id, final String value,
- final List ambiguousColumns, final List functionColumns,
- List validColumns, List orderColumns) {
- this.sqlQueries.put(id,
- new QueryTemplateInformation(value, ambiguousColumns, functionColumns, validColumns, orderColumns));
- }
-
- /**
- * Gets the template query.
- * @param id the id
- * @return the template query
- */
- public QueryTemplateInformation getQueryTemplateInformation(final String id) {
- this.checkCompiled();
- return this.sqlQueries.get(id);
- }
-
- /**
- * Method to perform the actual compilation. Subclasses can override this template method to perform
- * their own compilation. Invoked after this base class's compilation is complete.
- */
- protected void compileInternal() {
- this.tableMetaDataContext.processMetaData(this.getJdbcTemplate().getDataSource(), this.declaredColumns,
- this.generatedKeyNames);
- this.onCompileInternal();
- }
-
- /**
- * Hook method that subclasses may override to react to compilation. This implementation does
- * nothing.
- */
- protected void onCompileInternal() {
- // This implementation does nothing.
- }
-
- /**
- * Is this operation "compiled"?.
- * @return whether this operation is compiled, and ready to use.
- */
- public boolean isCompiled() {
- return this.compiled;
- }
-
- /**
- * Check whether this operation has been compiled already; lazily compile it if not already
- * compiled.
- *
- * Automatically called by {@code validateParameters}.
- */
- public void checkCompiled() {
- if (!this.isCompiled()) {
- OntimizeJdbcDaoSupport.logger.debug("JdbcInsert not compiled before execution - invoking compile");
- this.compile();
- }
- }
-
- /**
- * Method to check whether we are allowd to make any configuration changes at this time. If the
- * class has been compiled, then no further changes to the configuration are allowed.
- */
- protected void checkIfConfigurationModificationIsAllowed() {
- if (this.isCompiled()) {
- throw new InvalidDataAccessApiUsageException(
- "Configuration can't be altered once the class has been compiled or used");
- }
- }
-
- // -------------------------------------------------------------------------
- // Methods handling execution
- // -------------------------------------------------------------------------
-
- /**
- * Method that provides execution of the insert using the passed in Map of parameters.
- * @param args Map with parameter names and values to be used in insert
- * @return number of rows affected
- */
- protected int doExecuteInsert(final Map args) {
- this.checkCompiled();
- final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(args);
- return this.executeInsertInternal(holder);
- }
-
- /**
- * Method that provides execution of the insert using the passed in.
- * @param parameterSource parameter names and values to be used in insert
- * @return number of rows affected {@link SqlParameterSource}
- */
- protected int doExecuteInsert(final SqlParameterSource parameterSource) {
- this.checkCompiled();
- final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(parameterSource);
- return this.executeInsertInternal(holder);
- }
-
- /**
- * Method to execute the insert.
- * @param values the values
- * @return the int
- */
- protected int executeInsertInternal(InsertMetaInfoHolder holder) {
- OntimizeJdbcDaoSupport.logger.debug("The following parameters are used for insert {} with: {}",
- holder.getInsertString(), holder.getValues());
- return this.getJdbcTemplate()
- .update(holder.getInsertString(), holder.getValues().toArray(), holder.getInsertTypes());
- }
-
- /**
- * Method that provides execution of the insert using the passed in Map of parameters and returning
- * a generated key.
- * @param args Map with parameter names and values to be used in insert
- * @return the key generated by the insert
- */
- protected Object doExecuteInsertAndReturnKey(final Map args) {
- this.checkCompiled();
- final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(args);
- return this.executeInsertAndReturnKeyInternal(holder);
- }
-
- /**
- * Method that provides execution of the insert using the passed in.
- * @param parameterSource parameter names and values to be used in insert
- * @return the key generated by the insert {@link SqlParameterSource} and returning a generated key
- */
- protected Object doExecuteInsertAndReturnKey(final SqlParameterSource parameterSource) {
- this.checkCompiled();
- final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(parameterSource);
- return this.executeInsertAndReturnKeyInternal(holder);
- }
-
- /**
- * Method that provides execution of the insert using the passed in Map of parameters and returning
- * all generated keys.
- * @param args Map with parameter names and values to be used in insert
- * @return the KeyHolder containing keys generated by the insert
- */
- protected KeyHolder doExecuteInsertAndReturnKeyHolder(final Map args) {
- this.checkCompiled();
- final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(args);
- return this.executeInsertAndReturnKeyHolderInternal(holder);
- }
-
- /**
- * Method that provides execution of the insert using the passed in.
- * @param parameterSource parameter names and values to be used in insert
- * @return the KeyHolder containing keys generated by the insert {@link SqlParameterSource} and
- * returning all generated keys
- */
- protected KeyHolder doExecuteInsertAndReturnKeyHolder(final SqlParameterSource parameterSource) {
- this.checkCompiled();
- final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(parameterSource);
- return this.executeInsertAndReturnKeyHolderInternal(holder);
- }
-
- /**
- * Method to execute the insert generating single key.
- * @param values the values
- * @return the number
- */
- protected Object executeInsertAndReturnKeyInternal(final InsertMetaInfoHolder holder) {
- final KeyHolder kh = this.executeInsertAndReturnKeyHolderInternal(holder);
- if ((kh != null) && (kh.getKeyAs(Object.class) != null)) {
- return kh.getKeyAs(Object.class);
- }
- throw new DataIntegrityViolationException(
- "Unable to retrieve the generated key for the insert: " + holder.getInsertString());
- }
-
- /**
- * Method to execute the insert generating any number of keys.
- * @param values the values
- * @return the key holder
- */
- protected KeyHolder executeInsertAndReturnKeyHolderInternal(final InsertMetaInfoHolder holder) {
- OntimizeJdbcDaoSupport.logger.debug("The following parameters are used for call {} with: {}",
- holder.getInsertString(), holder.getValues());
- final KeyHolder keyHolder = new GeneratedKeyHolder();
- if (!this.tableMetaDataContext.isGetGeneratedKeysSupported()) {
- if (!this.tableMetaDataContext.isGetGeneratedKeysSimulated()) {
- throw new InvalidDataAccessResourceUsageException(
- "The getGeneratedKeys feature is not supported by this database");
- }
- if (this.getGeneratedKeyNames().length < 1) {
- throw new InvalidDataAccessApiUsageException(
- "Generated Key Name(s) not specificed. "
- + "Using the generated keys features requires specifying the name(s) of the generated column(s)");
- }
- if (this.getGeneratedKeyNames().length > 1) {
- throw new InvalidDataAccessApiUsageException(
- "Current database only supports retreiving the key for a single column. There are "
- + this.getGeneratedKeyNames().length + " columns specified: " + Arrays
- .asList(this.getGeneratedKeyNames()));
- }
- // This is a hack to be able to get the generated key from a
- // database that doesn't support
- // get generated keys feature. HSQL is one, PostgreSQL is another.
- // Postgres uses a RETURNING
- // clause while HSQL uses a second query that has to be executed
- // with the same connection.
- final String keyQuery = this.tableMetaDataContext.getSimulationQueryForGetGeneratedKey(
- this.tableMetaDataContext.getTableName(), this.getGeneratedKeyNames()[0]);
- Assert.notNull(keyQuery, "Query for simulating get generated keys can't be null");
- if (keyQuery.toUpperCase().startsWith("RETURNING")) {
- final Long key = this.getJdbcTemplate()
- .queryForObject(holder.getInsertString() + " " + keyQuery,
- holder.getValues().toArray(new Object[holder.getValues().size()]),
- Long.class);
- final Map keys = new HashMap<>(1);
- keys.put(this.getGeneratedKeyNames()[0], key);
- keyHolder.getKeyList().add(keys);
- } else {
- this.getJdbcTemplate().execute(new ConnectionCallback() {
-
- @Override
- public Object doInConnection(final Connection con) throws SQLException, DataAccessException {
- // Do the insert
- PreparedStatement ps = null;
- try {
- ps = con.prepareStatement(holder.getInsertString());
- OntimizeJdbcDaoSupport.this.setParameterValues(ps, holder.getValues(),
- holder.getInsertTypes());
- ps.executeUpdate();
- } finally {
- JdbcUtils.closeStatement(ps);
- }
- // Get the key
- ResultSet rs = null;
- final Map keys = new HashMap<>(1);
- final Statement keyStmt = con.createStatement();
- try {
- rs = keyStmt.executeQuery(keyQuery);
- if (rs.next()) {
- final long key = rs.getLong(1);
- keys.put(OntimizeJdbcDaoSupport.this.getGeneratedKeyNames()[0], key);
- keyHolder.getKeyList().add(keys);
- }
- } finally {
- JdbcUtils.closeResultSet(rs);
- JdbcUtils.closeStatement(keyStmt);
- }
- return null;
- }
- });
- }
- return keyHolder;
- }
- this.getJdbcTemplate().update(new PreparedStatementCreator() {
-
- @Override
- public PreparedStatement createPreparedStatement(final Connection con) throws SQLException {
- final PreparedStatement ps = OntimizeJdbcDaoSupport.this.prepareInsertStatementForGeneratedKeys(con,
- holder.getInsertString());
- OntimizeJdbcDaoSupport.this.setParameterValues(ps, holder.getValues(), holder.getInsertTypes());
- return ps;
- }
- }, keyHolder);
- return keyHolder;
- }
-
- /**
- * Create the PreparedStatement to be used for insert that have generated keys.
- * @param con the connection used
- * @return PreparedStatement to use
- * @throws SQLException the sQL exception
- */
- protected PreparedStatement prepareInsertStatementForGeneratedKeys(final Connection con, String insertString)
- throws SQLException {
- if (this.getGeneratedKeyNames().length < 1) {
- throw new InvalidDataAccessApiUsageException(
- "Generated Key Name(s) not specificed. "
- + "Using the generated keys features requires specifying the name(s) of the generated column(s)");
- }
- PreparedStatement ps;
- if (this.tableMetaDataContext.isGeneratedKeysColumnNameArraySupported()) {
- OntimizeJdbcDaoSupport.logger.debug("Using generated keys support with array of column names.");
- ps = con.prepareStatement(insertString, this.getGeneratedKeyNames());
- } else {
- OntimizeJdbcDaoSupport.logger.debug("Using generated keys support with Statement.RETURN_GENERATED_KEYS.");
- ps = con.prepareStatement(insertString, Statement.RETURN_GENERATED_KEYS);
- }
- return ps;
- }
-
- /**
- * Method that provides execution of a batch insert using the passed in Maps of parameters.
- * @param batch array of Maps with parameter names and values to be used in batch insert
- * @return array of number of rows affected
- */
- @Override
- public int[] insertBatch(final Map[] batch) {
- this.checkCompiled();
- final List[] batchValues = new ArrayList[batch.length];
- int i = 0;
- for (final Map args : batch) {
- final List values = this.matchInParameterValuesWithInsertColumnsForBatch(args);
- batchValues[i++] = values;
- }
- return this.executeInsertBatchInternal(batchValues,
- this.tableMetaDataContext.createInsertString(this.getGeneratedKeyNames()),
- this.tableMetaDataContext.createInsertTypes());
- }
-
- /**
- * Method that provides execution of a batch insert using the passed in array of
- * {@link SqlParameterSource}.
- * @param batch array of SqlParameterSource with parameter names and values to be used in insert
- * @return array of number of rows affected
- */
- protected int[] doExecuteInsertBatch(final SqlParameterSource[] batch, final String insertString,
- final int[] insertTypes) {
- this.checkCompiled();
- final List[] batchValues = new ArrayList[batch.length];
- int i = 0;
- for (final SqlParameterSource parameterSource : batch) {
- final List values = this.matchInParameterValuesWithInsertColumnsForBatch(parameterSource);
- batchValues[i++] = values;
- }
- return this.executeInsertBatchInternal(batchValues, insertString, insertTypes);
- }
-
- /**
- * Method to execute the batch insert.
- * @param batchValues the batch values
- * @return the int[]
- */
- protected int[] executeInsertBatchInternal(final List[] batchValues, final String insertString,
- final int[] insertTypes) {
- OntimizeJdbcDaoSupport.logger.debug("Executing statement {} with batch of size: {}", insertString,
- batchValues.length);
- return this.getJdbcTemplate().batchUpdate(insertString, new BatchPreparedStatementSetter() {
-
- @Override
- public void setValues(final PreparedStatement ps, final int i) throws SQLException {
- final List values = batchValues[i];
- OntimizeJdbcDaoSupport.this.setParameterValues(ps, values, insertTypes);
- }
-
- @Override
- public int getBatchSize() {
- return batchValues.length;
- }
- });
- }
-
- /**
- * Internal implementation for setting parameter values.
- * @param preparedStatement the PreparedStatement
- * @param values the values to be set
- * @param columnTypes the column types
- * @throws SQLException the sQL exception
- */
- protected void setParameterValues(final PreparedStatement preparedStatement, final List values,
- final int[] columnTypes) throws SQLException {
-
- int colIndex = 0;
- for (Object value : values) {
- colIndex++;
- if ((columnTypes == null) || (colIndex > columnTypes.length)) {
- StatementCreatorUtils.setParameterValue(preparedStatement, colIndex, SqlTypeValue.TYPE_UNKNOWN, value);
- } else {
- final int sqlType = columnTypes[colIndex - 1];
- if (ObjectTools.isIn(sqlType, Types.BLOB, Types.BINARY, Types.VARBINARY)
- && ((value instanceof byte[]) || (value instanceof InputStream))) {
- if (value instanceof byte[]) {
- preparedStatement.setBytes(colIndex, (byte[]) value);
- } else {
- try {
- // TODO esto no esta soportado por los drivers jdbc 4.0
- // TODO segun el driver puede ser que sea mas rapido llamar al metodo con la longitud
- preparedStatement.setBlob(colIndex, (InputStream) value);
- } catch (AbstractMethodError ex) {
- OntimizeJdbcDaoSupport.logger.debug(null, ex);
- try {
- preparedStatement.setBinaryStream(colIndex, (InputStream) value,
- ((InputStream) value).available());
- } catch (IOException error) {
- throw new SQLException(error);
- }
- }
- }
- } else if (value instanceof NullValue) {
- // TODO At this point we could retrieve sqlType from ((NullValue)value).getSQLDataType()
- // but it is preferable to use the sqlType retrieved from table metadata.
- value = new SqlParameterValue(sqlType, null);
- StatementCreatorUtils.setParameterValue(preparedStatement, colIndex, sqlType, value);
- } else {
- StatementCreatorUtils.setParameterValue(preparedStatement, colIndex, sqlType, value);
- }
- }
- }
- }
-
- /**
- * Match the provided in parameter values with regitered parameters and parameters defined via
- * metadata processing.
- * @param parameterSource the parameter vakues provided as a {@link SqlParameterSource}
- * @return Map with parameter names and values
- */
- protected InsertMetaInfoHolder matchInParameterValuesWithInsertColumns(final SqlParameterSource parameterSource) {
- return this.tableMetaDataContext.getInsertMetaInfo(parameterSource);
- }
-
- /**
- * Match the provided in parameter values with regitered parameters and parameters defined via
- * metadata processing.
- * @param args the parameter values provided in a Map
- * @return Map with parameter names and values
- */
- protected InsertMetaInfoHolder matchInParameterValuesWithInsertColumns(final Map args) {
- return this.tableMetaDataContext.getInsertMetaInfo(args);
- }
-
- /**
- * Match the provided in parameter values with regitered parameters and parameters defined via
- * metadata processing.
- * @param parameterSource the parameter vakues provided as a {@link SqlParameterSource}
- * @return Map with parameter names and values
- */
- protected List matchInParameterValuesWithInsertColumnsForBatch(final SqlParameterSource parameterSource) {
- return this.tableMetaDataContext.matchInParameterValuesWithInsertColumns(parameterSource);
- }
-
- /**
- * Match the provided in parameter values with regitered parameters and parameters defined via
- * metadata processing.
- * @param args the parameter values provided in a Map
- * @return Map with parameter names and values
- */
- protected List matchInParameterValuesWithInsertColumnsForBatch(final Map args) {
- return this.tableMetaDataContext.matchInParameterValuesWithInsertColumns(args);
- }
-
- /**
- * Gets the schema table.
- * @return the schema table
- */
- protected String getSchemaTable() {
- String sTableToUse = this.getTableName();
- if (this.getSchemaName() != null) {
- sTableToUse = this.getSchemaName() + "." + sTableToUse;
- }
- return sTableToUse;
- }
-
- /**
- * Establish SQL statement builder.
- * @param statementHandler the new statement handler
- */
- public void setStatementHandler(final SQLStatementHandler statementHandler) {
- this.statementHandler = statementHandler;
- }
-
- /**
- * Get the SQL statement builder.
- * @return the statement builder
- */
- public SQLStatementHandler getStatementHandler() {
- return this.statementHandler;
- }
-
- /**
- * Sets the delete keys.
- * @param deleteKeys the new delete keys
- */
- public void setDeleteKeys(final List deleteKeys) {
- this.deleteKeys = deleteKeys;
- }
-
- /**
- * Gets the delete keys.
- * @return the delete keys
- */
- public List getDeleteKeys() {
- return this.deleteKeys;
- }
-
- /**
- * Sets the update keys.
- * @param updateKeys the new update keys
- */
- public void setUpdateKeys(final List updateKeys) {
- this.updateKeys = updateKeys;
- }
-
- /**
- * Gets the update keys.
- * @return the update keys
- */
- public List getUpdateKeys() {
- return this.updateKeys;
- }
-
- /*
- * (non-Javadoc)
- *
- * @see org.springframework.context.ApplicationContextAware#setApplicationContext
- * (org.springframework.context.ApplicationContext)
- */
- /**
- * Sets the application context.
- * @param applicationContext the new application context
- * @throws BeansException the beans exception
- */
- @Override
- public void setApplicationContext(final ApplicationContext applicationContext) throws BeansException {
- this.applicationContext = applicationContext;
- }
-
- /**
- * Gets the application context.
- * @return the application context
- */
- public ApplicationContext getApplicationContext() {
- return this.applicationContext;
- }
-
- /*
- * (non-Javadoc)
- *
- * @see com.ontimize.jee.server.dao.IOntimizeDaoSupport#getCudProperties()
- */
- @Override
- public List getCudProperties() {
- this.compile();
- List res = new ArrayList<>();
- for (TableParameterMetaData data : this.tableMetaDataContext.getTableParameters()) {
- String name = data.getParameterName();
- int type = data.getSqlType();
- DaoProperty property = new DaoProperty();
- property.setSqlType(type);
- property.setPropertyName(name);
- res.add(property);
- }
- return res;
- }
-
- /**
- * Gets the table meta data context.
- * @return the table meta data context
- */
- public OntimizeTableMetaDataContext getTableMetaDataContext() {
- if (!this.tableMetaDataContext.isProcessed()) {
- this.compile();
- }
- return this.tableMetaDataContext;
- }
-
- protected OntimizeTableMetaDataContext createTableMetadataContext() {
- return new OntimizeTableMetaDataContext();
- }
-
- @Override
- protected JdbcTemplate createJdbcTemplate(DataSource dataSource) {
- OntimizeJdbcDaoSupport.logger.trace("Creating new JdbcTemplate with fetchSize=1000");
- JdbcTemplate template = super.createJdbcTemplate(dataSource);
- template.setFetchSize(1000);
- OntimizeJdbcDaoSupport.logger
- .trace("Creating new JdbcTemplate has finally fetchSize=" + template.getFetchSize());
- return template;
- }
-
- private Integer getColumnSQLType(final String column) {
- if (!this.tableMetaDataContext.isProcessed()) {
- this.compile();
- }
- for (final TableParameterMetaData data : this.tableMetaDataContext.getTableParameters()) {
- if (column.equalsIgnoreCase(data.getParameterName())) {
- return data.getSqlType();
- }
- }
- return null;
- }
-}
+ /**
+ * Replaces the WHERE placeholders in the SQL template and updates the values.
+ *
+ * @param sqlTemplate The original SQL template.
+ * @param cond The generated condition.
+ * @param vValues The list of values to be updated.
+ * @param vValuesTemp Temporary list of condition values.
+ * @return The modified sqlTemplate with the placeholders replaced.
+ */
+ public String applyWherePlaceholders(String sqlTemplate, String cond, List<Object> vValues, List<Object> vValuesTemp) {
+ Pair<String, Integer> replaceAll = StringTools.replaceAll(sqlTemplate, OntimizeJdbcDaoSupport.PLACEHOLDER_WHERE_CONCAT, cond.isEmpty() ? "" : SQLStatementBuilder.AND + " " + cond);
+ sqlTemplate = replaceAll.getFirst();
+ for (int i = 1; i < replaceAll.getSecond(); i++) {
+ vValues.addAll(vValuesTemp);
+ }
+
+ replaceAll = StringTools.replaceAll(sqlTemplate, OntimizeJdbcDaoSupport.PLACEHOLDER_WHERE, cond.isEmpty() ? "" : SQLStatementBuilder.WHERE + " " + cond);
+ sqlTemplate = replaceAll.getFirst();
+ for (int i = 1; i < replaceAll.getSecond(); i++) {
+ vValues.addAll(vValuesTemp);
+ }
+ return sqlTemplate;
+ }
+
+ @Override
+ public <T> List<T> query(final Map<?, ?> keysValues, final List<?> sort, final String queryId, final Class<T> clazz) {
+ return this.query(keysValues, sort, queryId, clazz, null);
+ }
+
+ /**
+ * Query.
+ *
+ * @param <T> the generic type
+ * @param keysValues the keys values
+ * @param sort the sort
+ * @param queryId the query id
+ * @param clazz the clazz
+ * @return the list
+ */
+ @Override
+ public <T> List<T> query(final Map<?, ?> keysValues, final List<?> sort, final String queryId, final Class<T> clazz, ISQLQueryAdapter queryAdapter) {
+ this.checkCompiled();
+ BeanPropertyRowMapper<T> rowMapper = this.createRowMapper(clazz);
+ final SQLStatementBuilder.SQLStatement stSQL = this.composeQuerySql(queryId, rowMapper.convertBeanPropertiesToDB(clazz), keysValues, sort, null, queryAdapter);
+ final String sqlQuery = stSQL.getSQLStatement();
+ final List<?> vValues = stSQL.getValues();
+ return this.getJdbcTemplate().query(sqlQuery, rowMapper, vValues.toArray());
+ }
+
+ /**
+ * Executes a single SQL statement using a prepared statement with the given parameters.
+ *
+ * <p>
+ * This method is intended for executing a single SQL command (e.g., CREATE, DROP, UPDATE), and supports parameter
+ * substitution using placeholders.
+ *
+ * <p>
+ * Restrictions:
+ *
+ * <ul>
+ * <li>Only one SQL statement can be executed per call. Do not pass multiple statements separated by semicolons.</li>
+ * <li>Dynamic WHERE clauses not working with DDL statements.</li>
+ * <li>This method does not return results.</li>
+ * </ul>
+ *
+ * @param sqlStatement The SQL statement to execute.
+ * @param vValues The list of values for the prepared statement.
+ */
+ public boolean executeComposeSQLStatement(String sqlStatement, List<?> vValues) {
+ Chronometer chrono = new Chronometer().start();
+ try {
+ JdbcTemplate jdbcTemplate = this.getJdbcTemplate();
+ if (jdbcTemplate != null) {
+ jdbcTemplate.execute((ConnectionCallback<Boolean>) con -> {
+ PreparedStatement ps = con.prepareStatement(sqlStatement);
+ ArgumentPreparedStatementSetter pss = new ArgumentPreparedStatementSetter(vValues.toArray());
+ pss.setValues(ps);
+ return ps.execute();
+ });
+ }
+ return false;
+ } finally {
+ OntimizeJdbcDaoSupport.logger.trace("Time consumed in statement= {} ms", chrono.stopMs());
+ }
+ }
+
+ /**
+ * Creates the row mapper.
+ *
+ * @param <T> the generic type
+ * @param clazz the clazz
+ * @return the bean property row mapper
+ */
+ protected <T> BeanPropertyRowMapper<T> createRowMapper(final Class<T> clazz) {
+ return new BeanPropertyRowMapper<>(this.getNameConverter(), this.getDataSource(), clazz);
+ }
+
+ /**
+ * Apply template prefix.
+ *
+ * @param templateInformation the template information
+ * @param vValidAttributes the v valid attributes
+ * @return the list
+ */
+ protected List<Object> applyTransformations(final QueryTemplateInformation templateInformation, final List<?> vValidAttributes) {
+ final List<AmbiguousColumnType> ambiguousColumns = templateInformation.getAmbiguousColumns();
+ final List<FunctionColumnType> functionColumns = templateInformation.getFunctionColumns();
+
+ final List<Object> res = new ArrayList<>(vValidAttributes.size());
+ for (final Object ob : vValidAttributes) {
+ boolean transformed = false;
+ if (ambiguousColumns != null) {
+ transformed = this.transformWithAmbiguousColumns(ob, ambiguousColumns, res, transformed);
+ }
+ if (!transformed && (functionColumns != null)) {
+ transformed = this.transformWithFunctionColumns(ob, functionColumns, res, transformed);
+ }
+ if (!transformed) {
+ res.add(ob);
+ }
+ }
+ return res;
+ }
+
+ private boolean transformWithFunctionColumns(Object ob, List<FunctionColumnType> functionColumns, List<Object> res, boolean transformed) {
+ for (final FunctionColumnType functionColumn : functionColumns) {
+ if (ob.toString().equalsIgnoreCase(functionColumn.getName())) {
+ String sb = SQLStatementBuilder.OPEN_PARENTHESIS + functionColumn.getValue() + SQLStatementBuilder.CLOSE_PARENTHESIS + SQLStatementBuilder.AS + functionColumn.getName();
+ res.add(sb);
+ transformed = true;
+ break;
+ }
+ }
+ return transformed;
+ }
+
+ private boolean transformWithAmbiguousColumns(Object ob, List<AmbiguousColumnType> ambiguousColumns, List<Object> res, boolean transformed) {
+ for (final AmbiguousColumnType ambiguosColumn : ambiguousColumns) {
+ if (ob.toString().equalsIgnoreCase(ambiguosColumn.getName())) {
+ final String dbName = ambiguosColumn.getDatabaseName() == null ? ambiguosColumn.getName() : ambiguosColumn.getDatabaseName();
+ String sb = ambiguosColumn.getPrefix() + "." + dbName + SQLStatementBuilder.AS + ambiguosColumn.getName();
+ res.add(sb);
+ transformed = true;
+ break;
+ }
+ }
+ return transformed;
+ }
+
+ /**
+ * Apply template prefix.
+ *
+ * @param templateInformation the template information
+ * @param kvValidKeysValues the kv valid keys values
+ * @return the Map
+ */
+ protected Map<Object, Object> applyTransformations(final QueryTemplateInformation templateInformation, final Map<Object, Object> kvValidKeysValues) {
+ final List<AmbiguousColumnType> ambiguousColumns = templateInformation.getAmbiguousColumns();
+ final List<FunctionColumnType> functionColumns = templateInformation.getFunctionColumns();
+
+ final Map<Object, Object> res = new HashMap<>();
+ for (final Map.Entry<Object, Object> kvEntry : kvValidKeysValues.entrySet()) {
+ if (kvEntry.getKey() instanceof String) {
+ String key = (String) kvEntry.getKey();
+ boolean transformed = false;
+ transformed = this.transformedStringKey(kvEntry, key, res, ambiguousColumns, functionColumns, transformed);
+ if (!transformed) {
+ res.put(key, kvEntry.getValue());
+ }
+ } else {
+ res.put(kvEntry.getKey(), kvEntry.getValue());
+ }
+ }
+ return res;
+ }
+
+ private boolean transformedStringKey(Map.Entry<Object, Object> kvEntry, String key, Map<Object, Object> res, List<AmbiguousColumnType> ambiguousColumns, List<FunctionColumnType> functionColumns, boolean transformed) {
+ if ((SQLStatementBuilder.ExtendedSQLConditionValuesProcessor.EXPRESSION_KEY.equals(key) || SQLStatementBuilder.ExtendedSQLConditionValuesProcessor.FILTER_KEY.equals(key)) && (kvEntry.getValue() instanceof SQLStatementBuilder.BasicExpression)) {
+ res.put(key, this.applyTransformationsToBasicExpression((SQLStatementBuilder.BasicExpression) kvEntry.getValue(), ambiguousColumns, functionColumns));
+ transformed = true;
+ } else {
+ String resolvedAmbiguousColumn = this.resolveAmbiguousColumn(key, ambiguousColumns);
+ if (resolvedAmbiguousColumn != null) {
+ res.put(resolvedAmbiguousColumn, kvEntry.getValue());
+ transformed = true;
+ } else {
+ String resolvedFunctionColumn = this.resolveFunctionColumn(key, functionColumns);
+ if (resolvedFunctionColumn != null) {
+ res.put(resolvedFunctionColumn, kvEntry.getValue());
+ transformed = true;
+ }
+ }
+ }
+ return transformed;
+ }
+
+ protected List<Object> applyOrderColumns(final List<?> sort, final List<OrderColumnType> orderColumns) {
+ List<Object> vResult = new ArrayList<>();
+ if ((sort != null) && (sort.size() > 0)) {
+ vResult.addAll(sort);
+ }
+
+ if ((orderColumns != null) && (orderColumns.size() > 0)) {
+ for (OrderColumnType orderColumnType : orderColumns) {
+ SQLStatementBuilder.SQLOrder sqlOrder = new SQLStatementBuilder.SQLOrder(orderColumnType.getName(), "ASC".equals(orderColumnType.getType()));
+ vResult.add(sqlOrder);
+ }
+ }
+
+ return vResult;
+ }
+
+ /**
+ * Resolve function column.
+ *
+ * @param key the key
+ * @param functionColumns the function columns
+ * @return the string
+ */
+ protected String resolveFunctionColumn(String key, List<FunctionColumnType> functionColumns) {
+ if (functionColumns != null) {
+ for (final FunctionColumnType functionColumn : functionColumns) {
+ if (key.equalsIgnoreCase(functionColumn.getName())) {
+ return functionColumn.getValue();
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Resolve ambiguous column.
+ *
+ * @param key the key
+ * @param ambiguousColumns the ambiguous columns
+ * @return the string
+ */
+ protected String resolveAmbiguousColumn(String key, List<AmbiguousColumnType> ambiguousColumns) {
+ if (ambiguousColumns != null) {
+ for (final AmbiguousColumnType ambiguosColumn : ambiguousColumns) {
+ if (key.equalsIgnoreCase(ambiguosColumn.getName())) {
+ final String dbName = ambiguosColumn.getDatabaseName() == null ? key : ambiguosColumn.getDatabaseName();
+ return ambiguosColumn.getPrefix() + "." + dbName;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Apply transformations to basic expression.
+ *
+ * @param functionColumns
+ * @param ambiguousColumns
+ * @param value the value
+ * @return the object
+ */
+ protected SQLStatementBuilder.BasicExpression applyTransformationsToBasicExpression(final SQLStatementBuilder.BasicExpression original, List<AmbiguousColumnType> ambiguousColumns, List<FunctionColumnType> functionColumns) {
+ Object originalLeftOperand = original.getLeftOperand();
+ SQLStatementBuilder.Operator originalOperator = original.getOperator();
+ Object originalRightOperand = original.getRightOperand();
+ Object transformedLeftOperand = null;
+ SQLStatementBuilder.Operator transformedOperator = originalOperator;
+ Object transformedRightOperand = null;
+ if (originalLeftOperand instanceof SQLStatementBuilder.BasicField) {
+ transformedLeftOperand = this.applyTransformationsToBasicField((SQLStatementBuilder.BasicField) originalLeftOperand, ambiguousColumns, functionColumns);
+ } else if (originalLeftOperand instanceof SQLStatementBuilder.BasicExpression) {
+ transformedLeftOperand = this.applyTransformationsToBasicExpression((SQLStatementBuilder.BasicExpression) originalLeftOperand, ambiguousColumns, functionColumns);
+ } else {
+ transformedLeftOperand = originalLeftOperand;
+ }
+
+ if (originalRightOperand instanceof SQLStatementBuilder.BasicField) {
+ transformedRightOperand = this.applyTransformationsToBasicField((SQLStatementBuilder.BasicField) originalRightOperand, ambiguousColumns, functionColumns);
+ } else if (originalRightOperand instanceof SQLStatementBuilder.BasicExpression) {
+ transformedRightOperand = this.applyTransformationsToBasicExpression((SQLStatementBuilder.BasicExpression) originalRightOperand, ambiguousColumns, functionColumns);
+ } else {
+ transformedRightOperand = originalRightOperand;
+ }
+
+ return new SQLStatementBuilder.BasicExpression(transformedLeftOperand, transformedOperator, transformedRightOperand);
+ }
+
+ /**
+ * Apply transformations to basic field.
+ *
+ * @param originalField the original field
+ * @param ambiguousColumns the ambiguous columns
+ * @param functionColumns the function columns
+ * @return the basic field
+ */
+ protected SQLStatementBuilder.BasicField applyTransformationsToBasicField(SQLStatementBuilder.BasicField originalField, List<AmbiguousColumnType> ambiguousColumns, List<FunctionColumnType> functionColumns) {
+ String columnName = originalField.toString();
+ Integer columnType = originalField.getSqlType();
+ if (columnType == null) columnType = this.getColumnSQLType(columnName);
+ String resolvedAmbiguousColumn = this.resolveAmbiguousColumn(columnName, ambiguousColumns);
+ if (resolvedAmbiguousColumn != null) {
+ return new SQLStatementBuilder.BasicField(resolvedAmbiguousColumn, columnType);
+ }
+ String resolvedFunctionColumn = this.resolveFunctionColumn(columnName, functionColumns);
+ if (resolvedFunctionColumn != null) {
+ return new SQLStatementBuilder.BasicField(resolvedFunctionColumn, columnType);
+ }
+ return new SQLStatementBuilder.BasicField(columnName, columnType);
+ }
+
+ /**
+ * Insert.
+ *
+ * @param attributesValues the attributes values
+ * @return the entity result
+ */
+ @Override
+ public EntityResult insert(final Map<?, ?> attributesValues) {
+ this.checkCompiled();
+ final EntityResult erResult = new EntityResultMapImpl();
+
+ final Map<?, ?> avWithoutMultipleTableAttributes = this.processMultipleTableAttribute(attributesValues);
+ final Map<?, ?> avWithoutReferenceAttributes = this.processReferenceDataFieldAttributes(avWithoutMultipleTableAttributes);
+ final Map<?, ?> avWithoutMultipleValueAttributes = this.processMultipleValueAttributes(avWithoutReferenceAttributes);
+ final Map avValidPre = this.getValidAttributes(this.processStringKeys(avWithoutMultipleValueAttributes));
+ final Map avValid = this.removeNullValues(avValidPre);
+ if (avValid.isEmpty()) {
+ // TODO se deber�a lanzar excepci�n, pero puede tener colaterales con la one-2-one
+ OntimizeJdbcDaoSupport.logger.warn("Insert: Attributes does not contain any pair key-value valid");
+ return erResult;
+ }
+
+ if (this.getGeneratedKeyNames().length < 1) {
+ final int res = this.doExecuteInsert(avValid);
+ if (res != 1) {
+ throw new SQLWarningException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD, null);
+ }
+ } else if (this.getGeneratedKeyNames().length == 1) {
+ final Object res = this.doExecuteInsertAndReturnKey(avValid);
+ if (res == null) {
+ throw new DataRetrievalFailureException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD);
+ }
+ erResult.put(this.nameConvention.convertName(this.getGeneratedKeyNames()[0]), res);
+ }
+ return erResult;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#insert(java.util.Map)
+ */
+
+ /**
+ * Removes the null values.
+ *
+ * @param inputAttributesValues the input attributes values
+ * @return the map
+ */
+ protected Map<String, Object> removeNullValues(Map<String, Object> inputAttributesValues) {
+ final Map<String, Object> hValidKeysValues = new HashMap<>();
+ for (final Map.Entry<String, Object> entry : inputAttributesValues.entrySet()) {
+ final String oKey = entry.getKey();
+ final Object oValue = entry.getValue();
+ if ((oValue != null) && !(oValue instanceof NullValue)) {
+ hValidKeysValues.put(oKey, oValue);
+ }
+ }
+ return hValidKeysValues;
+ }
+
+ /**
+ * Unsafe update.
+ *
+ * @param attributesValues the attributes values
+ * @param keysValues the keys values
+ * @return the entity result
+ */
+ @Override
+ public EntityResult unsafeUpdate(final Map<?, ?> attributesValues, final Map<?, ?> keysValues) {
+ return this.innerUpdate(attributesValues, keysValues, false);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#unsafeUpdate(java.util .Map,
+ * java.util.Map)
+ */
+
+ /**
+ * Update.
+ *
+ * @param attributesValues the attributes values
+ * @param keysValues the keys values
+ * @return the entity result
+ */
+ @Override
+ public EntityResult update(final Map<?, ?> attributesValues, final Map<?, ?> keysValues) {
+ return this.innerUpdate(attributesValues, keysValues, true);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#update(java.util.Map, java.util.Map)
+ */
+
+ /**
+ * Inner update.
+ *
+ * @param attributesValues the attributes values
+ * @param keysValues the keys values
+ * @param safe the safe
+ * @return the entity result
+ */
+ protected EntityResult innerUpdate(final Map<?, ?> attributesValues, final Map<?, ?> keysValues, final boolean safe) {
+ this.checkCompiled();
+ final EntityResult erResult = new EntityResultMapImpl();
+
+ // Check the primary keys
+ final Map<?, ?> avWithoutMultipleTableAttributes = this.processMultipleTableAttribute(attributesValues);
+ final Map<?, ?> avWithoutReferenceAttributes = this.processReferenceDataFieldAttributes(avWithoutMultipleTableAttributes);
+ final Map<?, ?> avWithoutMultipleValue = this.processMultipleValueAttributes(avWithoutReferenceAttributes);
+
+ final Map<?, ?> kvWithoutMulpleTableAttributes = this.processMultipleTableAttribute(keysValues);
+ final Map<?, ?> kvWithoutReferenceAttributessRef = this.processReferenceDataFieldAttributes(kvWithoutMulpleTableAttributes);
+
+ final Map hValidAttributesValues = this.getValidAttributes(avWithoutMultipleValue);
+ Map<?, ?> hValidKeysValues = null;
+ if (safe) {
+ hValidKeysValues = this.getValidUpdatingKeysValues(kvWithoutReferenceAttributessRef);
+ this.checkUpdateKeys(hValidKeysValues);
+ } else {
+ hValidKeysValues = kvWithoutReferenceAttributessRef;
+ }
+
+ if (hValidAttributesValues.isEmpty() || hValidKeysValues.isEmpty()) {
+ OntimizeJdbcDaoSupport.logger.debug("Update: Attributes or Keys do not contain any pair key-value valid");
+ throw new SQLWarningException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD, null);
+ }
+ final SQLStatementBuilder.SQLStatement stSQL = this.getStatementHandler().createUpdateQuery(this.getSchemaTable(), new HashMap<>(hValidAttributesValues), new HashMap<>(hValidKeysValues));
+ final String sqlQuery = stSQL.getSQLStatement();
+ final List<?> vValues = this.processNullValues(stSQL.getValues());
+ final int update = this.getJdbcTemplate().update(sqlQuery, vValues.toArray());
+ if (update == 0) {
+ erResult.setCode(EntityResult.OPERATION_SUCCESSFUL_SHOW_MESSAGE);
+ erResult.setMessage(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD);
+ }
+ return erResult;
+ }
+
+ /**
+ * Process null values.
+ *
+ * @param values the values
+ * @return the list
+ */
+ protected List<?> processNullValues(final List<?> values) {
+ for (int i = 0; i < values.size(); i++) {
+ final Object ob = values.get(i);
+ if (ob instanceof NullValue) {
+ ((List<Object>) values).set(i, new SqlParameterValue(((NullValue) ob).getSQLDataType(), null));
+ }
+ }
+ return values;
+ }
+
+ /**
+ * Unsafe delete.
+ *
+ * @param keysValues the keys values
+ * @return the entity result
+ */
+ @Override
+ public EntityResult unsafeDelete(final Map<?, ?> keysValues) {
+ return this.innerDelete(keysValues, false);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#unsafeDelete(java.util .Map)
+ */
+
+ /**
+ * Delete.
+ *
+ * @param keysValues the keys values
+ * @return the entity result
+ */
+ @Override
+ public EntityResult delete(final Map<?, ?> keysValues) {
+ return this.innerDelete(keysValues, true);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.ontimize.jee.server.entity.IOntimizeDaoSupport#delete(java.util.Map)
+ */
+
+ /**
+ * Inner delete.
+ *
+ * @param keysValues the keys values
+ * @param safe the safe
+ * @return the entity result
+ */
+ public EntityResult innerDelete(final Map<?, ?> keysValues, final boolean safe) {
+ this.checkCompiled();
+ final EntityResult erResult = new EntityResultMapImpl();
+ Map<?, ?> keysValuesChecked = keysValues;
+ if (safe) {
+ keysValuesChecked = this.checkDeleteKeys(keysValues);
+ }
+
+ if (keysValuesChecked.isEmpty()) {
+ OntimizeJdbcDaoSupport.logger.debug("Delete: Keys does not contain any pair key-value valid: {}", keysValues);
+ throw new SQLWarningException(I18NNaming.M_IT_HAS_NOT_CHANGED_ANY_RECORD, null);
+ }
+
+ final SQLStatementBuilder.SQLStatement stSQL = this.getStatementHandler().createDeleteQuery(this.getSchemaTable(), new HashMap<>(keysValuesChecked));
+ final String sqlQuery = stSQL.getSQLStatement();
+ final List> vValues = stSQL.getValues();
+ this.getJdbcTemplate().update(sqlQuery, vValues.toArray());
+
+ return erResult;
+ }
+
+ /**
+ * Checks that keysValues contains a value for every column defined in the 'delete_keys'
+ * parameter, and builds a map restricted to exactly those columns.
+ *
+ * NOTE(review): mapKey.toUpperCase() uses the default locale; in a Turkish locale 'i'
+ * uppercases to a dotted capital, which would change the generated message key. Consider
+ * toUpperCase(Locale.ROOT) in a future fix.
+ *
+ * @param keysValues the keys values supplied by the caller
+ * @return a new map containing only the configured delete-key columns and their values
+ * @throws SQLWarningException (with a null cause) if any configured delete key is absent
+ */
+ protected Map checkDeleteKeys(final Map, ?> keysValues) {
+ Map res = new HashMap<>();
+ for (String element : this.deleteKeys) {
+ String mapKey = element;
+ if (!keysValues.containsKey(mapKey)) {
+ throw new SQLWarningException("M_NECESSARY_" + mapKey.toUpperCase(), null);
+ }
+ res.put(mapKey, keysValues.get(mapKey));
+ }
+ OntimizeJdbcDaoSupport.logger.debug(" Delete valid keys values: Input: {} -> Result: {}", keysValues, res);
+ return res;
+ }
+
+ /**
+ * Checks that keysValues contains a value for every column defined in the 'update_keys'
+ * parameter. Unlike {@link #checkDeleteKeys} it returns nothing and does not filter the map.
+ *
+ * NOTE(review): this variant throws with {@code new SQLWarning()} as cause while the delete
+ * variant passes {@code null} — confirm whether the inconsistency is deliberate.
+ *
+ * @param keysValues the keys values supplied by the caller
+ * @throws SQLWarningException if any configured update key is absent
+ */
+ protected void checkUpdateKeys(final Map, ?> keysValues) {
+ for (String element : this.updateKeys) {
+ if (!keysValues.containsKey(element)) {
+ throw new SQLWarningException("M_NECESSARY_" + element.toUpperCase(), new SQLWarning());
+ }
+ }
+ }
+
+ /**
+ * Returns a Map containing a list of valid key-value pairs from those contained in the
+ * keysValues argument.
+ *
+ * A key-value pair is valid if the key is valid.
+ *
+ * Only keys matching (case-sensitive) any of the columns defined by the 'update_keys' parameter are
+ * considered valid; everything else is silently dropped (only logged at debug level).
+ *
+ *
+ * @param keysValues the keys values
+ * @return a new map restricted to the configured update-key columns
+ */
+ public Map, ?> getValidUpdatingKeysValues(final Map, ?> keysValues) {
+ final Map hValidKeysValues = new HashMap<>();
+ for (String element : this.updateKeys) {
+ if (keysValues.containsKey(element)) {
+ hValidKeysValues.put(element, keysValues.get(element));
+ }
+ }
+ OntimizeJdbcDaoSupport.logger.debug(" Update valid keys values: Input: {} -> Result: {}",keysValues, hValidKeysValues);
+ return hValidKeysValues;
+ }
+
+ /**
+ * Returns a Map containing a list of valid key-value pairs from those contained in the
+ * attributesValues argument.
+ *
+ * A key-value pair is valid if the key is in the table column list obtained from the
+ * metadata context (name-convention applied). Entries with non-column keys are dropped.
+ *
+ *
+ * @param inputAttributesValues the attributes values
+ * @return a new map restricted to real table columns; keys are cast to String
+ */
+ public Map getValidAttributes(final Map, ?> inputAttributesValues) {
+ final Map hValidKeysValues = new HashMap<>();
+ List nameConventionTableColumns = this.tableMetaDataContext.getNameConventionTableColumns();
+ for (final Map.Entry, ?> entry : inputAttributesValues.entrySet()) {
+ final Object oKey = entry.getKey();
+ final Object oValue = entry.getValue();
+ if (nameConventionTableColumns.contains(oKey)) {
+ // Safe cast: oKey matched a column name, so it is a String here
+ hValidKeysValues.put((String) oKey, oValue);
+ }
+ }
+ OntimizeJdbcDaoSupport.logger.debug(" Update valid attributes values: Input: {} -> Result: {}", inputAttributesValues, hValidKeysValues);
+ return hValidKeysValues;
+ }
+
+ /**
+ * Processes all the MultipleTableAttribute contained as keys in the Map av. All other
+ * objects are added to the resulting Map with no changes. The MultipleTableAttribute keys are
+ * replaced by their inner attribute.
+ *
+ * @param av the attribute-value map to process
+ * @return a new HashMap with the processed objects.
+ */
+ protected Map, ?> processMultipleTableAttribute(final Map, ?> av) {
+ final Map res = new HashMap<>();
+ for (final Map.Entry, ?> entry : av.entrySet()) {
+ final Object oKey = entry.getKey();
+ final Object oValue = entry.getValue();
+ if (oKey instanceof MultipleTableAttribute) {
+ // Unwrap: the SQL layer needs the plain attribute, not the wrapper
+ res.put(((MultipleTableAttribute) oKey).getAttribute(), oValue);
+ } else {
+ res.put(oKey, oValue);
+ }
+ }
+ return res;
+ }
+
+ /**
+ * Processes the ReferenceFieldAttribute objects contained in keysValues.
+ *
+ * Returns a Map containing all the objects contained in the argument keysValues except
+ * in the case of keys that are ReferenceFieldAttribute objects, which are replaced by
+ * ((ReferenceFieldAttribute)object).getAttr()
+ *
+ *
+ * @param keysValues the keysValues to process; may be null
+ * @return a Map containing the processed objects, or null if the input was null
+ */
+ public Map, ?> processReferenceDataFieldAttributes(final Map, ?> keysValues) {
+ if (keysValues == null) {
+ return null;
+ }
+ final Map res = new HashMap<>();
+ for (final Map.Entry, ?> entry : keysValues.entrySet()) {
+ final Object oKey = entry.getKey();
+ final Object oValue = entry.getValue();
+ if (oKey instanceof ReferenceFieldAttribute) {
+ final String attr = ((ReferenceFieldAttribute) oKey).getAttr();
+ res.put(attr, oValue);
+ } else {
+ res.put(oKey, oValue);
+ }
+ }
+ return res;
+ }
+
+ /**
+ * Processes the ReferenceFieldAttribute objects contained in list.
+ *
+ * Returns a List containing all the objects in the argument list except in the case of
+ * keys that are ReferenceFieldAttribute objects, which are maintained but also
+ * ((ReferenceFieldAttribute)object).getAttr() is added. Duplicates are skipped, so the
+ * result behaves like an ordered set.
+ *
+ *
+ * @param list the list to process; may be null
+ * @return a List containing the processed objects, or null if the input was null
+ */
+ public List> processReferenceDataFieldAttributes(final List> list) {
+ if (list == null) {
+ return null;
+ }
+ final List res = new ArrayList<>();
+ for (final Object ob : list) {
+ // Add the attribute
+ if (!res.contains(ob)) {
+ res.add(ob);
+ }
+ // If the attribute is ReferenceFieldAttribute also add its plain attr string
+ if ((ob instanceof ReferenceFieldAttribute) && !res.contains(((ReferenceFieldAttribute) ob).getAttr())) {
+ res.add(((ReferenceFieldAttribute) ob).getAttr());
+ }
+ }
+ return res;
+ }
+
+ /**
+ * Returns a list containing the valid attributes of those included in the List
+ * attributes
+ *
+ * If valid column names have been specified for this entity, only attributes matching
+ * (case-sensitive) any of this column names are considered valid.
+ *
+ * If no columns have been defined, all attributes will be considered valid.
+ *
+ * NOTE(review): attributes that are neither String nor DBFunctionName are silently dropped.
+ *
+ * @param attributes the attributes
+ * @param validColumns the valid columns; may be null (treated as "no restriction")
+ * @return a List with the valid attributes
+ */
+ public List> getValidAttributes(final List> attributes, List validColumns) {
+ List inputValidColumns = validColumns == null ? (List) Collections.EMPTY_LIST : validColumns;
+ final List validAttributes = new ArrayList<>();
+ for (final Object ob : attributes) {
+ if ((ob instanceof String) || (ob instanceof DBFunctionName)) {
+ boolean isValid = true;
+ if (ob instanceof String) {
+ // DBFunctionName entries are always accepted; only plain strings are filtered
+ isValid = this.isInstanceOfStringValid(ob, inputValidColumns);
+ }
+ if (isValid) {
+ validAttributes.add(ob);
+ }
+ }
+ }
+ return validAttributes;
+ }
+
+ /**
+ * Decides whether a String attribute is a valid column reference.
+ *
+ * The special EXPRESSION_KEY marker is always valid; otherwise the attribute must be in
+ * inputValidColumns (when that list is non-empty) and pass {@link #isColumnNameValid}.
+ *
+ * @param ob the candidate attribute (expected to be a String)
+ * @param inputValidColumns allowed column names; empty means "no restriction"
+ * @return true if the attribute may be used in the query
+ */
+ public boolean isInstanceOfStringValid(Object ob, List inputValidColumns) {
+ boolean isValid;
+ if (SQLStatementBuilder.ExtendedSQLConditionValuesProcessor.EXPRESSION_KEY.equals(ob)) {
+ isValid = true;
+ } else if (!inputValidColumns.isEmpty() && !inputValidColumns.contains(ob)) {
+ isValid = false;
+ } else {
+ isValid = this.isColumnNameValid((String) ob);
+ }
+ return isValid;
+ }
+
+ /**
+ * Checks if is column name valid: names containing a space or '*' are rejected
+ * (guards against SQL fragments and wildcard selects).
+ *
+ * @param ob the candidate column name
+ * @return true, if is column name valid
+ */
+ protected boolean isColumnNameValid(String ob) {
+ boolean notValid = ob.contains(" ") || ob.contains("*");
+ return !notValid;
+ }
+
+ /**
+ * Returns a cleaned map containing the valid pairs of those included in the map
+ * inputKeysValues
+ *
+ * If valid column names have been specified for this entity/query, only attributes matching
+ * (case-sensitive) any of this column names are considered valid. The EXPRESSION_KEY entry
+ * is always kept.
+ *
+ * If no columns have been defined, the input map is returned unchanged (same instance).
+ *
+ * Returns cleaned keys values to do query according valid columns (if defined).
+ *
+ * @param inputKeysValues the filtering key-value pairs supplied by the caller
+ * @param validColumns allowed column names; null or empty disables filtering
+ * @return the filtered map (a new map when filtering applies, otherwise the input instance)
+ */
+ protected Map getValidQueryingKeysValues(Map inputKeysValues, List validColumns) {
+ if ((validColumns == null) || validColumns.isEmpty()) {
+ return inputKeysValues;
+ }
+ final Map hValidKeysValues = new HashMap<>();
+ for (Map.Entry entry : inputKeysValues.entrySet()) {
+ if (SQLStatementBuilder.ExtendedSQLConditionValuesProcessor.EXPRESSION_KEY.equals(entry.getKey()) || validColumns.contains(entry.getKey())) {
+ hValidKeysValues.put(entry.getKey(), entry.getValue());
+ }
+ }
+ OntimizeJdbcDaoSupport.logger.debug(" Query valid keys values: Input: {} -> Result: {}", inputKeysValues, hValidKeysValues);
+ return hValidKeysValues;
+ }
+
+ /**
+ * Processes the MultipleValue objects contained in keysValues. Returns a new HashMap
+ * with the same data as keysValues except that MultipleValue objects are deleted and
+ * the key-value pairs of these objects are added to the new HashMap.
+ *
+ * NOTE(review): when a value is a MultipleValue, its own outer key is discarded — only the
+ * inner pairs are copied. Inner keys may overwrite same-named entries already in the result.
+ *
+ * @param keysValues the keys values; may be null
+ * @return a new HashMap with MultipleValue objects replaced by their key-value pairs, or
+ * null if the input was null
+ */
+ public Map, ?> processMultipleValueAttributes(final Map, ?> keysValues) {
+ if (keysValues == null) {
+ return null;
+ }
+ final Map res = new HashMap<>();
+ for (final Map.Entry, ?> entry : keysValues.entrySet()) {
+ final Object oKey = entry.getKey();
+ final Object oValue = entry.getValue();
+ if (oValue instanceof MultipleValue) {
+ // Flatten: copy each inner pair of the MultipleValue into the result
+ final Enumeration> mvKeys = ((MultipleValue) oValue).keys();
+ while (mvKeys.hasMoreElements()) {
+ final Object iMvKeyM = mvKeys.nextElement();
+ final Object oMvValue = ((MultipleValue) oValue).get(iMvKeyM);
+ res.put(iMvKeyM, oMvValue);
+ }
+ } else {
+ res.put(oKey, oValue);
+ }
+ }
+ return res;
+ }
+
+ /**
+ * Processes the keys in order to get String as column name: every key is converted with
+ * toString() while values are kept unchanged.
+ *
+ * @param keysValues the keys values; may be null
+ * @return a new HashMap with stringified keys, or null if the input was null
+ */
+ public Map processStringKeys(final Map, ?> keysValues) {
+ if (keysValues == null) {
+ return null;
+ }
+ final Map res = new HashMap<>();
+ for (final Map.Entry, ?> entry : keysValues.entrySet()) {
+ final Object oKey = entry.getKey();
+ final Object oValue = entry.getValue();
+ res.put(oKey.toString(), oValue);
+ }
+ return res;
+ }
+
+ /**
+ * Get the name of the table managed by this DAO. Getters in this section call
+ * checkCompiled() first, so reading a property may trigger lazy compilation.
+ *
+ * @return the table name
+ */
+ public String getTableName() {
+ this.checkCompiled();
+ return this.tableMetaDataContext.getTableName();
+ }
+
+ // -------------------------------------------------------------------------
+ // Methods dealing with configuration properties.
+ // Setters are guarded by checkIfConfigurationModificationIsAllowed(): once the
+ // DAO is compiled, configuration changes throw (see reload() for the reset path).
+ // -------------------------------------------------------------------------
+
+ /**
+ * Set the name of the table managed by this DAO.
+ *
+ * @param tableName the new table name
+ */
+ public void setTableName(final String tableName) {
+ this.checkIfConfigurationModificationIsAllowed();
+ this.tableMetaDataContext.setTableName(tableName);
+ }
+
+ /**
+ * Get the name of the schema, if one was configured.
+ *
+ * @return the schema name
+ */
+ public String getSchemaName() {
+ this.checkCompiled();
+ return this.tableMetaDataContext.getSchemaName();
+ }
+
+ /**
+ * Set the name of the schema. Empty strings are normalized to null.
+ *
+ * @param schemaName the new schema name
+ */
+ public void setSchemaName(final String schemaName) {
+ this.checkIfConfigurationModificationIsAllowed();
+ this.tableMetaDataContext.setSchemaName(StringTools.isEmpty(schemaName) ? null : schemaName);
+ }
+
+ /**
+ * Get the name of the catalog, if one was configured.
+ *
+ * @return the catalog name
+ */
+ public String getCatalogName() {
+ this.checkCompiled();
+ return this.tableMetaDataContext.getCatalogName();
+ }
+
+ /**
+ * Set the name of the catalog. Empty strings are normalized to null.
+ *
+ * @param catalogName the new catalog name
+ */
+ public void setCatalogName(final String catalogName) {
+ this.checkIfConfigurationModificationIsAllowed();
+ this.tableMetaDataContext.setCatalogName(StringTools.isEmpty(catalogName) ? null : catalogName);
+ }
+
+ /**
+ * Get the names of the declared columns, as an unmodifiable view.
+ *
+ * @return the column names
+ */
+ public List getColumnNames() {
+ this.checkCompiled();
+ return Collections.unmodifiableList(this.declaredColumns);
+ }
+
+ /**
+ * Set the names of the columns to be used, replacing any previously declared columns.
+ *
+ * @param columnNames the new column names
+ */
+ public void setColumnNames(final List columnNames) {
+ this.checkIfConfigurationModificationIsAllowed();
+ this.declaredColumns.clear();
+ this.declaredColumns.addAll(columnNames);
+ }
+
+ /**
+ * Get the names of any generated keys.
+ *
+ * NOTE(review): returns the internal array directly — callers could mutate it.
+ *
+ * @return the generated key names
+ */
+ public String[] getGeneratedKeyNames() {
+ this.checkCompiled();
+ return this.generatedKeyNames;
+ }
+
+ /**
+ * Set the names of any generated keys.
+ *
+ * @param generatedKeyNames the new generated key names
+ */
+ public void setGeneratedKeyNames(final String[] generatedKeyNames) {
+ this.checkIfConfigurationModificationIsAllowed();
+ this.generatedKeyNames = generatedKeyNames;
+ }
+
+ /**
+ * Specify the name of a single generated key column; null clears the configuration
+ * (an empty array, never a null array).
+ *
+ * @param generatedKeyName the new generated key name
+ */
+ public void setGeneratedKeyName(final String generatedKeyName) {
+ this.checkIfConfigurationModificationIsAllowed();
+ if (generatedKeyName == null) {
+ this.generatedKeyNames = new String[]{};
+ } else {
+ this.generatedKeyNames = new String[]{generatedKeyName};
+ }
+ }
+
+ /**
+ * Specify whether the table column metadata should be accessed. The default is true.
+ * No compiled-state guard here: delegates straight to the metadata context.
+ *
+ * @param accessTableColumnMetaData the new access table column meta data
+ */
+ public void setAccessTableColumnMetaData(final boolean accessTableColumnMetaData) {
+ this.tableMetaDataContext.setAccessTableColumnMetaData(accessTableColumnMetaData);
+ }
+
+ /**
+ * Specify whether the default for including synonyms should be changed. The default is false.
+ *
+ * @param override the new override include synonyms default
+ */
+ public void setOverrideIncludeSynonymsDefault(final boolean override) {
+ this.tableMetaDataContext.setOverrideIncludeSynonymsDefault(override);
+ }
+
+ /**
+ * Compile this DAO using provided parameters and meta data plus other settings. This
+ * finalizes the configuration for this object and subsequent attempts to compile are ignored.
+ * This will be implicitly called the first time an un-compiled DAO is executed
+ * (via checkCompiled()).
+ *
+ * Order matters: the @ConfigurationFile annotation (when present) overrides the configured
+ * file/placeholder paths BEFORE loadConfigurationFile runs, and compiled is only set to
+ * true after compileInternal() succeeds.
+ *
+ * @throws InvalidDataAccessApiUsageException if the object hasn't been correctly initialized, for
+ * example if no DataSource has been provided
+ */
+ public synchronized final void compile() throws InvalidDataAccessApiUsageException {
+ if (!this.isCompiled()) {
+ final ConfigurationFile annotation = this.getClass().getAnnotation(ConfigurationFile.class);
+ if (annotation != null) {
+ this.configurationFile = annotation.configurationFile();
+ this.configurationFilePlaceholder = annotation.configurationFilePlaceholder();
+ }
+ this.loadConfigurationFile(this.configurationFile, this.configurationFilePlaceholder);
+
+ if (this.getJdbcTemplate() == null) {
+ throw new IllegalArgumentException("'dataSource' or 'jdbcTemplate' is required");
+ }
+
+ if (this.tableMetaDataContext.getTableName() == null) {
+ throw new InvalidDataAccessApiUsageException("Table name is required");
+ }
+ try {
+ this.getJdbcTemplate().afterPropertiesSet();
+ } catch (final IllegalArgumentException ex) {
+ throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex);
+ }
+ this.compileInternal();
+ this.compiled = true;
+ if (OntimizeJdbcDaoSupport.logger.isDebugEnabled()) {
+ OntimizeJdbcDaoSupport.logger.debug("JdbcInsert for table [{}] compiled", this.getTableName());
+ }
+ }
+ }
+
+
+ // -------------------------------------------------------------------------
+ // Methods handling compilation issues
+ // -------------------------------------------------------------------------
+
+ /**
+ * Marks the DAO for recompilation: clears the compiled flag first so the setters below
+ * pass the checkIfConfigurationModificationIsAllowed() guard, then resets all configuration.
+ * NOTE(review): this.getTableName() in the log call runs checkCompiled() and may itself
+ * trigger a compile on a not-yet-compiled DAO — confirm that is intended.
+ */
+ @Override
+ public void reload() {
+ OntimizeJdbcDaoSupport.logger.debug("dao {} - {} marked to recompile", this.getClass().getName(), this.getTableName());
+ this.compiled = false;
+ this.setTableName(null);
+ this.setSchemaName(null);
+ this.setCatalogName(null);
+ this.setDeleteKeys(null);
+ this.setUpdateKeys(null);
+ this.sqlQueries.clear();
+ this.setGeneratedKeyName(null);
+ this.setStatementHandler(null);
+ this.setNameConverter(null);
+ }
+
+ /**
+ * Load the XML configuration file from the context classloader, optionally filtering it
+ * through a placeholder-properties file, unmarshal it with JAXB and apply every setting
+ * (table, schema, catalog, keys, queries, datasource, SQL handler, name converter).
+ *
+ * NOTE(review): only the InputStream is managed by try-with-resources; the Reader created
+ * by createReaderConfigurationFile is never closed (harmless for in-memory readers, but
+ * worth confirming). Also, getDeleteKeys()/getUpdateKeys() are dereferenced without null
+ * checks — a configuration file lacking those elements would raise a NullPointerException
+ * here rather than the wrapped InvalidDataAccessApiUsageException.
+ *
+ * @param path the classpath location of the configuration file
+ * @param pathToPlaceHolder the classpath location of the placeholder properties, or null
+ * @throws InvalidDataAccessApiUsageException if reading the configuration fails with an IOException
+ */
+ protected void loadConfigurationFile(final String path, final String pathToPlaceHolder) throws InvalidDataAccessApiUsageException {
+
+ try (InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(path)) {
+ Reader reader = null;
+ reader = this.createReaderConfigurationFile(pathToPlaceHolder, is);
+
+ JdbcEntitySetupType baseSetup = JAXB.unmarshal(reader, JdbcEntitySetupType.class);
+
+ // Support to Dao extensions
+ JdbcEntitySetupType setupConfig = this.checkDaoExtensions(baseSetup, path, pathToPlaceHolder);
+
+ // Process setup information to configure dao
+ this.setTableName(setupConfig.getTable());
+ this.setSchemaName(setupConfig.getSchema());
+ this.setCatalogName(setupConfig.getCatalog());
+ this.setDeleteKeys(setupConfig.getDeleteKeys().getColumn());
+ this.setUpdateKeys(setupConfig.getUpdateKeys().getColumn());
+ this.setupsConfigureQueries(setupConfig);
+ this.setGeneratedKeyName(setupConfig.getGeneratedKey());
+ this.setDataSource((DataSource) this.applicationContext.getBean(setupConfig.getDatasource()));
+ this.setStatementHandler((SQLStatementHandler) this.applicationContext.getBean(setupConfig.getSqlhandler()));
+
+ final String nameConverter = setupConfig.getNameconverter();
+ if (!CheckingTools.isStringEmpty(nameConverter)) {
+ this.setNameConverter((INameConverter) this.applicationContext.getBean(nameConverter));
+ }
+ this.tableMetaDataContext.setNameConvention(this.nameConvention);
+ } catch (final IOException e) {
+ throw new InvalidDataAccessApiUsageException(I18NNaming.M_ERROR_LOADING_CONFIGURATION_FILE, e);
+ }
+
+ }
+
+ /**
+ * Setups the queries defined in configuration file. Missing optional sections
+ * (ambiguous/function/order columns) are passed as null; missing valid columns become an
+ * empty list, which downstream means "no column restriction".
+ * @param setupConfig the unmarshalled configuration
+ */
+ protected void setupsConfigureQueries(JdbcEntitySetupType setupConfig) {
+ if (setupConfig.getQueries() != null) {
+ for (final QueryType query : setupConfig.getQueries().getQuery()) {//
+ this.addQueryTemplateInformation(query.getId(), query.getSentence().getValue(), //
+ query.getAmbiguousColumns() == null ? null : query.getAmbiguousColumns().getAmbiguousColumn(), //
+ query.getFunctionColumns() == null ? null : query.getFunctionColumns().getFunctionColumn(), //
+ query.getValidColumns() != null ? query.getValidColumns().getColumn() : new ArrayList(), //
+ query.getOrderColumns() == null ? null : query.getOrderColumns().getOrderColumn());
+ }
+ }
+ }
+
+ /**
+ * Wraps the configuration stream in a Reader, substituting ${...}-style tokens from the
+ * placeholder properties file when one is configured (best effort: a missing placeholder
+ * resource simply yields an empty substitution map).
+ *
+ * @param pathToPlaceHolder classpath location of the placeholder properties, or null
+ * @param is the open configuration stream (owned by the caller)
+ * @return a Reader over the (possibly token-filtered) configuration
+ * @throws IOException if the placeholder properties cannot be read
+ */
+ protected Reader createReaderConfigurationFile(String pathToPlaceHolder, InputStream is) throws IOException {
+ Reader reader;
+ if (pathToPlaceHolder != null) {
+ try (InputStream isPlaceHolder = Thread.currentThread().getContextClassLoader().getResourceAsStream(pathToPlaceHolder)) {
+ final Properties prop = new Properties();
+ if (isPlaceHolder != null) {
+ prop.load(isPlaceHolder);
+ }
+
+ Map mapProperties = prop.stringPropertyNames().stream().collect(Collectors.toMap(Function.identity(), prop::getProperty));
+ reader = new ReplaceTokensFilterReader(new InputStreamReader(is), mapProperties);
+ }
+ } else {
+ reader = new InputStreamReader(is);
+ }
+ return reader;
+ }
+
+ /**
+ * Gives the DAO-extension helper (when present) a chance to post-process the unmarshalled
+ * configuration; without a helper the base setup is returned untouched.
+ *
+ * @param baseSetup the configuration as read from the file
+ * @param path the configuration file path (forwarded for context)
+ * @param pathToPlaceHolder the placeholder file path (forwarded for context)
+ * @return the possibly-extended configuration
+ */
+ protected JdbcEntitySetupType checkDaoExtensions(JdbcEntitySetupType baseSetup, final String path, final String pathToPlaceHolder) {
+ if (this.daoExtensionHelper == null) {
+ return baseSetup;
+ }
+ return this.daoExtensionHelper.checkDaoExtensions(baseSetup, path, pathToPlaceHolder);
+ }
+
+ /**
+ * Gets the name converter used to translate bean property names; forces lazy
+ * compilation first.
+ *
+ * @return the bean property converter
+ */
+ public INameConverter getNameConverter() {
+ this.checkCompiled();
+ return this.nameConverter;
+ }
+
+ /**
+ * Sets the bean property converter. No compiled-state guard — callable at any time.
+ *
+ * @param converter the new bean property converter
+ */
+ protected void setNameConverter(final INameConverter converter) {
+ this.nameConverter = converter;
+ }
+
+ /**
+ * Gets the configuration file path.
+ *
+ * @return the configuration file
+ */
+ public String getConfigurationFile() {
+ return this.configurationFile;
+ }
+
+ /**
+ * Sets the configuration file path (synchronized to pair with the synchronized compile()).
+ *
+ * @param configurationFile the new configuration file
+ */
+ public synchronized void setConfigurationFile(final String configurationFile) {
+ this.configurationFile = configurationFile;
+ }
+
+ /**
+ * Gets the configuration file placeholder path.
+ *
+ * @return the configuration file placeholder
+ */
+ public String getConfigurationFilePlaceholder() {
+ return this.configurationFilePlaceholder;
+ }
+
+ /**
+ * Sets the configuration file placeholder path.
+ *
+ * @param configurationFilePlaceholder the new configuration file placeholder
+ */
+ public synchronized void setConfigurationFilePlaceholder(final String configurationFilePlaceholder) {
+ this.configurationFilePlaceholder = configurationFilePlaceholder;
+ }
+
+ /**
+ * Gets the active naming convention.
+ *
+ * @return the name convention
+ */
+ public INameConvention getNameConvention() {
+ return this.nameConvention;
+ }
+
+ /**
+ * Sets the naming convention. Only propagated to the metadata context during
+ * loadConfigurationFile, not here.
+ *
+ * @param nameConvention the new name convention
+ */
+ public void setNameConvention(INameConvention nameConvention) {
+ this.nameConvention = nameConvention;
+ }
+
+ /**
+ * Check dao config. Intentionally a no-op override: the JdbcTemplate is not needed at
+ * bean-initialization time because this DAO compiles lazily.
+ */
+ @Override
+ protected void checkDaoConfig() {
+ // no need of jdbctemplate at this point
+ }
+
+ /**
+ * Adds a query template. Convenience overload that delegates with an empty valid-columns
+ * list, which downstream means "no column restriction".
+ *
+ * @param id the id
+ * @param value the value
+ * @param ambiguousColumns the ambiguous columns
+ * @param functionColumns the function columns
+ * @param orderColumns the order columns
+ */
+ public void addQueryTemplateInformation(final String id, final String value, final List ambiguousColumns, final List functionColumns, List orderColumns) {
+ this.addQueryTemplateInformation(id, value, ambiguousColumns, functionColumns, new ArrayList<>(), orderColumns);
+ }
+
+ /**
+ * Adds a query, allowing determine valid columns to query to DB. Registering an id that
+ * already exists silently replaces the previous template.
+ *
+ * @param id the template identifier
+ * @param value the SQL sentence template
+ * @param ambiguousColumns columns needing disambiguation, or null
+ * @param functionColumns function column definitions, or null
+ * @param validColumns allowed columns; empty means unrestricted
+ * @param orderColumns default ordering columns, or null
+ */
+ public void addQueryTemplateInformation(final String id, final String value, final List ambiguousColumns, final List functionColumns, List validColumns, List orderColumns) {
+ this.sqlQueries.put(id, new QueryTemplateInformation(value, ambiguousColumns, functionColumns, validColumns, orderColumns));
+ }
+
+ /**
+ * Gets the template query registered under the given id; forces lazy compilation first.
+ *
+ * @param id the id
+ * @return the template query, or null when no template has that id
+ */
+ public QueryTemplateInformation getQueryTemplateInformation(final String id) {
+ this.checkCompiled();
+ return this.sqlQueries.get(id);
+ }
+
+ /**
+ * Method to perform the actual compilation: resolves table metadata for the declared
+ * columns and generated keys, then invokes the subclass hook. Subclasses can override this
+ * template method to perform their own compilation. Invoked after this base class's
+ * compilation is complete.
+ */
+ protected void compileInternal() {
+ this.tableMetaDataContext.processMetaData(this.getJdbcTemplate().getDataSource(), this.declaredColumns, this.generatedKeyNames);
+ this.onCompileInternal();
+ }
+
+ /**
+ * Hook method that subclasses may override to react to compilation. This implementation does
+ * nothing.
+ */
+ protected void onCompileInternal() {
+ // This implementation does nothing.
+ }
+
+ /**
+ * Is this operation "compiled"?.
+ *
+ * @return whether this operation is compiled, and ready to use.
+ */
+ public boolean isCompiled() {
+ return this.compiled;
+ }
+
+ /**
+ * Check whether this operation has been compiled already; lazily compile it if not already
+ * compiled.
+ *
+ * Automatically called by {@code validateParameters}.
+ */
+ public void checkCompiled() {
+ if (!this.isCompiled()) {
+ OntimizeJdbcDaoSupport.logger.debug("JdbcInsert not compiled before execution - invoking compile");
+ this.compile();
+ }
+ }
+
+ /**
+ * Method to check whether we are allowed to make any configuration changes at this time. If the
+ * class has been compiled, then no further changes to the configuration are allowed.
+ */
+ protected void checkIfConfigurationModificationIsAllowed() {
+ if (this.isCompiled()) {
+ throw new InvalidDataAccessApiUsageException("Configuration can't be altered once the class has been compiled or used");
+ }
+ }
+
+ /**
+ * Method that provides execution of the insert using the passed in Map of parameters:
+ * matches the arguments against the insert columns, then runs the statement.
+ *
+ * @param args Map with parameter names and values to be used in insert
+ * @return number of rows affected
+ */
+ protected int doExecuteInsert(final Map args) {
+ this.checkCompiled();
+ final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(args);
+ return this.executeInsertInternal(holder);
+ }
+
+ // -------------------------------------------------------------------------
+ // Methods handling execution
+ // -------------------------------------------------------------------------
+
+ /**
+ * Method that provides execution of the insert using the passed in.
+ *
+ * @param parameterSource parameter names and values to be used in insert
+ * @return number of rows affected {@link SqlParameterSource}
+ */
+ protected int doExecuteInsert(final SqlParameterSource parameterSource) {
+ this.checkCompiled();
+ final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(parameterSource);
+ return this.executeInsertInternal(holder);
+ }
+
+ /**
+ * Method to execute the insert: delegates to JdbcTemplate.update with the pre-built SQL,
+ * values and JDBC types from the holder.
+ *
+ * @param holder the matched insert statement, values and types
+ * @return the number of rows affected
+ */
+ protected int executeInsertInternal(InsertMetaInfoHolder holder) {
+ OntimizeJdbcDaoSupport.logger.debug("The following parameters are used for insert {} with: {}", holder.getInsertString(), holder.getValues());
+ return this.getJdbcTemplate().update(holder.getInsertString(), holder.getValues().toArray(), holder.getInsertTypes());
+ }
+
+ /**
+ * Method that provides execution of the insert using the passed in Map of parameters and returning
+ * a generated key.
+ *
+ * @param args Map with parameter names and values to be used in insert
+ * @return the key generated by the insert
+ */
+ protected Object doExecuteInsertAndReturnKey(final Map args) {
+ this.checkCompiled();
+ final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(args);
+ return this.executeInsertAndReturnKeyInternal(holder);
+ }
+
+ /**
+ * Method that provides execution of the insert using the passed in.
+ *
+ * @param parameterSource parameter names and values to be used in insert
+ * @return the key generated by the insert {@link SqlParameterSource} and returning a generated key
+ */
+ protected Object doExecuteInsertAndReturnKey(final SqlParameterSource parameterSource) {
+ this.checkCompiled();
+ final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(parameterSource);
+ return this.executeInsertAndReturnKeyInternal(holder);
+ }
+
+ /**
+ * Method that provides execution of the insert using the passed in Map of parameters and returning
+ * all generated keys.
+ *
+ * @param args Map with parameter names and values to be used in insert
+ * @return the KeyHolder containing keys generated by the insert
+ */
+ protected KeyHolder doExecuteInsertAndReturnKeyHolder(final Map args) {
+ this.checkCompiled();
+ final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(args);
+ return this.executeInsertAndReturnKeyHolderInternal(holder);
+ }
+
+ /**
+ * Method that provides execution of the insert using the passed in.
+ *
+ * @param parameterSource parameter names and values to be used in insert
+ * @return the KeyHolder containing keys generated by the insert {@link SqlParameterSource} and
+ * returning all generated keys
+ */
+ protected KeyHolder doExecuteInsertAndReturnKeyHolder(final SqlParameterSource parameterSource) {
+ this.checkCompiled();
+ final InsertMetaInfoHolder holder = this.matchInParameterValuesWithInsertColumns(parameterSource);
+ return this.executeInsertAndReturnKeyHolderInternal(holder);
+ }
+
+ /**
+ * Method to execute the insert generating a single key: runs the KeyHolder variant and
+ * unwraps the first key.
+ *
+ * @param holder the matched insert statement, values and types
+ * @return the generated key
+ * @throws DataIntegrityViolationException if no key could be retrieved after the insert
+ */
+ protected Object executeInsertAndReturnKeyInternal(final InsertMetaInfoHolder holder) {
+ final KeyHolder kh = this.executeInsertAndReturnKeyHolderInternal(holder);
+ if ((kh != null) && (kh.getKeyAs(Object.class) != null)) {
+ return kh.getKeyAs(Object.class);
+ }
+ throw new DataIntegrityViolationException("Unable to retrieve the generated key for the insert: " + holder.getInsertString());
+ }
+
+ /**
+ * Method to execute the insert generating any number of keys. When the database does not
+ * support getGeneratedKeys natively, falls back to a simulated retrieval (RETURNING clause
+ * or a follow-up key query), which requires exactly one configured generated-key column.
+ *
+ * NOTE(review): the ternary building {@code tableName} is redundant (x != null ? x : null
+ * is just x); left untouched here since this pass is documentation-only. The simulated
+ * paths are not atomic with respect to concurrent inserts — see simulatingWithoutReturning.
+ *
+ * @param holder the matched insert statement, values and types
+ * @return the key holder with all generated keys
+ */
+ protected KeyHolder executeInsertAndReturnKeyHolderInternal(final InsertMetaInfoHolder holder) {
+ OntimizeJdbcDaoSupport.logger.debug("The following parameters are used for call {} with: {}", holder.getInsertString(), holder.getValues());
+ final KeyHolder keyHolder = new GeneratedKeyHolder();
+ if (!this.tableMetaDataContext.isGetGeneratedKeysSupported()) {
+ if (!this.tableMetaDataContext.isGetGeneratedKeysSimulated()) {
+ throw new InvalidDataAccessResourceUsageException("The getGeneratedKeys feature is not supported by this database");
+ }
+ if (this.getGeneratedKeyNames().length < 1) {
+ throw new InvalidDataAccessApiUsageException("Generated Key Name(s) not specificed. " + "Using the generated keys features requires specifying the name(s) of the generated column(s)");
+ }
+ if (this.getGeneratedKeyNames().length > 1) {
+ throw new InvalidDataAccessApiUsageException("Current database only supports retreiving the key for a single column. There are " + this.getGeneratedKeyNames().length + " columns specified: " + Arrays.asList(this.getGeneratedKeyNames()));
+ }
+
+ // This is a hack to be able to get the generated key from a
+ // database that doesn't support
+ // get generated keys feature. HSQL is one, PostgreSQL is another.
+ // Postgres uses a RETURNING
+ // clause while HSQL uses a second query that has to be executed
+ // with the same connection.
+
+ String tableName = this.tableMetaDataContext.getTableName() != null ? this.tableMetaDataContext.getTableName() : null;
+
+ if (tableName != null) {
+ final String keyQuery = this.tableMetaDataContext.getSimpleQueryForGetGeneratedKey(tableName, this.getGeneratedKeyNames()[0]);
+
+ Assert.notNull(keyQuery, "Query for simulating get generated keys can't be null");
+
+ if (keyQuery.toUpperCase().startsWith("RETURNING")) {
+ this.simulatingWithReturning(holder, keyQuery, keyHolder);
+ } else {
+ this.simulatingWithoutReturning(holder, keyQuery, keyHolder);
+ }
+ }
+ return keyHolder;
+ }
+ this.getJdbcTemplate().update(new PreparedStatementCreator() {
+
+ @Override
+ public PreparedStatement createPreparedStatement(final Connection con) throws SQLException {
+ // Native path: prepare the insert so the driver returns generated keys
+ final PreparedStatement ps = OntimizeJdbcDaoSupport.this.prepareInsertStatementForGeneratedKeys(con, holder.getInsertString());
+ OntimizeJdbcDaoSupport.this.setParameterValues(ps, holder.getValues(), holder.getInsertTypes());
+ return ps;
+ }
+ }, keyHolder);
+ return keyHolder;
+ }
+
+ protected void simulatingWithoutReturning(InsertMetaInfoHolder holder, String keyQuery, KeyHolder keyHolder) {
+ this.getJdbcTemplate().execute(new ConnectionCallback() {
+
+ @Override
+ public Object doInConnection(final Connection con) throws SQLException, DataAccessException {
+ // Do the insert
+ PreparedStatement ps = null;
+ try {
+ ps = con.prepareStatement(holder.getInsertString());
+ OntimizeJdbcDaoSupport.this.setParameterValues(ps, holder.getValues(), holder.getInsertTypes());
+ ps.executeUpdate();
+ } finally {
+ JdbcUtils.closeStatement(ps);
+ }
+ // Get the key
+ ResultSet rs = null;
+ final Map keys = new HashMap<>(1);
+ final Statement keyStmt = con.createStatement();
+ try {
+ rs = keyStmt.executeQuery(keyQuery);
+ if (rs.next()) {
+ final long key = rs.getLong(1);
+ keys.put(OntimizeJdbcDaoSupport.this.getGeneratedKeyNames()[0], key);
+ keyHolder.getKeyList().add(keys);
+ }
+ } finally {
+ JdbcUtils.closeResultSet(rs);
+ JdbcUtils.closeStatement(keyStmt);
+ }
+ return null;
+ }
+ });
+ }
+
+ protected void simulatingWithReturning(InsertMetaInfoHolder holder, String keyQuery, KeyHolder keyHolder) {
+ final Long key = this.getJdbcTemplate().queryForObject(holder.getInsertString() + " " + keyQuery, Long.class, holder.getValues().toArray(new Object[holder.getValues().size()]));
+ final Map keys = new HashMap<>(1);
+ keys.put(this.getGeneratedKeyNames()[0], key);
+ keyHolder.getKeyList().add(keys);
+ }
+
+ /**
+ * Create the PreparedStatement to be used for inserts that have generated keys.
+ *
+ * @param con the connection used
+ * @return PreparedStatement to use
+ * @throws SQLException the SQL exception
+ */
+ protected PreparedStatement prepareInsertStatementForGeneratedKeys(final Connection con, String insertString) throws SQLException {
+ if (this.getGeneratedKeyNames().length < 1) {
+ throw new InvalidDataAccessApiUsageException("Generated Key Name(s) not specificed. " + "Using the generated keys features requires specifying the name(s) of the generated column(s)");
+ }
+ PreparedStatement ps;
+ if (this.tableMetaDataContext.isGeneratedKeysColumnNameArraySupported()) {
+ OntimizeJdbcDaoSupport.logger.debug("Using generated keys support with array of column names.");
+ ps = con.prepareStatement(insertString, this.getGeneratedKeyNames());
+ } else {
+ OntimizeJdbcDaoSupport.logger.debug("Using generated keys support with Statement.RETURN_GENERATED_KEYS.");
+ ps = con.prepareStatement(insertString, Statement.RETURN_GENERATED_KEYS);
+ }
+ return ps;
+ }
+
+ /**
+ * Method that provides execution of a batch insert using the passed in Maps of parameters.
+ *
+ * @param batch array of Maps with parameter names and values to be used in batch insert
+ * @return array of number of rows affected
+ */
+ @Override
+ public int[] insertBatch(final Map[] batch) {
+ this.checkCompiled();
+ final List[] batchValues = new ArrayList[batch.length];
+ int i = 0;
+ for (final Map args : batch) {
+ final List values = this.matchInParameterValuesWithInsertColumnsForBatch(args);
+ batchValues[i++] = values;
+ }
+ return this.executeInsertBatchInternal(batchValues, this.tableMetaDataContext.createInsertString(this.getGeneratedKeyNames()), this.tableMetaDataContext.createInsertTypes());
+ }
+
+ /**
+ * Method that provides execution of a batch insert using the passed in array of
+ * {@link SqlParameterSource}.
+ *
+ * @param batch array of SqlParameterSource with parameter names and values to be used in insert
+ * @return array of number of rows affected
+ */
+ protected int[] doExecuteInsertBatch(final SqlParameterSource[] batch, final String insertString, final int[] insertTypes) {
+ this.checkCompiled();
+ final List[] batchValues = new ArrayList[batch.length];
+ int i = 0;
+ for (final SqlParameterSource parameterSource : batch) {
+ final List values = this.matchInParameterValuesWithInsertColumnsForBatch(parameterSource);
+ batchValues[i++] = values;
+ }
+ return this.executeInsertBatchInternal(batchValues, insertString, insertTypes);
+ }
+
+ /**
+ * Method to execute the batch insert.
+ *
+ * @param batchValues the batch values
+ * @return the int[]
+ */
+ protected int[] executeInsertBatchInternal(final List[] batchValues, final String insertString, final int[] insertTypes) {
+ OntimizeJdbcDaoSupport.logger.debug("Executing statement {} with batch of size: {}", insertString, batchValues.length);
+ return this.getJdbcTemplate().batchUpdate(insertString, new BatchPreparedStatementSetter() {
+
+ @Override
+ public void setValues(final PreparedStatement ps, final int i) throws SQLException {
+ final List values = batchValues[i];
+ OntimizeJdbcDaoSupport.this.setParameterValues(ps, values, insertTypes);
+ }
+
+ @Override
+ public int getBatchSize() {
+ return batchValues.length;
+ }
+ });
+ }
+
+ /**
+ * Internal implementation for setting parameter values.
+ *
+ * @param preparedStatement the PreparedStatement
+ * @param values the values to be set
+ * @param columnTypes the column types
+ * @throws SQLException the SQL exception
+ */
+ protected void setParameterValues(final PreparedStatement preparedStatement, final List values, final int[] columnTypes) throws SQLException {
+
+ int colIndex = 0;
+ for (Object value : values) {
+ colIndex++;
+ if ((columnTypes == null) || (colIndex > columnTypes.length)) {
+ StatementCreatorUtils.setParameterValue(preparedStatement, colIndex, SqlTypeValue.TYPE_UNKNOWN, value);
+ } else {
+ final int sqlType = columnTypes[colIndex - 1];
+ if (ObjectTools.isIn(sqlType, Types.BLOB, Types.BINARY, Types.VARBINARY) && ((value instanceof byte[]) || (value instanceof InputStream)))
+ this.parametersIfTheyAreBinary(preparedStatement, value, colIndex);
+ else if (value instanceof NullValue) {
+ // TODO At this point we could retrieve sqlType from ((NullValue)value).getSQLDataType()
+ // but it is preferable to use the sqlType retrieved from table metadata.
+ value = new SqlParameterValue(sqlType, null);
+ StatementCreatorUtils.setParameterValue(preparedStatement, colIndex, sqlType, value);
+ } else {
+ StatementCreatorUtils.setParameterValue(preparedStatement, colIndex, sqlType, value);
+ }
+ }
+ }
+ }
+
+ protected void parametersIfTheyAreBinary(PreparedStatement preparedStatement, Object value, int colIndex) throws SQLException {
+ if (value instanceof byte[]) {
+ preparedStatement.setBytes(colIndex, (byte[]) value);
+ } else {
+ try {
+ // TODO esto no esta soportado por los drivers jdbc 4.0
+ // TODO segun el driver puede ser que sea mas rapido llamar al metodo con la longitud
+ preparedStatement.setBlob(colIndex, (InputStream) value);
+ } catch (AbstractMethodError ex) {
+ OntimizeJdbcDaoSupport.logger.debug(null, ex);
+ try {
+ preparedStatement.setBinaryStream(colIndex, (InputStream) value, ((InputStream) value).available());
+ } catch (IOException error) {
+ throw new SQLException(error);
+ }
+ }
+ }
+ }
+
+ /**
+ * Match the provided in parameter values with registered parameters and parameters defined via
+ * metadata processing.
+ *
+ * @param parameterSource the parameter values provided as a {@link SqlParameterSource}
+ * @return Map with parameter names and values
+ */
+ protected InsertMetaInfoHolder matchInParameterValuesWithInsertColumns(final SqlParameterSource parameterSource) {
+ return this.tableMetaDataContext.getInsertMetaInfo(parameterSource);
+ }
+
+ /**
+ * Match the provided in parameter values with registered parameters and parameters defined via
+ * metadata processing.
+ *
+ * @param args the parameter values provided in a Map
+ * @return Map with parameter names and values
+ */
+ protected InsertMetaInfoHolder matchInParameterValuesWithInsertColumns(final Map args) {
+ return this.tableMetaDataContext.getInsertMetaInfo(args);
+ }
+
+ /**
+ * Match the provided in parameter values with registered parameters and parameters defined via
+ * metadata processing.
+ *
+ * @param parameterSource the parameter values provided as a {@link SqlParameterSource}
+ * @return Map with parameter names and values
+ */
+ protected List matchInParameterValuesWithInsertColumnsForBatch(final SqlParameterSource parameterSource) {
+ return this.tableMetaDataContext.matchInParameterValuesWithInsertColumns(parameterSource);
+ }
+
+ /**
+ * Match the provided in parameter values with registered parameters and parameters defined via
+ * metadata processing.
+ *
+ * @param args the parameter values provided in a Map
+ * @return Map with parameter names and values
+ */
+ protected List matchInParameterValuesWithInsertColumnsForBatch(final Map args) {
+ return this.tableMetaDataContext.matchInParameterValuesWithInsertColumns(args);
+ }
+
+ /**
+ * Gets the schema table.
+ *
+ * @return the schema table
+ */
+ protected String getSchemaTable() {
+ String sTableToUse = this.getTableName();
+ if (this.getSchemaName() != null) {
+ sTableToUse = this.getSchemaName() + "." + sTableToUse;
+ }
+ return sTableToUse;
+ }
+
+ /**
+ * Get the SQL statement builder.
+ *
+ * @return the statement builder
+ */
+ public SQLStatementHandler getStatementHandler() {
+ return this.statementHandler;
+ }
+
+ /**
+ * Establish SQL statement builder.
+ *
+ * @param statementHandler the new statement handler
+ */
+ public void setStatementHandler(final SQLStatementHandler statementHandler) {
+ this.statementHandler = statementHandler;
+ }
+
+ /**
+ * Gets the delete keys.
+ *
+ * @return the delete keys
+ */
+ public List getDeleteKeys() {
+ return this.deleteKeys;
+ }
+
+ /**
+ * Sets the delete keys.
+ *
+ * @param deleteKeys the new delete keys
+ */
+ public void setDeleteKeys(final List deleteKeys) {
+ this.deleteKeys = deleteKeys;
+ }
+
+ /**
+ * Gets the update keys.
+ *
+ * @return the update keys
+ */
+ public List getUpdateKeys() {
+ return this.updateKeys;
+ }
+
+ /**
+ * Sets the update keys.
+ *
+ * @param updateKeys the new update keys
+ */
+ public void setUpdateKeys(final List updateKeys) {
+ this.updateKeys = updateKeys;
+ }
+
+ /**
+ * Gets the application context.
+ *
+ * @return the application context
+ */
+ public ApplicationContext getApplicationContext() {
+ return this.applicationContext;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see org.springframework.context.ApplicationContextAware#setApplicationContext
+ * (org.springframework.context.ApplicationContext)
+ */
+
+ /**
+ * Sets the application context.
+ *
+ * @param applicationContext the new application context
+ * @throws BeansException the beans exception
+ */
+ @Override
+ public void setApplicationContext(final ApplicationContext applicationContext) throws BeansException {
+ this.applicationContext = applicationContext;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.ontimize.jee.server.dao.IOntimizeDaoSupport#getCudProperties()
+ */
+ @Override
+ public List getCudProperties() {
+ this.compile();
+ List res = new ArrayList<>();
+ for (TableParameterMetaData data : this.tableMetaDataContext.getTableParameters()) {
+ String name = data.getParameterName();
+ int type = data.getSqlType();
+ DaoProperty property = new DaoProperty();
+ property.setSqlType(type);
+ property.setPropertyName(name);
+ res.add(property);
+ }
+ return res;
+ }
+
+ /**
+ * Gets the table meta data context.
+ *
+ * @return the table meta data context
+ */
+ public OntimizeTableMetaDataContext getTableMetaDataContext() {
+ if (!this.tableMetaDataContext.isProcessed()) {
+ this.compile();
+ }
+ return this.tableMetaDataContext;
+ }
+
+ protected OntimizeTableMetaDataContext createTableMetadataContext() {
+ return new OntimizeTableMetaDataContext();
+ }
+
+ @Override
+ protected JdbcTemplate createJdbcTemplate(DataSource dataSource) {
+ OntimizeJdbcDaoSupport.logger.trace("Creating new JdbcTemplate with fetchSize=1000");
+ JdbcTemplate template = super.createJdbcTemplate(dataSource);
+ template.setFetchSize(1000);
+ OntimizeJdbcDaoSupport.logger.trace("Creating new JdbcTemplate has finally fetchSize={}", template.getFetchSize());
+ return template;
+ }
+
+ private Integer getColumnSQLType(final String column) {
+ if (!this.tableMetaDataContext.isProcessed()) {
+ this.compile();
+ }
+ for (final TableParameterMetaData data : this.tableMetaDataContext.getTableParameters()) {
+ if (column.equalsIgnoreCase(data.getParameterName())) {
+ return data.getSqlType();
+ }
+ }
+ return null;
+ }
+
+ protected static class SimpleScrollablePreparedStatementCreator implements PreparedStatementCreator, SqlProvider {
+
+ private final String sql;
+
+ public SimpleScrollablePreparedStatementCreator(String sql) {
+ Assert.notNull(sql, "SQL must not be null");
+ this.sql = sql;
+ }
+
+ @Override
+ public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
+ return con.prepareStatement(this.sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
+ }
+
+ @Override
+ public String getSql() {
+ return this.sql;
+ }
+ }
+}
\ No newline at end of file
diff --git a/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/extension/DefaultDaoExtensionHelper.java b/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/extension/DefaultDaoExtensionHelper.java
index fb4c5aeb..fb1fb88c 100644
--- a/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/extension/DefaultDaoExtensionHelper.java
+++ b/ontimize-jee-server-jdbc/src/main/java/com/ontimize/jee/server/dao/jdbc/extension/DefaultDaoExtensionHelper.java
@@ -8,10 +8,11 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
-import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
+import java.util.function.Function;
+import java.util.stream.Collectors;
import javax.xml.bind.JAXB;
@@ -139,8 +140,17 @@ protected Reader readWithPlaceHolders(InputStream is, final String pathToPlaceHo
if (isPlaceHolder != null) {
prop.load(isPlaceHolder);
}
+
+ Map mapProperties = prop.stringPropertyNames()
+ .stream()
+ .collect(Collectors.toMap(
+ Function.identity(),
+ prop::getProperty
+ ));
+
return new ReplaceTokensFilterReader(new InputStreamReader(is),
- new HashMap((Map) prop));
+ mapProperties);
+
}
} else {
return new InputStreamReader(is);
diff --git a/ontimize-jee-server-jdbc/src/test/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupportTest.java b/ontimize-jee-server-jdbc/src/test/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupportTest.java
index d5e16013..6d523ee2 100644
--- a/ontimize-jee-server-jdbc/src/test/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupportTest.java
+++ b/ontimize-jee-server-jdbc/src/test/java/com/ontimize/jee/server/dao/jdbc/OntimizeJdbcDaoSupportTest.java
@@ -122,7 +122,7 @@ void when_receive_keysValues_and_attributes_and_sort_and_queryId_and_queryAdapte
JdbcTemplate jdbcTemplate = Mockito.mock(JdbcTemplate.class);
Mockito.doReturn(new AdvancedEntityResultMapImpl(EntityResult.OPERATION_SUCCESSFUL, EntityResult.DATA_RESULT)).when(jdbcTemplate).query(Mockito.any(PreparedStatementCreator.class), Mockito.any(), Mockito.any());
- Mockito.doReturn(new EntityResultMapImpl()).when(jdbcTemplate).query(Mockito.any(String.class), (Object[]) Mockito.any(), Mockito.any(ResultSetExtractor.class));
+ Mockito.doReturn(new EntityResultMapImpl()).when(jdbcTemplate).query(Mockito.any(String.class), Mockito.any(ResultSetExtractor.class), (Object[]) Mockito.any());
ontimizeJdbcDaoSupport.setJdbcTemplate(jdbcTemplate);
ontimizeJdbcDaoSupport.setStatementHandler(new DefaultSQLStatementHandler());
ReflectionTestUtils.setField(ontimizeJdbcDaoSupport, "compiled", true);
diff --git a/ontimize-jee-server-keycloak/pom.xml b/ontimize-jee-server-keycloak/pom.xml
index ebb03e7f..0d51058f 100644
--- a/ontimize-jee-server-keycloak/pom.xml
+++ b/ontimize-jee-server-keycloak/pom.xml
@@ -4,7 +4,7 @@
com.ontimize.jeeontimize-jee
- 5.13.0
+ 5.14.0ontimize-jee-server-keycloak
diff --git a/ontimize-jee-server-rest/pom.xml b/ontimize-jee-server-rest/pom.xml
index 5c3444f1..515880a3 100644
--- a/ontimize-jee-server-rest/pom.xml
+++ b/ontimize-jee-server-rest/pom.xml
@@ -4,7 +4,7 @@
com.ontimize.jeeontimize-jee
- 5.13.0
+ 5.14.0ontimize-jee-server-rest
diff --git a/ontimize-jee-server/pom.xml b/ontimize-jee-server/pom.xml
index 11cac425..55683b1d 100644
--- a/ontimize-jee-server/pom.xml
+++ b/ontimize-jee-server/pom.xml
@@ -4,7 +4,7 @@
com.ontimize.jeeontimize-jee
- 5.13.0
+ 5.14.0ontimize-jee-server
diff --git a/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/Log4j2LoggerHelper.java b/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/Log4j2LoggerHelper.java
index 934809e7..6ac9ecb2 100644
--- a/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/Log4j2LoggerHelper.java
+++ b/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/Log4j2LoggerHelper.java
@@ -35,11 +35,12 @@ public class Log4j2LoggerHelper implements ILoggerHelper {
private static final Logger logger = LoggerFactory.getLogger(Log4j2LoggerHelper.class);
public Log4j2LoggerHelper() {
+ // Empty constructor required for framework instantiation and reflection.
+ // No initialization logic needed here.
}
@Override
public InputStream openLogStream() throws IOException {
- // TODO Auto-generated method stub
return null;
}
@@ -50,7 +51,7 @@ public EntityResult getLogFiles() throws Exception {
return new EntityResultMapImpl(EntityResult.OPERATION_SUCCESSFUL_SHOW_MESSAGE, EntityResult.NODATA_RESULT,
"No hay ficheros que mostrar");
}
- final EntityResult res = new EntityResultMapImpl(Arrays.asList(new String[] { "FILE_NAME", "FILE_SIZE" }));
+ final EntityResult res = new EntityResultMapImpl(Arrays.asList("FILE_NAME", "FILE_SIZE"));
Files.walkFileTree(folder, new java.nio.file.SimpleFileVisitor() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
@@ -62,17 +63,28 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO
return res;
}
+
+ /**
+ * @param fileName The name of the log file to retrieve.
+ * @return InputStream zipped with the content of the log file. This stream must be closed by the caller.
+ * @throws Exception
+ */
+ // Suppress resource leak warning because the PipedInputStream is returned to the caller.
+ @SuppressWarnings("java:S2095")
@Override
public InputStream getLogFileContent(String fileName) throws Exception {
Path folder = this.getLogFolder();
+ if (folder == null) {
+ throw new OntimizeJEEException("Folder not found");
+ }
final Path file = folder.resolve(fileName);
if (!Files.exists(file)) {
throw new OntimizeJEEException("File not found");
}
- final PipedInputStream pis = new PipedInputStream();
- final PipedOutputStream pos = new PipedOutputStream(pis);
+ final PipedInputStream inputStream = new PipedInputStream();
+ final PipedOutputStream outputStream = new PipedOutputStream(inputStream);
new Thread(() -> {
- try (ZipOutputStream zos = new ZipOutputStream(pos)) {
+ try (ZipOutputStream zos = new ZipOutputStream(outputStream)) {
zos.putNextEntry(new ZipEntry(file.getFileName().toString()));
StreamUtils.copy(Files.newInputStream(file), zos);
zos.closeEntry();
@@ -81,24 +93,22 @@ public InputStream getLogFileContent(String fileName) throws Exception {
}
}, "LoggerHelper copy stream").start();
- return pis;
+ return inputStream;
}
private Path getLogFolder() {
- for (Logger logger : LogManagerFactory.getLogManager().getLoggerList()) {
+ for (Logger log : LogManagerFactory.getLogManager().getLoggerList()) {
ILoggerFactory loggerFactory = LoggerFactory.getILoggerFactory();
- Map loggersToUse = this.getValidLoggersToUse(loggerFactory);
- org.apache.logging.log4j.core.Logger innerLogger = this.getInnerLogger(loggersToUse.get(logger.getName()));
+ Map loggersToUse = this.getValidLoggersToUse(loggerFactory);
+ org.apache.logging.log4j.core.Logger innerLogger = this.getInnerLogger(loggersToUse.get(log.getName()));
for (Appender appender : innerLogger.getAppenders().values()) {
if (appender instanceof FileAppender) {
Path file = Paths.get(((FileAppender) appender).getFileName());
- Path folder = file.getParent();
- return folder;
+ return file.getParent();
} else if (appender instanceof RollingFileAppender) {
Path file = Paths.get(((RollingFileAppender) appender).getFileName());
- Path folder = file.getParent();
- return folder;
+ return file.getParent();
}
}
}
@@ -111,13 +121,11 @@ private org.apache.logging.log4j.core.Logger getInnerLogger(Object logger2) {
return (org.apache.logging.log4j.core.Logger) Log4jManager.getReflectionFieldValue(logger2, "logger");
}
- // For some extrange reason, when a lloger is requested to logerFactory it gets from a "Default"
+ // For some strange reason, when a logger is requested to loggerFactory it gets from a "Default"
// context, and not from our own context.
- private Map getValidLoggersToUse(ILoggerFactory loggerFactory) {
- Map registry = (Map) Log4jManager.getReflectionFieldValue(loggerFactory,
+ private Map getValidLoggersToUse(ILoggerFactory loggerFactory) {
+ Map> registry = (Map>) Log4jManager.getReflectionFieldValue(loggerFactory,
"registry");
- Map loggersToUse = registry.get(org.apache.logging.log4j.core.LoggerContext.getContext(false));
- return loggersToUse;
+ return registry.get(org.apache.logging.log4j.core.LoggerContext.getContext(false));
}
-
}
diff --git a/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/LogbackLoggerHelper.java b/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/LogbackLoggerHelper.java
index 2c7c6484..1ff69398 100644
--- a/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/LogbackLoggerHelper.java
+++ b/ontimize-jee-server/src/main/java/com/ontimize/jee/server/services/management/LogbackLoggerHelper.java
@@ -230,6 +230,13 @@ private Path getLogFolder() {
return null;
}
+ /**
+ * @param fileName The name of the log file to retrieve.
+ * @return InputStream zipped with the content of the log file. This stream must be closed by the caller.
+ * @throws Exception
+ */
+ // Suppress resource leak warning because the PipedInputStream is returned to the caller.
+ @SuppressWarnings("java:S2095")
public InputStream getLogFileContent(String fileName) throws Exception {
Path folder = this.getLogFolder();
final Path file = folder.resolve(fileName);
diff --git a/ontimize-jee-server/src/main/java/com/ontimize/jee/server/spring/namespace/SharePreferencesBeanDefinitionParser.java b/ontimize-jee-server/src/main/java/com/ontimize/jee/server/spring/namespace/SharePreferencesBeanDefinitionParser.java
index 2ec4177f..8f019a6d 100644
--- a/ontimize-jee-server/src/main/java/com/ontimize/jee/server/spring/namespace/SharePreferencesBeanDefinitionParser.java
+++ b/ontimize-jee-server/src/main/java/com/ontimize/jee/server/spring/namespace/SharePreferencesBeanDefinitionParser.java
@@ -64,17 +64,18 @@ protected void doParse(Element element, ParserContext ctx, BeanDefinitionBuilder
// Set the directory property
Element item = DomUtils.getChildElementByTagName(element,
SharePreferencesBeanDefinitionParser.SHARE_PREF_ENGINE_PROPERTY);
- Element child = DomUtils.getChildElements(item).get(0);
Object engine = null;
-
- if (SharePreferencesBeanDefinitionParser.SHARE_PREF_DATABASE_CONFIGURATION_PROPERTY
- .equals(child.getLocalName())) {
- final ParserContext nestedCtx = new ParserContext(ctx.getReaderContext(), ctx.getDelegate(),
- builder.getBeanDefinition());
- engine = new DatabaseSharePreferencesParser().parse(child, nestedCtx);
- } else {
- engine = DefinitionParserUtil.parseNode(child, ctx, builder.getBeanDefinition(),
- element.getAttribute(SharePreferencesBeanDefinitionParser.SCOPE), false);
+ if (item != null) {
+ Element child = DomUtils.getChildElements(item).get(0);
+ if (SharePreferencesBeanDefinitionParser.SHARE_PREF_DATABASE_CONFIGURATION_PROPERTY
+ .equals(child.getLocalName())) {
+ final ParserContext nestedCtx = new ParserContext(ctx.getReaderContext(), ctx.getDelegate(),
+ builder.getBeanDefinition());
+ engine = new DatabaseSharePreferencesParser().parse(child, nestedCtx);
+ } else {
+ engine = DefinitionParserUtil.parseNode(child, ctx, builder.getBeanDefinition(),
+ element.getAttribute(SharePreferencesBeanDefinitionParser.SCOPE), false);
+ }
}
builder.addPropertyValue(SharePreferencesBeanDefinitionParser.ENGINE, engine);
builder.setLazyInit(true);
diff --git a/ontimize-jee-webclient-addons/pom.xml b/ontimize-jee-webclient-addons/pom.xml
index 88e379fb..7cdda91c 100644
--- a/ontimize-jee-webclient-addons/pom.xml
+++ b/ontimize-jee-webclient-addons/pom.xml
@@ -1,171 +1,180 @@
-
- 4.0.0
-
- com.ontimize.jee
- ontimize-jee
- 5.13.0
-
- ontimize-jee-webclient-addons
+
+ 4.0.0
+
+ com.ontimize.jee
+ ontimize-jee
+ 5.14.0
+
+ ontimize-jee-webclient-addons
- Ontimize EE (WebClient Addons module)
- Ontimize EE (WebClient Addons module)
- https://www.ontimize.com
+ Ontimize EE (WebClient Addons module)
+ Ontimize EE (WebClient Addons module)
+ https://www.ontimize.com
-
- Imatia Innovation
- http://imatia.com
-
+
+ Imatia Innovation
+ http://imatia.com
+
-
-
- The Apache License, Version 2.0
- http://www.apache.org/licenses/LICENSE-2.0.txt
-
-
+
+
+ The Apache License, Version 2.0
+ http://www.apache.org/licenses/LICENSE-2.0.txt
+
+
-
-
- Alberto Quintela Trabazos
- alberto.quintela@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Ángel Vázquez Vázquez
- angel.vazquez@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Daniel Graña Cousido
- daniel.grana@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Enrique Álvarez Pereira
- enrique.alvarez@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Faustino Lage Rego
- faustino.lage@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Gonzalo Martínez Fernández
- gonzalo.martinez@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Joaquín Romero Riveiro
- joaquin.romero@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Jorge Diaz Seijo
- jorge.diaz@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Pablo Martínez Kirsten
- pablo.martinez@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Senén Diéguez López
- senen.dieguez@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Tomás Fuentes Facal
- tomas.fuentes@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
- Xoán Loureiro Santamaría
- xoan.loureiro@imatia.com
- Imatia Innovation
- http://imatia.com
-
-
+
+
+ Alberto Quintela Trabazos
+ alberto.quintela@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Ángel Vázquez Vázquez
+ angel.vazquez@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Daniel Graña Cousido
+ daniel.grana@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Enrique Álvarez Pereira
+ enrique.alvarez@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Faustino Lage Rego
+ faustino.lage@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Gonzalo Martínez Fernández
+ gonzalo.martinez@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Joaquín Romero Riveiro
+ joaquin.romero@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Jorge Diaz Seijo
+ jorge.diaz@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Pablo Martínez Kirsten
+ pablo.martinez@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Senén Diéguez López
+ senen.dieguez@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Tomás Fuentes Facal
+ tomas.fuentes@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
+ Xoán Loureiro Santamaría
+ xoan.loureiro@imatia.com
+ Imatia Innovation
+ http://imatia.com
+
+
-
- scm:git:git://github.com/ontimize/ontimize-jee.git
- scm:git:ssh://github.com:ontimize/ontimize-jee.git
- https://github.com/ontimize/ontimize-jee/tree/master
-
+
+ scm:git:git://github.com/ontimize/ontimize-jee.git
+ scm:git:ssh://github.com:ontimize/ontimize-jee.git
+ https://github.com/ontimize/ontimize-jee/tree/master
+
-
-
- ${projectGroupId}
- ontimize-jee-server-rest
-
-
-
- javax.servlet
- javax.servlet-api
- true
-
-
-
- org.apache.poi
- poi
- true
-
-
- org.apache.poi
- poi-ooxml
- true
-
-
- org.apache.poi
- poi-ooxml-schemas
-
-
- xmlbeans
- org.apache.xmlbeans
-
-
- true
-
+
+
+ ${projectGroupId}
+ ontimize-jee-server-rest
+
+
+
+ javax.servlet
+ javax.servlet-api
+ true
+
+
+
+ org.apache.poi
+ poi
+ true
+
+
+ org.apache.poi
+ poi-ooxml
+ true
+
+
+ org.apache.poi
+ poi-ooxml-schemas
+
+
+ xmlbeans
+ org.apache.xmlbeans
+
+
+ true
+
-
- org.dom4j
- dom4j
- provided
-
-
- com.itextpdf
- kernel
-
-
- com.itextpdf
- layout
-
-
- com.ontimize.jee
- ontimize-jee-common
-
-
- org.apache.commons
- commons-collections4
-
-
- org.kie.modules
- org-junit
- pom
-
-
+
+ org.dom4j
+ dom4j
+ provided
+
+
+ com.itextpdf
+ kernel
+
+
+ com.itextpdf
+ layout
+
+
+ com.ontimize.jee
+ ontimize-jee-common
+
+
+ org.apache.commons
+ commons-collections4
+
+
+ org.kie.modules
+ org-junit
+ pom
+
+
+ com.github.erosb
+ everit-json-schema
+
+
+ com.github.victools
+ jsonschema-generator
+
+
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/export/util/ApplicationContextUtils.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/export/util/ApplicationContextUtils.java
index 8b277309..a134a7cb 100644
--- a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/export/util/ApplicationContextUtils.java
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/export/util/ApplicationContextUtils.java
@@ -27,52 +27,52 @@ public ApplicationContextUtils() {
public Object getServiceBean(final String serviceName , final String servicePath) throws ExportException {
- Object serviceBean = null;
-
- //Method 1. Retrieve bean from controller 'path'
- if(!StringUtils.isBlank(servicePath)) {
- RequestMappingHandlerMapping requestMappingHandlerMapping = applicationContext
- .getBean("requestMappingHandlerMapping", RequestMappingHandlerMapping.class);
- Map requestMap = requestMappingHandlerMapping.getHandlerMethods();
+ try {
+ Object serviceBean = null;
- List requestMapHandlerMethodList = requestMap.keySet().stream()
- .filter(key -> key.getActivePatternsCondition().toString().equals("[" + servicePath + "/{name}/search]"))
- .map(requestMap::get)
- .collect(Collectors.toList());
+ //Method 1. Retrieve bean from controller 'path'
+ if(!StringUtils.isBlank(servicePath)) {
+ RequestMappingHandlerMapping requestMappingHandlerMapping = applicationContext
+ .getBean("requestMappingHandlerMapping", RequestMappingHandlerMapping.class);
+ Map requestMap = requestMappingHandlerMapping.getHandlerMethods();
- if (requestMapHandlerMethodList.size() == 1) {
- Class> restControllerBeanType = requestMapHandlerMethodList.get(0).getBeanType();
- serviceBean = getBean(restControllerBeanType);
+ List requestMapHandlerMethodList = requestMap.keySet().stream()
+ .filter(key -> key.getActivePatternsCondition().toString().equals("[" + servicePath + "/{name}/search]"))
+ .map(requestMap::get)
+ .collect(Collectors.toList());
+
+ if (requestMapHandlerMethodList.size() == 1) {
+ Class> restControllerBeanType = requestMapHandlerMethodList.get(0).getBeanType();
+ serviceBean = getBean(restControllerBeanType);
+ }
}
- }
-
- //Method 2. Retrieve controller from service name and then the service bean
- if(serviceBean == null && !StringUtils.isBlank(serviceName)) {
- String[] beanNamesForType = applicationContext.getBeanNamesForType(ORestController.class);
- List restControllerNames = findCandidates(serviceName, beanNamesForType);
-
- if(restControllerNames.size() > 0) {
- if(restControllerNames.size() == 1) {
- serviceBean = getBeanForName(restControllerNames.get(0));
- } else {
- String beanName = this.fitBestControllerName(serviceName, restControllerNames);
- if(!StringUtils.isBlank(beanName)) {
- serviceBean = getBeanForName(beanName);
+
+ //Method 2. Retrieve controller from service name and then the service bean
+ if(serviceBean == null && !StringUtils.isBlank(serviceName)) {
+ String[] beanNamesForType = applicationContext.getBeanNamesForType(ORestController.class);
+ List restControllerNames = findCandidates(serviceName, beanNamesForType);
+
+ if(restControllerNames.size() > 0) {
+ if(restControllerNames.size() == 1) {
+ serviceBean = getBeanForName(restControllerNames.get(0));
+ } else {
+ String beanName = this.fitBestControllerName(serviceName, restControllerNames);
+ if(!StringUtils.isBlank(beanName)) {
+ serviceBean = getBeanForName(beanName);
+ }
}
}
}
+
+ // Method 3. Retrieve bean from service name
+ if(serviceBean == null) {
+ serviceBean = this.applicationContext.getBean(serviceName.concat("Service"));
+ }
+
+ return serviceBean;
+ } catch (BeansException e) {
+ throw new ExportException("Impossible to retrieve service to query data", e);
}
-
- // Method 3. Retrieve bean from service name
- if(serviceBean == null) {
- serviceBean = this.applicationContext.getBean(serviceName.concat("Service"));
- }
-
- if(serviceBean == null) {
- throw new ExportException("Impossible to retrieve service to query data");
- }
-
- return serviceBean;
}
@Override
@@ -84,9 +84,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws
private Object getBeanForName(final String beanName) {
Object bean = null;
ORestController> oRestController = applicationContext.getBean(beanName, ORestController.class);
- if (oRestController != null) {
- bean = oRestController.getService();
- }
+ bean = oRestController.getService();
return bean;
}
private Object getBean(final Class> beanClazz) {
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/client/OpenAIClient.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/client/OpenAIClient.java
new file mode 100644
index 00000000..5c84fbfd
--- /dev/null
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/client/OpenAIClient.java
@@ -0,0 +1,21 @@
+package com.ontimize.jee.webclient.openai.client;
+
+
+import com.ontimize.jee.webclient.openai.model.ProcessRequest;
+import com.ontimize.jee.webclient.openai.model.ProcessResult;
+import com.ontimize.jee.webclient.openai.service.OpenAiImageProcessorService;
+import com.ontimize.jee.webclient.openai.util.JsonSchemaValidator;
+
+public class OpenAIClient {
+
+ private final String apiKey;
+
+ public OpenAIClient(String apiKey) {
+ this.apiKey = apiKey;
+ }
+
+ public ProcessResult processImage(ProcessRequest request) {
+ OpenAiImageProcessorService service = new OpenAiImageProcessorService<>(this.apiKey, new JsonSchemaValidator());
+ return service.processImage(request);
+ }
+}
\ No newline at end of file
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/exception/OpenAIClientException.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/exception/OpenAIClientException.java
new file mode 100644
index 00000000..4f78bca3
--- /dev/null
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/exception/OpenAIClientException.java
@@ -0,0 +1,11 @@
+package com.ontimize.jee.webclient.openai.exception;
+
+public class OpenAIClientException extends RuntimeException {
+ public OpenAIClientException(String message) {
+ super(message);
+ }
+
+ public OpenAIClientException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
\ No newline at end of file
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/model/ProcessRequest.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/model/ProcessRequest.java
new file mode 100644
index 00000000..a7eef505
--- /dev/null
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/model/ProcessRequest.java
@@ -0,0 +1,70 @@
+package com.ontimize.jee.webclient.openai.model;
+
+import org.springframework.web.multipart.MultipartFile;
+
+public class ProcessRequest {
+
+ private MultipartFile file;
+ private String prompt;
+ private int retries;
+ private Class outputClass;
+ private String model;
+ private int maxTokens;
+ private double temperature;
+
+ public MultipartFile getFile() {
+ return file;
+ }
+
+ public void setFile(MultipartFile file) {
+ this.file = file;
+ }
+
+ public String getPrompt() {
+ return prompt;
+ }
+
+ public void setPrompt(String prompt) {
+ this.prompt = prompt;
+ }
+
+ public Class getOutputClass() {
+ return outputClass;
+ }
+
+ public void setOutputClass(Class outputClass) {
+ this.outputClass = outputClass;
+ }
+
+ public int getRetries() {
+ return retries;
+ }
+
+ public void setRetries(int retries) {
+ this.retries = retries;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
+
+ public int getMaxTokens() {
+ return maxTokens;
+ }
+
+ public void setMaxTokens(int maxTokens) {
+ this.maxTokens = maxTokens;
+ }
+
+ public double getTemperature() {
+ return temperature;
+ }
+
+ public void setTemperature(double temperature) {
+ this.temperature = temperature;
+ }
+}
\ No newline at end of file
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/model/ProcessResult.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/model/ProcessResult.java
new file mode 100644
index 00000000..5e7bb836
--- /dev/null
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/model/ProcessResult.java
@@ -0,0 +1,30 @@
+package com.ontimize.jee.webclient.openai.model;
+
+import java.util.List;
+
+public class ProcessResult {
+ private T data;
+ private List errors;
+ private int retries;
+
+ public ProcessResult() {
+ }
+
+ public ProcessResult(T data, List errors, int retries) {
+ this.data = data;
+ this.errors = errors;
+ this.retries = retries;
+ }
+
+ public T getData() {
+ return data;
+ }
+
+ public List getErrors() {
+ return errors;
+ }
+
+ public int getRetries() {
+ return retries;
+ }
+}
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/naming/OpenAINaming.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/naming/OpenAINaming.java
new file mode 100644
index 00000000..076b719d
--- /dev/null
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/naming/OpenAINaming.java
@@ -0,0 +1,70 @@
+package com.ontimize.jee.webclient.openai.naming;
+
+import com.ontimize.jee.webclient.openai.exception.OpenAIClientException;
+
+public final class OpenAINaming {
+
+ private OpenAINaming() {
+ throw new OpenAIClientException("");
+ }
+
+ public static final String MODEL = "model";
+ public static final String MESSAGES = "messages";
+ public static final String ROLE = "role";
+ public static final String USER = "user";
+ public static final String CONTENT = "content";
+ public static final String TYPE = "type";
+ public static final String TEXT = "text";
+ public static final String IMAGE_URL = "image_url";
+ public static final String IMAGE_TYPE = "data:image/jpeg;base64,";
+ public static final String URL = "url";
+ public static final String DETAIL = "detail";
+ public static final String HIGH = "high";
+ public static final String MAX_TOKENS = "max_tokens";
+ public static final String TEMPERATURE = "temperature";
+ public static final String COMPLETIONS_URL = "https://api.openai.com/v1/chat/completions";
+ public static final String CHOICES = "choices";
+ public static final String MESSAGE = "message";
+
+ public static final String INITIAL_PROMPT_FORMAT =
+ "Your task is to process the following image and return the structured information "
+ + "in the JSON format described below.\n\n"
+ + "=== CONTEXT INSTRUCTIONS ===\n"
+ + "%s\n\n"
+ + "=== EXPECTED STRUCTURE ===\n"
+ + "Return only a JSON that follows this structure:\n%s\n\n"
+ + "IMPORTANT:\n"
+ + "- Do not include explanations or comments.\n"
+ + "- Use null if you cannot identify a value.\n"
+ + "- Respect the specified data type: if a number or date is expected, "
+ + "return it correctly formatted.\n"
+ + "- Make sure the JSON is valid and parseable.";
+
+ public static final String RETRY_PROMPT_FORMAT =
+ "The following JSON does not meet the expected structure or validation rules. "
+ + "Please correct the errors and regenerate only the corrected JSON.\n\n"
+ + "=== CONTEXT INSTRUCTIONS ===\n"
+ + "%s\n\n"
+ + "=== EXPECTED STRUCTURE ===\n"
+ + "%s\n\n"
+ + "=== PREVIOUS INVALID RESPONSE ===\n"
+ + "%s\n\n"
+ + "=== DETECTED ERRORS ===\n"
+ + "%s\n\n"
+ + "Please generate a new version of the JSON that is valid, well-formed, "
+ + "and strictly follows the defined structure.\n\n"
+ + "REMEMBER:\n"
+ + "- Do not include explanations or comments.\n"
+ + "- Use null if you cannot identify a value.\n"
+ + "- Respect the specified data type: if a number or date is expected, "
+ + "return it correctly formatted.\n"
+ + "- Make sure the JSON is valid and parseable.";
+
+ public static final String OPENAI_API_ERROR = "OpenAI API error: ";
+ public static final String OPENAI_API_NO_JSON_ERROR = "No JSON found in the input string";
+ public static final String OPENAI_API_SCHEMA_GENERATION_ERROR = "Error generating schema: ";
+ public static final String OPENAI_API_SCHEMA_SERIALIZATION_ERROR = "Error serializing schema: ";
+
+ public static final String PROPERTIES = "properties";
+}
+
diff --git a/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/service/OpenAiImageProcessorService.java b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/service/OpenAiImageProcessorService.java
new file mode 100644
index 00000000..5812fbf8
--- /dev/null
+++ b/ontimize-jee-webclient-addons/src/main/java/com/ontimize/jee/webclient/openai/service/OpenAiImageProcessorService.java
@@ -0,0 +1,115 @@
+package com.ontimize.jee.webclient.openai.service;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.ontimize.jee.webclient.openai.exception.OpenAIClientException;
+import com.ontimize.jee.webclient.openai.model.ProcessRequest;
+import com.ontimize.jee.webclient.openai.model.ProcessResult;
+import com.ontimize.jee.webclient.openai.util.JsonSchemaValidator;
+import com.ontimize.jee.webclient.openai.util.Utils;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.RestTemplate;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.IOException;
+import java.util.*;
+
+import static com.ontimize.jee.webclient.openai.naming.OpenAINaming.*;
+
+public class OpenAiImageProcessorService {
+ private final JsonSchemaValidator jsonSchemaValidator;
+ private final ObjectMapper objectMapper = new ObjectMapper();
+ private final String apiKey;
+
+ public OpenAiImageProcessorService(String apiKey, JsonSchemaValidator jsonSchemaValidator) {
+ this.jsonSchemaValidator = jsonSchemaValidator;
+ this.apiKey = apiKey;
+ }
+
+ public ProcessResult processImage(ProcessRequest request) {
+ int actualTry = 0;
+ List errors = new ArrayList<>();
+ MultipartFile file = request.getFile();
+ Class outputClass = request.getOutputClass();
+ ObjectMapper localObjectMapper = new ObjectMapper();
+
+ String schemaStr;
+ try {
+ schemaStr = Utils.generateFullSchemaJson(outputClass);
+ } catch (Exception e) {
+ errors.add(OPENAI_API_SCHEMA_GENERATION_ERROR + e.getMessage());
+ return new ProcessResult<>(null, errors, actualTry);
+ }
+
+ String parsedResult = null;
+ ProcessResult finalResult = null;
+
+ while (actualTry < request.getRetries()) {
+ try {
+ String prompt = Utils.buildPrompt(request.getPrompt(), schemaStr, parsedResult, errors);
+
+ String responseJsonRaw = callVisionApi(prompt, file, request.getModel(), request.getMaxTokens(),
+ request.getTemperature());
+
+ String responseJson = JsonSchemaValidator.extractRawJson(responseJsonRaw);
+ if (responseJson == null || responseJson.isBlank()) {
+ throw new IllegalStateException(OPENAI_API_NO_JSON_ERROR);
+ }
+
+ parsedResult = responseJsonRaw;
+ jsonSchemaValidator.validate(responseJson, schemaStr);
+
+ T result = localObjectMapper.readValue(responseJson, outputClass);
+ finalResult = new ProcessResult<>(result, new ArrayList<>(errors), actualTry);
+ break;
+
+ } catch (Exception e) {
+ errors.add(e.getMessage());
+ actualTry++;
+ }
+ }
+ return finalResult != null ? finalResult : new ProcessResult<>(null, errors, actualTry);
+ }
+
+ private String callVisionApi(String promptText, MultipartFile image, String model, int maxTokens,
+ double temperature) throws OpenAIClientException, JsonProcessingException {
+ HttpEntity