diff --git a/cloudsql-postgresql-plugin/src/e2e-test/features/source/DesignTimeWithValidation.feature b/cloudsql-postgresql-plugin/src/e2e-test/features/source/DesignTimeWithValidation.feature
index 397c4dffe..93760efc1 100644
--- a/cloudsql-postgresql-plugin/src/e2e-test/features/source/DesignTimeWithValidation.feature
+++ b/cloudsql-postgresql-plugin/src/e2e-test/features/source/DesignTimeWithValidation.feature
@@ -24,6 +24,7 @@ Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin
     Then Click on the Validate button
     Then Verify mandatory property error for below listed properties:
       | jdbcPluginName |
+      | connectionName |
       | database       |
       | referenceName  |
       | importQuery    |
@@ -228,3 +229,19 @@ Feature: CloudSQL-postgreSQL source - Verify CloudSQL-postgreSQL source plugin
     Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
     Then Click on the Validate button
     Then Verify that the Plugin is displaying an error message: "errorMessageInvalidPassword" on the header
+
+  Scenario: To verify CloudSQLPostgreSQL source plugin validation error message with invalid connection name for private instance
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "private"
+    Then Replace input plugin property: "connectionName" with value: "invalidConnectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessagePrivateConnectionName"
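Note on the scenario above: the two instance types expect differently shaped connection names, which is what the deliberately malformed "invalidConnectionName" value trips over. A sketch of plausible values, with the formats inferred from the validation messages added to errorMessage.properties later in this patch (the project, region, and IP here are made up):

    // Illustrative connectionName values; formats inferred from the plugin's
    // validation messages, concrete identifiers invented for the example.
    class ConnectionNameFormats {
      static final String PUBLIC_INSTANCE = "my-project:us-central1:my-instance"; // <PROJECT_ID>:<REGION>:<INSTANCE_NAME>
      static final String PRIVATE_INSTANCE = "10.128.0.12"; // internal IP of the Compute Engine VM running the Cloud SQL proxy
    }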
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTime.feature b/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTime.feature
index b2f96539f..a9e3ff26b 100644
--- a/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTime.feature
+++ b/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTime.feature
@@ -64,7 +64,7 @@ Feature: CloudSQL-PostGreSQL Source - Run Time scenarios
     Then Validate the values of records transferred to target Big Query table is equal to the values from source table
 
   @CLOUDSQLPOSTGRESQL_SOURCE_TEST @BQ_SINK_TEST @PLUGIN-1526
-  Scenario: To verify data is getting transferred from PostgreSQL source to BigQuery sink successfully when connection arguments are set
+  Scenario: To verify data is getting transferred from CloudSQLPostgreSQL source to BigQuery sink successfully when connection arguments are set
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
@@ -188,11 +188,106 @@ Feature: CloudSQL-PostGreSQL Source - Run Time scenarios
     Then Close the Plugin Properties page
     Then Save the pipeline
     Then Preview and run the pipeline
-    Then Wait till pipeline preview is in running state
+    Then Wait till pipeline preview is in running state and check if any error occurs
     Then Verify the preview run status of pipeline in the logs is "failed"
 
   @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TARGET_TEST
-  Scenario: To verify data is getting transferred from PostgreSQL to PostgreSQL successfully with supported datatypes
+  Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with supported datatypes
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Validate "CloudSQL PostgreSQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    Then Click on the Preview Data link on the Sink plugin node: "CloudSQLPostgreSQL"
+    Then Close the preview data
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TARGET_TEST @CONNECTION @Source_Required
+  Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with use connection
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    And Click plugin property: "switch-useConnection"
+    And Click on the Browse Connections button
+    And Click on the Add Connection button
+    Then Click plugin property: "connector-CloudSQLPostgreSQL"
+    And Enter input plugin property: "name" with value: "connection.name"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Click on the Test Connection button
+    And Verify the test connection is successful
+    Then Click on the Create button
+    Then Select connection: "connection.name"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+    And Click plugin property: "switch-useConnection"
+    And Click on the Browse Connections button
+    Then Select connection: "connection.name"
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Validate "CloudSQL PostgreSQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    And Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TARGET_TEST @Source_Required
+  Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully with bounding query
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
@@ -206,6 +301,7 @@ Feature: CloudSQL-PostGreSQL Source - Run Time scenarios
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Enter textarea plugin property: "boundingQuery" with value: "boundingQuery"
     Then Replace input plugin property: "database" with value: "databaseName"
     Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
     Then Click on the Get Schema button
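The bounding-query scenario above exercises the split logic of the DB batch source: at runtime, $CONDITIONS in the import query is substituted per split with a predicate over the split-by column, and the bounding query supplies the overall MIN/MAX range for those predicates. A worked sketch, with table name, bounds, and split count invented for the example (the real queries are registered in TestSetUpHooks.java further down in this patch):

    // Illustrative only: how $CONDITIONS is expanded when the bounding query
    // returns MIN(id)=1, MAX(id)=100 and the source is configured with 2 splits.
    public class ConditionsExpansionExample {
      public static void main(String[] args) {
        String importQuery = "select * from public.sourceTable WHERE $CONDITIONS";
        // Each split reads a disjoint id range covering [1, 100] in total:
        System.out.println(importQuery.replace("$CONDITIONS", "id >= 1 AND id < 51"));
        System.out.println(importQuery.replace("$CONDITIONS", "id >= 51 AND id <= 100"));
      }
    }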
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTimeMacro.feature b/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTimeMacro.feature
index 7e2e5200d..5e9919f44 100644
--- a/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTimeMacro.feature
+++ b/cloudsql-postgresql-plugin/src/e2e-test/features/source/RunTimeMacro.feature
@@ -332,3 +332,66 @@ Feature: CloudSQL-PostGreSQL source - Verify CloudSQL-PostGreSQL plugin data tra
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
     Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
+  @CLOUDSQLPOSTGRESQL_SOURCE_TEST @CLOUDSQLPOSTGRESQL_TARGET_TEST @Source_Required
+  Scenario: To verify data is getting transferred from CloudSQLPostgreSQL to CloudSQLPostgreSQL successfully when macro enabled
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL PostgreSQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL PostgreSQL" and "CloudSQL PostgreSQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Click on the Macro button of Property: "connectionName" and set the value to: "cloudSQLPostgreSQLConnectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connArgumentsSource"
+    Then Click on the Macro button of Property: "database" and set the value to: "cloudSQLPostgreSQLdatabaseName"
+    Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "cloudSQLPostgreSQLImportQuery"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Click on the Macro button of Property: "boundingQuery" and set the value in textarea: "cloudSQLPostgreSQLBoundingQuery"
+    Then Validate "CloudSQL PostgreSQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL PostgreSQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connArgumentsSink"
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Validate "CloudSQL PostgreSQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "connectionArguments" for key "connArgumentsSource"
+    Then Enter runtime argument value "connectionArguments" for key "connArgumentsSink"
+    Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery"
+    Then Enter runtime argument value "databaseName" for key "cloudSQLPostgreSQLdatabaseName"
+    Then Enter runtime argument value "boundingQuery" for key "cloudSQLPostgreSQLBoundingQuery"
+    Then Enter runtime argument value from environment variable "connectionName" for key "cloudSQLPostgreSQLConnectionName"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "connectionArguments" for key "connArgumentsSource"
+    Then Enter runtime argument value "connectionArguments" for key "connArgumentsSink"
+    Then Enter runtime argument value "selectQuery" for key "cloudSQLPostgreSQLImportQuery"
+    Then Enter runtime argument value "databaseName" for key "cloudSQLPostgreSQLdatabaseName"
+    Then Enter runtime argument value "boundingQuery" for key "cloudSQLPostgreSQLBoundingQuery"
+    Then Enter runtime argument value from environment variable "connectionName" for key "cloudSQLPostgreSQLConnectionName"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table
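The macro scenario supplies the same six runtime arguments twice, once for the preview run and once for the deployed run. The connection name deliberately comes from an environment variable so instance-specific values stay out of source control, and the "connectionArguments" value resolves to the queryTimeout=50 property added to pluginParameters.properties at the end of this patch. Roughly, the argument map handed to both runs looks like the sketch below (illustrative only; angle-bracket placeholders stand for values generated per run, and the environment variable name mirrors the Gherkin step):

    import java.util.Map;

    // Illustrative runtime-argument map for the macro scenario. Literal values
    // mirror pluginParameters.properties; nothing here is framework API.
    class MacroRunArgsExample {
      static final Map<String, String> RUNTIME_ARGS = Map.of(
          "connArgumentsSource", "queryTimeout=50",
          "connArgumentsSink", "queryTimeout=50",
          "cloudSQLPostgreSQLImportQuery", "select * from <schema>.<srcTable> WHERE $CONDITIONS",
          "cloudSQLPostgreSQLdatabaseName", "<databaseName>",
          "cloudSQLPostgreSQLBoundingQuery", "select MIN(id),MAX(id) from <schema>.<srcTable>",
          "cloudSQLPostgreSQLConnectionName", System.getenv("connectionName"));
    }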
PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName); PluginPropertyUtils.addPluginProp("targetTable", targetTableName); String schema = PluginPropertyUtils.pluginProp("schema"); - PluginPropertyUtils.addPluginProp("selectQuery", - String.format("select * from %s.%s", schema, sourceTableName)); + PluginPropertyUtils.addPluginProp("selectQuery", String.format("select * from %s.%s" + + " WHERE $CONDITIONS", schema, sourceTableName)); + PluginPropertyUtils.addPluginProp("boundingQuery", String.format("select MIN(id),MAX(id)" + + " from %s.%s", schema, sourceTableName)); } @Before(order = 2, value = "@CLOUDSQLPOSTGRESQL_SOURCE_TEST") @@ -177,4 +181,25 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); } + + @Before(order = 1, value = "@CONNECTION") + public static void setNewConnectionName() { + String connectionName = "CloudSQLPostgreSQL" + RandomStringUtils.randomAlphanumeric(10); + PluginPropertyUtils.addPluginProp("connection.name", connectionName); + BeforeActions.scenario.write("New Connection name: " + connectionName); + } + + private static void deleteConnection(String connectionType, String connectionName) throws IOException { + CdfConnectionActions.openWranglerConnectionsPage(); + CdfConnectionActions.expandConnections(connectionType); + CdfConnectionActions.openConnectionActionMenu(connectionType, connectionName); + CdfConnectionActions.selectConnectionAction(connectionType, connectionName, "Delete"); + CdfPluginPropertiesActions.clickPluginPropertyButton("Delete"); + } + + @After(order = 1, value = "@CONNECTION") + public static void deleteTestConnection() throws IOException { + deleteConnection("CloudSQLPostgreSQL", "connection.name"); + PluginPropertyUtils.removePluginProp("connection.name"); + } } diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties index 1d094aedd..f780ab75f 100644 --- a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties +++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties @@ -7,16 +7,17 @@ errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must b errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid: Value of \ field class io.cdap.plugin.db.config.AbstractDBSpecificSourceConfig.numSplits is expected to be a number. errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer. -errorMessageInvalidSourceDatabase=SQL error while getting query schema: FATAL: database "invalidDatabase" does not exist +errorMessageInvalidSourceDatabase=SQL error while getting query schema: Error: FATAL: database "invalidDatabase" does not exist, errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS'. if Number of Splits is not set\ \ to 1. Include '$CONDITIONS' in the Import Query errorMessageBlankUsername=Username is required when password is given. 
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties
index 1d094aedd..f780ab75f 100644
--- a/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties
+++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/errorMessage.properties
@@ -7,16 +7,17 @@ errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must b
 errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource CloudSQLPostgreSQL 'numSplits' is invalid: Value of \
   field class io.cdap.plugin.db.config.AbstractDBSpecificSourceConfig.numSplits is expected to be a number.
 errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer.
-errorMessageInvalidSourceDatabase=SQL error while getting query schema: FATAL: database "invalidDatabase" does not exist
+errorMessageInvalidSourceDatabase=SQL error while getting query schema: Error: FATAL: database "invalidDatabase" does not exist,
 errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS'. if Number of Splits is not set\
   \ to 1. Include '$CONDITIONS' in the Import Query
 errorMessageBlankUsername=Username is required when password is given.
 errorMessageBlankPassword=SQL error while getting query schema:
-errorMessageInvalidPassword=SQL error while getting query schema: FATAL: password authentication failed for user
+errorMessageInvalidPassword=SQL error while getting query schema: Error: FATAL: password authentication failed for user
 errorMessageInvalidSourceHost=SQL error while getting query schema: The connection attempt failed.
 errorMessageInvalidTableName=Table 'table' does not exist. Ensure table '"table"' is set correctly and that the
 errorMessageInvalidSinkDatabase=Exception while trying to validate schema of database table
-errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: The column index is out of range: 1, \
-  number of columns: 0.. Please check the system logs for more details.
+errorLogsMessageInvalidBoundingQuery=The column index is out of range: 1, number of columns: 0..
 errorMessageConnectionName=Connection Name must be in the format <PROJECT_ID>:<REGION>:<INSTANCE_NAME> to connect to \
   a public CloudSQL PostgreSQL instance.
+errorMessagePrivateConnectionName=Enter the internal IP address of the Compute Engine VM cloudsql proxy is running on, \
+  to connect to a private CloudSQL PostgreSQL instance.
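The reworked messages pin only the stable core of what the UI and logs actually emit: the schema-fetch errors now carry the "Error:" prefix (the trailing comma on the database message appears to be part of the rendered text), and the bounding-query assertion drops the "Spark program 'phase-1' failed" wrapper and the "Please check the system logs" suffix. A hypothetical sketch of why the shortened string still passes, assuming the framework checks captured logs by substring match (helper and class names assumed, not the framework's actual step glue):

    import io.cdap.e2e.utils.PluginPropertyUtils;
    import org.junit.Assert;

    // Hypothetical assertion sketch: a substring check only needs the stable
    // core of the driver error, not the Spark wrapper text around it.
    class BoundingQueryLogCheckExample {
      static void verify(String capturedLogs) {
        String expected = PluginPropertyUtils.errorProp("errorLogsMessageInvalidBoundingQuery");
        Assert.assertTrue(capturedLogs.contains(expected));
      }
    }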
diff --git a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
index 3212af0a9..082e8db83 100644
--- a/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
+++ b/cloudsql-postgresql-plugin/src/e2e-test/resources/pluginParameters.properties
@@ -13,12 +13,13 @@ datatypesColumns=( id varchar(100) primary key, col1 bpchar, col2 bpchar(10), co
   col40 bit(2), col41 varbit(5), col42 json, col43 jsonb, col44 _pg_lsn, col45 pg_snapshot, col46 tsquery, \
   col47 tsvector, col48 txid_snapshot, col49 uuid, col50 xml, col51 int4range, col52 int8range, col53 numrange, \
   col54 tsrange, col55 tstzrange, col56 daterange, col57 pg_lsn, col58 int4, col59 int2, col60 int8, col61 real, \
-  col62 smallint, col63 serial, col64 smallserial, col65 double precision, col66 bigint, col67 bigserial, col68 boolean)
+  col62 smallint, col63 serial, col64 smallserial, col65 double precision, col66 bigint, col67 bigserial, col68 boolean,\
+  col69 char(10), col70 decimal(10, 2), col71 numeric)
 datatypesColumnsList=( id, col1, col2, col3, col4, col5, col6, col7, col8, col10, col11, col12, col13, col14, \
   col15, col16, col17, col18, col22,col23, col24, col25, col26, col27, col28, col29, col30, col31, col32, col33, \
   col34, col35, col36, col37, col38, col39, col40, col41, col42, col43, col44, col45, col46, col47, col48, col49, \
   col50, col51, col52, col53, col54, col55, col56, col57, col58, col59, col60, col61, col62, col63, col64, col65,\
-  col66, col67, col68 )
+  col66, col67, col68, col69, col70, col71 )
 datatypesValues=VALUES ('User5', 'M', 'ABC...1234', 'B', 'ABC', decode('48656C6C6F20576F726C6421','hex'), 123, 123, \
   123456, 123.4567, 123456789, 123.456, 123.456, 100.26, 'Hello World!', 'User 5', 123.456, 100, \
   '2023-01-01 07:30:00.000', '2023-01-01 15:30:00.000', '02:00:00', '6 mons 02:30:00'::interval, \
@@ -33,7 +34,7 @@ datatypesValues=VALUES ('User5', 'M', 'ABC...1234', 'B', 'ABC', decode('48656C6C
   '(1.0,14.0)'::numrange, '["2010-01-01 14:30:00","2010-01-01 15:30:00")'::tsrange, \
   '["2010-01-01 20:00:00+05:30","2010-01-01 21:00:00+05:30")'::tstzrange, '[1992-03-21,1994-06-26)'::daterange, \
   '16/B374D848'::pg_lsn, 2, 2, 2, '1234.5679', '600', DEFAULT, DEFAULT, '61.823765812', '2500000000000', \
-  DEFAULT, false);
+  DEFAULT, false, 'Example1', 12345.67, '12345');
 datatypesSchema=[{"key":"id","value":"string"},{"key":"col1","value":"string"},{"key":"col2","value":"string"},\
   {"key":"col3","value":"string"},{"key":"col4","value":"string"},{"key":"col5","value":"bytes"},\
   {"key":"col6","value":"int"},{"key":"col7","value":"int"},{"key":"col8","value":"long"},\
@@ -55,7 +56,8 @@ datatypesSchema=[{"key":"id","value":"string"},{"key":"col1","value":"string"},{
   {"key":"col58","value":"int"},{"key":"col59","value":"int"},{"key":"col60","value":"long"}, \
   {"key":"col61","value":"float"},{"key":"col62","value":"int"},{"key":"col63","value":"int"},\
   {"key":"col64","value":"int"},{"key":"col65","value":"double"},{"key":"col66","value":"long"},\
-  {"key":"col67","value":"long"},{"key":"col68","value":"boolean"}]
+  {"key":"col67","value":"long"},{"key":"col68","value":"boolean"},{"key":"col69","value":"string"},\
+  {"key":"col70","value":"decimal"},{"key":"col71","value":"string"}]
 
 #CLOUDSQLPOSTGRESQL Invalid Properties
 invalidRef=invalidRef&^*&&*
@@ -77,6 +79,7 @@ numberOfSplits=2
 zeroValue=0
 splitByColumn=ID
 importQuery=where $CONDITIONS
+connectionArguments=queryTimeout=50
 
 #bq properties
 projectId=cdf-athena
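The three new columns round out the type matrix: fixed-width char surfaces as a CDAP string, a precision-qualified decimal keeps its precision and scale, and numeric without an explicit precision falls back to string, since Postgres leaves its precision and scale undefined. Expressed against the CDAP schema API, the expectation encoded in datatypesSchema above is (a sketch; nullable wrappers omitted for brevity):

    import io.cdap.cdap.api.data.schema.Schema;

    // Expected CDAP field schemas for the new columns, mirroring datatypesSchema.
    class NewColumnSchemas {
      static final Schema COL69 = Schema.of(Schema.Type.STRING); // char(10)
      static final Schema COL70 = Schema.decimalOf(10, 2);       // decimal(10, 2)
      static final Schema COL71 = Schema.of(Schema.Type.STRING); // numeric, no precision
    }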