diff --git a/oracle-plugin/src/e2e-test/features/source/OracleDesignTimeValidation.feature b/oracle-plugin/src/e2e-test/features/source/OracleDesignTimeValidation.feature index 9efb734db..64e9a1037 100644 --- a/oracle-plugin/src/e2e-test/features/source/OracleDesignTimeValidation.feature +++ b/oracle-plugin/src/e2e-test/features/source/OracleDesignTimeValidation.feature @@ -199,3 +199,54 @@ Feature: Oracle source- Verify Oracle source plugin design time validation scena Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" Then Click on the Validate button Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" + + @Oracle_Required + Scenario: To verify Oracle source plugin validation error message with blank password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: 
"errorMessageBlankPassword" on the header + + @Oracle_Required + Scenario: To verify Oracle source plugin validation error message with blank Host + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageBlankHost" on the header + + @Oracle_Required + Scenario: Verify the validation error message on header with blank database value + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and 
Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Click plugin property: "switch-useConnection" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "blank.database.message" on the header diff --git a/oracle-plugin/src/e2e-test/features/source/OracleRunTime.feature b/oracle-plugin/src/e2e-test/features/source/OracleRunTime.feature index 2d1ca9ad1..2040c33f6 100644 --- a/oracle-plugin/src/e2e-test/features/source/OracleRunTime.feature +++ b/oracle-plugin/src/e2e-test/features/source/OracleRunTime.feature @@ -438,3 +438,110 @@ Feature: Oracle - Verify data transfer from Oracle source to BigQuery sink Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @ORACLE_SOURCE_TEST @BQ_SINK_TEST @Oracle_Required + Scenario: To verify data is getting transferred from Oracle source to BigQuery sink successfully with bounding query + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "Oracle" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields 
+ Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Enter textarea plugin property: "boundingQuery" with value: "boundingQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + 
Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @ORACLE_SOURCE_TEST @BQ_SINK_TEST @CONNECTION @Oracle_Required + Scenario: To verify data is getting transferred from Oracle source to BigQuery sink successfully with use connection + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "Oracle" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "Oracle" + And Click plugin property: "switch-useConnection" + And Click on the Browse Connections button + And Click on the Add Connection button + Then Click plugin property: "connector-Oracle" + And Enter input plugin property: "name" with value: "connection.name" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Replace input plugin property: "database" with value: "databaseName" + Then Select radio button plugin property: "role" with value: "normal" + Then Click on the Test Connection button + And Verify the test connection is successful + Then Click on the Create button + Then Select connection: "connection.name" + Then Enter 
input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "updateTableSchema" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + diff --git a/oracle-plugin/src/e2e-test/features/source/OracleRunTimeMacro.feature b/oracle-plugin/src/e2e-test/features/source/OracleRunTimeMacro.feature index 106d4ec36..79c584a95 100644 --- a/oracle-plugin/src/e2e-test/features/source/OracleRunTimeMacro.feature +++ b/oracle-plugin/src/e2e-test/features/source/OracleRunTimeMacro.feature @@ -305,3 +305,64 @@ Feature: Oracle - Verify Oracle plugin data transfer 
with macro arguments Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @ORACLE_SOURCE_TEST @ORACLE_TARGET_TEST @Oracle_Required + Scenario: To verify data is getting transferred from Oracle to Oracle successfully when connection arguments,Isolation level,bounding query are macro enabled + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "Oracle" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "Oracle" from the plugins list as: "Sink" + Then Connect plugins: "Oracle" and "Oracle2" to establish connection + Then Navigate to the properties page of plugin: "Oracle" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connArgumentsSource" + Then Click on the Macro button of Property: "transactionIsolationLevel" and set the value to: "defaultTransactionIsolationLevel" + Then Replace 
input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "boundingQuery" and set the value in textarea: "oracleBoundingQuery" + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Oracle2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields + Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields + Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "dbSchemaName" with value: "schema" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "targetRef" + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "normal" + Then Validate "Oracle2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "connectionArguments" for key "connArgumentsSource" + Then Enter runtime argument value "boundingQuery" for key "oracleBoundingQuery" + Then Enter runtime argument value "transactionIsolationLevel" for key "defaultTransactionIsolationLevel" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then 
Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "connectionArguments" for key "connArgumentsSource" + Then Enter runtime argument value "boundingQuery" for key "oracleBoundingQuery" + Then Enter runtime argument value "transactionIsolationLevel" for key "defaultTransactionIsolationLevel" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target table is equal to the values from source table diff --git a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java index 757b9c9f7..e36a71b0d 100644 --- a/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java +++ b/oracle-plugin/src/e2e-test/java/io.cdap.plugin/common.stepsdesign/TestSetupHooks.java @@ -17,6 +17,8 @@ package io.cdap.plugin.common.stepsdesign; import com.google.cloud.bigquery.BigQueryException; +import io.cdap.e2e.pages.actions.CdfConnectionActions; +import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions; import io.cdap.e2e.utils.BigQueryClient; import io.cdap.e2e.utils.PluginPropertyUtils; import io.cdap.plugin.OracleClient; @@ -48,8 +50,10 @@ public static void setTableName() { PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName); PluginPropertyUtils.addPluginProp("targetTable", targetTableName); String schema = PluginPropertyUtils.pluginProp("schema"); - PluginPropertyUtils.addPluginProp("selectQuery", String.format("select * from %s.%s", schema, - sourceTableName)); + PluginPropertyUtils.addPluginProp("selectQuery", String.format("select * from %s.%s " + + "WHERE $CONDITIONS", schema, sourceTableName)); + 
PluginPropertyUtils.addPluginProp("boundingQuery", String.format("select MIN(ID),MAX(ID)" + + " from %s.%s", schema, sourceTableName)); + } @Before(order = 2, value = "@ORACLE_SOURCE_TEST") @@ -416,4 +420,25 @@ public static void dropOracleTargetDateTable() throws SQLException, ClassNotFoun BeforeActions.scenario.write("Oracle Target Table - " + PluginPropertyUtils.pluginProp("targetTable") + " deleted successfully"); } + + @Before(order = 1, value = "@CONNECTION") + public static void setNewConnectionName() { + String connectionName = "Oracle" + RandomStringUtils.randomAlphanumeric(10); + PluginPropertyUtils.addPluginProp("connection.name", connectionName); + BeforeActions.scenario.write("New Connection name: " + connectionName); + } + + private static void deleteConnection(String connectionType, String connectionName) throws IOException { + CdfConnectionActions.openWranglerConnectionsPage(); + CdfConnectionActions.expandConnections(connectionType); + CdfConnectionActions.openConnectionActionMenu(connectionType, connectionName); + CdfConnectionActions.selectConnectionAction(connectionType, connectionName, "Delete"); + CdfPluginPropertiesActions.clickPluginPropertyButton("Delete"); + } + + @After(order = 1, value = "@CONNECTION") + public static void deleteOracleConnection() throws IOException { + deleteConnection("Oracle", "connection.name"); + PluginPropertyUtils.removePluginProp("connection.name"); + } } diff --git a/oracle-plugin/src/e2e-test/resources/errorMessage.properties b/oracle-plugin/src/e2e-test/resources/errorMessage.properties index b981faf81..b61f963c2 100644 --- a/oracle-plugin/src/e2e-test/resources/errorMessage.properties +++ b/oracle-plugin/src/e2e-test/resources/errorMessage.properties @@ -17,3 +17,6 @@ errorMessageInvalidSinkDatabase=Exception while trying to validate schema of dat errorMessageInvalidHost=Exception while trying to validate schema of database table '"table"' for connection errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1'
failed with error: ORA-00936: missing expression . \ Please check the system logs for more details. +errorMessageBlankPassword=SQL error while getting query schema: ORA-01005: null password given; logon denied +errorMessageBlankHost=SQL error while getting query schema: IO Error: +blank.database.message=Required property 'database' has no value. diff --git a/oracle-plugin/src/e2e-test/resources/pluginParameters.properties b/oracle-plugin/src/e2e-test/resources/pluginParameters.properties index c71e82b53..e3f81f1bc 100644 --- a/oracle-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/oracle-plugin/src/e2e-test/resources/pluginParameters.properties @@ -7,6 +7,7 @@ host=ORACLE_HOST port=ORACLE_PORT username=ORACLE_USERNAME password=ORACLE_PASSWORD +connection.name=dummy outputSchema=[{"key":"ID","value":"decimal"},{"key":"LASTNAME","value":"string"}] datatypeColumns=(ID VARCHAR2(100) PRIMARY KEY, COL1 CHAR, COL2 CHAR(10), COL3 VARCHAR(3), COL4 VARCHAR2(3), \ COL5 NCHAR, COL6 NCHAR(12), COL7 NVARCHAR2(12), COL8 CLOB, COL9 NCLOB, COL10 LONG, COL11 ROWID, COL12 NUMBER(4), \ @@ -14,16 +15,16 @@ datatypeColumns=(ID VARCHAR2(100) PRIMARY KEY, COL1 CHAR, COL2 CHAR(10), COL3 VA COL19 DECIMAL(*,2), COL20 DECIMAL(10,-3), COL21 DECIMAL, COL22 FLOAT, COL23 FLOAT(4), COL24 INTEGER, \ COL25 DOUBLE PRECISION, COL26 REAL, COL27 SMALLINT, COL28 TIMESTAMP, COL29 TIMESTAMP(9), \ COL30 TIMESTAMP WITH TIME ZONE, COL31 INTERVAL DAY(6) TO SECOND (5), COL32 INTERVAL YEAR(4) TO MONTH, COL33 DATE, \ - COL34 BINARY_FLOAT, COL35 BINARY_DOUBLE) + COL34 BINARY_FLOAT, COL35 BINARY_DOUBLE, COL36 UROWID) datatypeColumnsList=(ID, COL1, COL2, COL3, COL4,COL5,COL6,COL7,COL8,COL9,COL10,COL11,COL12,COL13,COL14,COL15,COL16,\ - COL17,COL18,COL19,COL20,COL21,COL22,COL23,COL24,COL25,COL26,COL27,COL28,COL29,COL30,COL31,COL32,COL33,COL34,COL35) + COL17,COL18,COL19,COL20,COL21,COL22,COL23,COL24,COL25,COL26,COL27,COL28,COL29,COL30,COL31,COL32,COL33,COL34,COL35,COL36) datatypeValues=VALUES ('USER1', 
'M','ABCDEF','ABC','ABC','ä','ä½ å¥½ï¼?è¿?','ä½ å¥½ï¼?è¿?',\ 'This is a sample long data.\n','è¿?æ?¯ä¸?个é\u009D?常','48656C6C6F','AAAAaoAATAAABrXAAA',1234,1234.56789,\ 1234.56789,1234.56789,1234.56789,1234.56789,1234.56789,1234.56789,1234.56789,1234.56789,1234.5679,1234.5679,\ 1234.56789,1234.5679,1234.5679,1234.56789,TIMESTAMP'2023-01-01 2:00:00',TIMESTAMP'2023-01-01 2:00:00',\ TIMESTAMP'2023-01-01 2:00:00 -08:00',TIMESTAMP '2001-09-03 12:47:00.000000'- TIMESTAMP '2001-09-03 13:13:00.000000',\ INTERVAL '5-2' YEAR TO MONTH,TIMESTAMP '2023-01-01 00:00:00.000000',339999992740149460000,\ - 34000000000000000000000000000000000000000) + 34000000000000000000000000000000000000000,'AAAHJYAAEAAAADyAAA') outputDatatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"},{"key":"COL2","value":"string"},\ {"key":"COL3","value":"string"},{"key":"COL4","value":"string"},{"key":"COL5","value":"string"},\ {"key":"COL6","value":"string"},{"key":"COL7","value":"string"},{"key":"COL8","value":"string"},\ @@ -35,7 +36,8 @@ outputDatatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"stri {"key":"COL24","value":"decimal"},{"key":"COL25","value":"double"},{"key":"COL26","value":"double"},\ {"key":"COL27","value":"decimal"},{"key":"COL28","value":"datetime"},{"key":"COL29","value":"datetime"},\ {"key":"COL30","value":"timestamp"},{"key":"COL31","value":"string"},{"key":"COL32","value":"string"},\ - {"key":"COL33","value":"datetime"},{"key":"COL34","value":"float"},{"key":"COL35","value":"double"}] + {"key":"COL33","value":"datetime"},{"key":"COL34","value":"float"},{"key":"COL35","value":"double"},\ + {"key":"COL36","value":"string"}] longColumns=(ID VARCHAR2(100) PRIMARY KEY, COL1 LONG, COL2 RAW(2), COL3 BLOB, COL4 CLOB, COL5 NCLOB, COL6 BFILE) longColumnsList=(ID,COL1,COL2,COL3,COL4,COL5,COL6) @@ -93,6 +95,8 @@ numberOfSplits=2 zeroValue=0 splitByColumn=ID importQuery=where $CONDITIONS +connectionArguments=queryTimeout=50 
+transactionIsolationLevel=TRANSACTION_READ_COMMITTED #bq properties projectId=cdf-athena