From def8ed24545ceb531fd7cd53d20b82846ed95af8 Mon Sep 17 00:00:00 2001
From: isha kaushik
Date: Thu, 4 May 2023 12:02:09 +0530
Subject: [PATCH 1/7] e2e Tests for cloudSQL-MySQL

---
 .../sink/CloudMySqlDesignTime.feature         |  36 ++
 .../CloudMySqlDesignTimeValidation.feature    | 114 ++++++
 .../CloudMySqlDesignTimeWithMacro.feature     |  35 ++
 .../features/sink/CloudMySqlRunTime.feature   |  92 +++++
 .../sink/CloudMySqlRunTimeMacro.feature       |  62 ++++
 .../source/CloudMySqlDesignTime.feature       |  56 +++
 .../CloudMySqlDesignTimeVaidation.feature     | 167 +++++++++
 .../CloudMySqlDesignTimeWithMacro.feature     |  38 ++
 .../features/source/CloudMySqlRunTime.feature | 326 ++++++++++++++++++
 .../source/CloudMySqlRunTimeMacro.feature     | 150 ++++++++
 .../resources/errorMessage.properties         |  17 +
 .../resources/pluginParameters.properties     |  32 ++
 12 files changed, 1125 insertions(+)
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties

diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
new file mode 100644
index 000000000..1e585573c
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
@@ -0,0 +1,36 @@
+@CloudMySql
+Feature: CloudMySql sink - Verify CloudMySql sink plugin design time scenarios
+
+  Scenario: To verify CloudMySql sink plugin validation with mandatory properties
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
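+
+#   Note: quoted values in these steps are first looked up as keys in
+#   src/e2e-test/resources/pluginParameters.properties (added by this patch but
+#   truncated from this excerpt) and used literally when no key matches. A
+#   hypothetical sketch of the entries the scenario above relies on:
+#     connectionName=cdf-athena:us-central1:sql-automation-test-instance
+#     database=TestDatabase
+#     tableName=mytable
+#   "connectionName" follows the Cloud SQL instance connection name format
+#   <project-id>:<region>:<instance-name>.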
+
+  Scenario: To verify CloudMySql sink plugin validation with connection and basic details for connectivity
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+
+
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature
new file mode 100644
index 000000000..8939b36fa
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature
@@ -0,0 +1,114 @@
+Feature: CloudMySql sink - Verify CloudMySql sink plugin design time validation scenarios
+
+  Scenario: To verify CloudMySql sink plugin validation error message with invalid database
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+#   Then Click on the Get Schema button
+#   Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
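+#   The sink-side assertion at the end of this scenario resolves its message
+#   from src/e2e-test/resources/errorMessage.properties (added by this patch
+#   but truncated from this excerpt). A hypothetical entry, for illustration:
+#     errorMessageInvalidSinkDatabase=Exception while trying to validate schema of database table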
"cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "invalidDatabaseName" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Click on the Validate button +# Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header + + Scenario: To verify CloudMySql sink plugin validation error message with invalid tablename + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" +# Then Click on the Get Schema button +# Then Verify the Output Schema matches the Expected Schema: "outputSchema" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "Invalidtable" + Then Click on the Validate button +# Then Verify that the Plugin Property: "table" is displaying an in-line error message: "errorMessageInvalidTableName" + + Scenario: To verify CloudMySql sink plugin validation error message with invalid reference Name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio 
+
+  Scenario: To verify CloudMySql sink plugin validation error message with invalid reference name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "invalidRef"
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudMySqlInvalidReferenceName"
+
+  Scenario: To verify CloudMySql sink plugin validation error message with blank username
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername"
+
+  Scenario: To verify CloudMySql sink plugin validation error message with invalid connection name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName"
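+
+#   A well-formed connection name has three colon-separated segments,
+#   <project-id>:<region>:<instance-name>; the two-segment value above drops
+#   the instance segment and is expected to trip the in-line check, e.g.
+#     valid:   cdf-athena:us-central1:sql-automation-test-instance
+#     invalid: cdf-athena:us-central1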
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature
new file mode 100644
index 000000000..752540965
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature
@@ -0,0 +1,35 @@
+Feature: CloudMySql sink - Verify CloudMySql sink plugin design time macro scenarios
+
+  Scenario: To verify CloudMySql sink plugin validation with macro enabled fields for connection section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Click on the Macro button of Property: "user" and set the value to: "username"
+    Then Click on the Macro button of Property: "password" and set the value to: "password"
+    Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "TestDatabase"
+    Then Click on the Validate button
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
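+
+#   Macro-enabled fields hold ${...} placeholders (for example user ->
+#   ${username}, password -> ${password}) that CDAP resolves from runtime
+#   arguments or preferences when the run starts, which is why design-time
+#   validation here stops at the Validate click instead of asserting a
+#   fetched schema.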
+
+  Scenario: To verify CloudMySql sink plugin validation with macro enabled fields for basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "TestDatabase"
+    Then Click on the Macro button of Property: "tableName" and set the value to: "mytable"
+    Then Click on the Validate button
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
\ No newline at end of file
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature
new file mode 100644
index 000000000..d4156e0fc
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature
@@ -0,0 +1,92 @@
+Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql sink
+
+  Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+#   Then Click on the Get Schema button
+#   Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+#   Then Verify the preview run status of pipeline in the logs is "succeeded"
+#   Then Close the pipeline logs
+#   Then Close the preview
+#   Then Deploy the pipeline
+#   Then Run the Pipeline in Runtime
+#   Then Wait till pipeline is in running state
+#   Then Open and capture logs
+#   Then Verify the pipeline status is "Succeeded"
+#   Then Close the pipeline logs
+#   Then Validate OUT record count is equal to records transferred to target BigQuery table
+
+  Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully when connection arguments are set
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+#   Then Click on the Get Schema button
Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state +# Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs +# Then Validate OUT record count is equal to records transferred to target BigQuery table + + + + diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature new file mode 100644 index 000000000..4b49bb0e0 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -0,0 +1,62 @@ +Feature: CloudMySql - Verify data transfer to CloudMySql sink with macro arguments + + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "project" and set the value to: "projectId" + Then Click on the Macro button of Property: "datasetProject" and set the value to: "bqDatasetProjectId" + Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqTargetTable" +# Then Click on the Get Schema button +# Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: 
"public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "projectId" for key "projectId" + Then Enter runtime argument value "bqDatasetId" for key "bqDatasetProjectId" + Then Enter runtime argument value "dataset" for key "bqDataset" + Then Enter runtime argument value "bqSourceTable" for key "bqTargetTable" + Then Enter runtime argument value "driver" for key "cloudsql-mysql" + Then Enter runtime argument value from environment variable "name" for key "username" + Then Enter runtime argument value from environment variable "pass" for key "password" + Then Enter runtime argument value "table" for key "mytable" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state +# Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Enter runtime argument value "projectId" for key "projectId" +# Then Enter runtime argument value "bqDatasetId" for key "bqDatasetProjectId" +# Then Enter runtime argument value "dataset" for key "bqDataset" +# Then Enter runtime argument value "bqSourceTable" for key "bqTargetTable" +# Then Enter runtime argument value "driver" for key "cloudsql-mysql" +# Then Enter runtime argument value from environment variable "name" for key "username" +# Then Enter runtime argument value from environment variable "pass" for key "password" +# Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" +# Then Enter runtime argument value "table" for key "mytable" +# Then Run the Pipeline in Runtime with runtime arguments +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs + diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature new file mode 100644 index 000000000..01ccc1feb --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature @@ -0,0 +1,56 @@ +@CloudMySql +Feature: CloudMySql source- Verify CloudMySql source plugin design time scenarios + + Scenario: To verify CloudMySql source plugin validation with mandatory properties + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the 
+
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
new file mode 100644
index 000000000..01ccc1feb
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
@@ -0,0 +1,56 @@
+@CloudMySql
+Feature: CloudMySql source - Verify CloudMySql source plugin design time scenarios
+
+  Scenario: To verify CloudMySql source plugin validation with mandatory properties
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Click on the Macro button of Property: "user" and set the value to: "username"
+    Then Click on the Macro button of Property: "password" and set the value to: "password"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
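+
+#   "outputSchema" names the expected schema entry in
+#   pluginParameters.properties, compared field-by-field against what Get
+#   Schema returns. A hypothetical entry (illustrative only):
+#     outputSchema=[{"key":"id","value":"int"},{"key":"lastName","value":"string"}]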
+
+  Scenario: To verify CloudMySql source plugin validation with connection and basic details for connectivity
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "connection"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "refName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+
+  Scenario: To verify CloudMySql source plugin validation setting up connection arguments
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "connection"
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "referencename"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
\ No newline at end of file
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
new file mode 100644
index 000000000..0be92bf7d
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
@@ -0,0 +1,167 @@
+Feature: CloudMySql source - Verify CloudMySql source plugin design time validation scenarios
+
+  Scenario: To verify CloudMySql source plugin validation error message with invalid database
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "invalidDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSourceDatabase" on the header
+
+  Scenario: To verify CloudMySql source plugin validation error message with invalid import query
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery"
+
+  Scenario: To verify CloudMySql source plugin validation error message with invalid reference name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "invalidRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudMySqlInvalidReferenceName"
+
+  Scenario: To verify CloudMySql source plugin validation error message with blank username
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername"
+
+  Scenario: To verify CloudMySql source plugin validation error message with blank password
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header
+
+  Scenario: To verify CloudMySql source plugin validation error message when fetch size is changed to zero
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "fetchSize" with value: "zeroValue"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize"
+
+  Scenario: To verify CloudMySql source plugin validation error message with number of splits without split-by field name
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy"
+    Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessageBlankSplitBy"
+
+  Scenario: To verify CloudMySql source plugin validation error message when number of splits value is changed to zero
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "numSplits" with value: "zeroValue"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits"
+
+  Scenario: To verify CloudMySql source plugin validation error message when number of splits value is not a number
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "numSplits" with value: "zeroSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber"
+
+  Scenario: To verify CloudMySql source plugin validation error message with blank bounding query
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "project:region:instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "invalidDatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery"
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplits"
+    Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery"
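+
+#   Split-by, number of splits, and bounding query act together: with more
+#   than one split the import query must contain a $CONDITIONS placeholder
+#   that each split's slice of the bounding query's MIN/MAX range is
+#   substituted into. A typical valid trio (illustrative values):
+#     importQuery=select * from mytable where $CONDITIONS
+#     splitBy=id
+#     boundingQuery=select min(id), max(id) from mytable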
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
new file mode 100644
index 000000000..5721be421
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
@@ -0,0 +1,38 @@
+Feature: CloudMySql source - Verify CloudMySql source plugin design time macro scenarios
+
+  Scenario: To verify CloudMySql source plugin validation with macro enabled fields for connection section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Click on the Macro button of Property: "user" and set the value to: "username"
+    Then Click on the Macro button of Property: "password" and set the value to: "password"
+    Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+
+  Scenario: To verify CloudMySql source plugin validation with macro enabled fields for basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "TestDatabase"
+    Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
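+
+#   With "importQuery" macro-enabled the field holds ${CloudMySqlImportQuery},
+#   so no Get Schema step is possible at design time; the query is resolved
+#   only when the pipeline runs.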
+
+
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
new file mode 100644
index 000000000..c3ddf705f
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
@@ -0,0 +1,326 @@
+Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery sink
+
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+#   Then Click on the Get Schema button
+#   Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Override Service account details if set in environment variables
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "allowSchemaRelaxation"
+#   Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+#   Then Verify the preview run status of pipeline in the logs is "succeeded"
+#   Then Close the pipeline logs
+#   Then Close the preview
+#   Then Deploy the pipeline
+#   Then Run the Pipeline in Runtime
+#   Then Wait till pipeline is in running state
+#   Then Open and capture logs
+#   Then Verify the pipeline status is "Succeeded"
+#   Then Close the pipeline logs
+#   Then Validate OUT record count is equal to records transferred to target BigQuery table
+
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using long datatypes
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+#   Then Click on the Get Schema button
+#   Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema2"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "allowSchemaRelaxation"
+#   Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+#   Then Verify the preview run status of pipeline in the logs is "succeeded"
+#   Then Close the pipeline logs
+#   Then Close the preview
+#   Then Deploy the pipeline
+#   Then Run the Pipeline in Runtime
+#   Then Wait till pipeline is in running state
+#   Then Open and capture logs
+#   Then Verify the pipeline status is "Succeeded"
+#   Then Close the pipeline logs
+#   Then Validate OUT record count is equal to records transferred to target BigQuery table
+
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using long raw datatypes
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+#   Then Click on the Get Schema button
+#   Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema3"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "allowSchemaRelaxation"
+#   Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+#   Then Verify the preview run status of pipeline in the logs is "succeeded"
+#   Then Close the pipeline logs
+#   Then Close the preview
+#   Then Deploy the pipeline
+#   Then Run the Pipeline in Runtime
+#   Then Wait till pipeline is in running state
+#   Then Open and capture logs
+#   Then Verify the pipeline status is "Succeeded"
+#   Then Close the pipeline logs
+#   Then Validate OUT record count is equal to records transferred to target BigQuery table
+
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using long Varchar datatypes
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+#   Then Click on the Get Schema button
+#   Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema4"
+#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Click plugin property: "truncateTable"
+    Then Click plugin property: "allowSchemaRelaxation"
+#   Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+#   Then Wait till pipeline preview is in running state
+#   Then Open and capture pipeline preview logs
+#   Then Verify the preview run status of pipeline in the logs is "succeeded"
+#   Then Close the pipeline logs
+#   Then Close the preview
+#   Then Deploy the pipeline
+#   Then Run the Pipeline in Runtime
+#   Then Wait till pipeline is in running state
+#   Then Open and capture logs
+#   Then Verify the pipeline status is "Succeeded"
+#   Then Close the pipeline logs
+#   Then Validate OUT record count is equal to records transferred to target BigQuery table
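+
+#   For the datatype scenarios above, MySQL column types are expected to
+#   surface in the CDAP schema roughly as follows (common JDBC mappings,
+#   illustrative):
+#     BIGINT           -> long
+#     VARCHAR/TEXT     -> string
+#     BLOB/MEDIUMBLOB  -> bytes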
plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" +# Then Click on the Get Schema button +# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema4" +# Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "allowSchemaRelaxation" +# Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs +# Then Validate OUT record count is equal to records transferred to target BigQuery table + + Scenario: To verify the pipeline fails while preview with invalid bounding query setting the required split-By field + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and 
Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" +# Then Click on the Get Schema button + Then Replace input plugin property: "splitBy" with value: "splitBy" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQuery" +# Then Verify the Output Schema matches the Expected Schema: "outputSchema" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Override Service account details if set in environment variables + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "allowSchemaRelaxation" +# Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs +# Then Validate OUT record count is equal to records transferred to target BigQuery table + + Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using different datatypes + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" +# Then Click on the Get Schema button +# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema1" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the 
Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Override Service account details if set in environment variables + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Click plugin property: "truncateTable" + Then Click plugin property: "allowSchemaRelaxation" +# Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state +# Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs +# Then Validate OUT record count is equal to records transferred to target BigQuery table + + + + diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature new file mode 100644 index 000000000..88c09eb19 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature @@ -0,0 +1,150 @@ +Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro arguments + + @ORACLE_SOURCE_TEST @ORACLE_SINK_TEST + Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" 
with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Enter input plugin property: "tableName" with value: "mytable" +# Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driver" for key "cloudsql-mysql" + Then Enter runtime argument value from environment variable "name" for key "username" + Then Enter runtime argument value from environment variable "pass" for key "password" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Enter runtime argument value "driver" for key "cloudsql-mysql" +# Then Enter runtime argument value from environment variable "name" for key "username" +# Then Enter runtime argument value from environment variable "pass" for key "password" +# Then Run the Pipeline in Runtime with runtime arguments +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs +# Then Validate the values of records transferred to target table is equal to the values from source table + + Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then 
Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" +# Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "insertQuery" for key "CloudMySqlImportQuery" + Then Enter runtime argument value "table" for key "mytable" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs +# Then Verify the preview run status of pipeline in the logs is "succeeded" +# Then Close the pipeline logs +# Then Close the preview +# Then Deploy the pipeline +# Then Run the Pipeline in Runtime +# Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" +# Then Enter runtime argument value "table" for key "mytable" +# Then Run the Pipeline in Runtime with runtime arguments +# Then Wait till pipeline is in running state +# Then Open and capture logs +# Then Verify the pipeline status is "Succeeded" +# Then Close the pipeline logs +# Then Validate the values of records transferred to target table is equal to the values from source table + + Scenario: To verify pipeline preview fails when invalid connection details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Click on the Macro button of Property: "select-jdbcPluginName" and set the value to: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + + Then Click on the Macro button of Property: "database" and set the value to: "oracleDatabase" + Then Select radio button plugin property: "connectionType" with value: "service" + Then Select radio button plugin property: "role" with value: "sysdba" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "Oracle" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "Oracle2" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "oracleDriverName" + Then Click on the Macro button of Property: "host" and set the value to: "oracleHost" + Then Click on the Macro button of Property: "port" and set the value to: "oraclePort" + Then Click on the Macro 
button of Property: "user" and set the value to: "oracleUsername"
+    Then Click on the Macro button of Property: "password" and set the value to: "oraclePassword"
+    Then Click on the Macro button of Property: "database" and set the value to: "oracleDatabase"
+    Then Enter input plugin property: "referenceName" with value: "targetRef"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Replace input plugin property: "dbSchemaName" with value: "schema"
+    Then Select radio button plugin property: "connectionType" with value: "service"
+    Then Select radio button plugin property: "role" with value: "sysdba"
+    Then Validate "Oracle2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "invalidDriverName" for key "oracleDriverName"
+    Then Enter runtime argument value "invalidHost" for key "oracleHost"
+    Then Enter runtime argument value "invalidPort" for key "oraclePort"
+    Then Enter runtime argument value "invalidUserName" for key "oracleUsername"
+    Then Enter runtime argument value "invalidPassword" for key "oraclePassword"
+    Then Enter runtime argument value "invalidDatabaseName" for key "oracleDatabase"
+    Then Run the preview of pipeline with runtime arguments
+    Then Verify the preview of pipeline is "Failed"
\ No newline at end of file
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
new file mode 100644
index 000000000..763b82d88
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
@@ -0,0 +1,17 @@
+errorMessageInvalidSourceDatabase=SQL error while getting query schema
+errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS' if Number of Splits is not set\
+  \ to 1. Include '$CONDITIONS' in the Import Query
+errorMessageCloudMySqlInvalidReferenceName=Invalid reference name
+errorMessageBlankUsername=Username is required when password is given.
+errorMessageBlankPassword=SQL error while getting query schema
+errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer.
+errorMessageBlankSplitBy=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name.
+errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1.
+errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource
+errorMessageNumberOfSplits=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name.
+errorMessageBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query.
+errorMessageInvalidSinkDatabase=Exception while trying to validate schema of database table '"targetTable"'
+errorMessageInvalidTableName=Exception while trying to validate schema of database table '"table"' for connection \
+  'jdbc:oracle:thin:@xe' with IO Error: Unknown host specified
+errorMessageConnectionName=Connection Name must be in the format <PROJECT_ID>:<REGION>:<INSTANCE_NAME> to connect to a public CloudSQL PostgreSQL instance.
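The connection-name check asserted by errorMessageConnectionName above refers to the <PROJECT_ID>:<REGION>:<INSTANCE_NAME> triple that identifies a public Cloud SQL instance; no host or port is involved. As a rough illustration (not part of this patch set), a test helper could open a JDBC connection through the Cloud SQL socket factory as sketched below. The class name is hypothetical, and the sketch assumes the com.google.cloud.sql:mysql-socket-factory-connector-j-8 artifact and a MySQL JDBC driver are on the classpath.

// Illustrative sketch only -- not part of this patch series.
package io.cdap.plugin.CloudMySql;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

public class CloudMySqlConnectionSketch {

  // connectionName must follow <PROJECT_ID>:<REGION>:<INSTANCE_NAME>,
  // e.g. "my-project:us-central1:my-instance".
  public static Connection connect(String connectionName, String database,
                                   String user, String password) throws SQLException {
    // No host:port in the URL; the Cloud SQL socket factory resolves the instance.
    String url = String.format("jdbc:mysql:///%s", database);
    Properties props = new Properties();
    props.setProperty("cloudSqlInstance", connectionName);
    props.setProperty("socketFactory", "com.google.cloud.sql.mysql.SocketFactory");
    props.setProperty("user", user);
    props.setProperty("password", password);
    return DriverManager.getConnection(url, props);
  }
}

Routing through the socket factory keeps the tests independent of instance IP allow-lists, which is why the plugin validates only the connection-name format rather than a reachable host.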
+ diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties new file mode 100644 index 000000000..b18f51de8 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -0,0 +1,32 @@ +driverName=cloudsql-mysql +username=v +password=v@123 +connectionArgumentsList=[{"key":"name","value":"isha"}] +invalidImportQuery=select +numberOfSplits=2 +invalidRef=invalidRef&^*&&* +zeroValue=0 +zeroSplits=isha +insertQuery= select * from mytable +CloudMySqlImportQuery=select * from HR.SOURCETABLE_SANYNMSPDZ where $CONDITIONS +projectId=cdf-athena +BQReferenceName=reference +bqTargetTable=mytable +bqDatasetId=1234 +dataset=sql +bqSourceTable=mysql +driver=cloudsql-mysql +table=myTable +name=NAME +pass=PASS +outputDatatypesSchema2=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"},{"key":"COL2","value":"bytes"},\ + {"key":"COL3","value":"bytes"},{"key":"COL4","value":"string"},{"key":"COL5","value":"string"},\ + {"key":"COL6","value":"bytes"}] +outputDatatypesSchema3=[{"key":"ID","value":"string"},{"key":"COL1","value":"bytes"}] +outputDatatypesSchema4=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"}] +splitBy=column name +invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table +outputDatatypesSchema1=[{"key":"COL23","value":"double"},{"key":"COL28","value":"timestamp"},\ + {"key":"COL29","value":"timestamp"},{"key":"COL30","value":"string"},{"key":"COL31","value":"string"},\ + {"key":"COL32","value":"string"},{"key":"COL33","value":"timestamp"},{"key":"COL34","value":"float"},\ + {"key":"COL35","value":"double"}] \ No newline at end of file From 46b7c59ce44a9b710aa0c72d746a88980fd0f0d9 Mon Sep 17 00:00:00 2001 From: isha kaushik Date: Tue, 9 May 2023 11:51:13 +0530 Subject: [PATCH 2/7] macro validation --- .../source/CloudMySqlRunTimeMacro.feature | 153 ++++++++++++++---- .../plugin/CloudMySql/CloudMySqlClient.java | 12 ++ .../resources/pluginParameters.properties | 7 + 3 files changed, 141 insertions(+), 31 deletions(-) create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature index 88c09eb19..3f9368114 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature @@ -1,6 +1,5 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro arguments - @ORACLE_SOURCE_TEST @ORACLE_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -27,7 +26,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" Then Replace input plugin property: "database" with value: "databaseName" - Then Enter input plugin property: "tableName" with value: "mytable" + Then Enter input plugin property: "CloudMySqlImportQuery" with value: "mytable" # Then Validate "CloudSQL MySQL2" plugin properties Then Close the Plugin Properties 
page Then Save the pipeline @@ -35,9 +34,10 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Enter runtime argument value "driver" for key "cloudsql-mysql" Then Enter runtime argument value from environment variable "name" for key "username" Then Enter runtime argument value from environment variable "pass" for key "password" + Then Enter runtime argument value "CloudMySqlImportQuery" for key "CloudMySqlImportQuery" Then Run the preview of pipeline with runtime arguments - Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs +# Then Wait till pipeline preview is in running state +# Then Open and capture pipeline preview logs # Then Verify the preview run status of pipeline in the logs is "succeeded" # Then Close the pipeline logs # Then Close the preview @@ -109,42 +109,133 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection Then Navigate to the properties page of plugin: "CloudSQL MySQL" - Then Click on the Macro button of Property: "select-jdbcPluginName" and set the value to: "cloudsql-mysql" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" + Then Replace input plugin property: "database" with value: "databaseName" +# Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "invalidTable" for key "mytable" + Then Enter runtime argument value "invalidUserName" for key "username" + Then Enter runtime argument value "invalidPassword" for key "password" + Then Enter runtime argument value "invalidImportQuery" for key "CloudMySqlImportQuery" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" - Then Click on the Macro button of Property: "database" and set the value to: "oracleDatabase" - Then 
Select radio button plugin property: "connectionType" with value: "service" - Then Select radio button plugin property: "role" with value: "sysdba" - Then Enter input plugin property: "referenceName" with value: "sourceRef" - Then Enter textarea plugin property: "importQuery" with value: "selectQuery" - Then Validate "Oracle" plugin properties + Scenario: To verify pipeline preview fails when invalid basic details provided using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" +# Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "Oracle2" - Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "oracleDriverName" - Then Click on the Macro button of Property: "host" and set the value to: "oracleHost" - Then Click on the Macro button of Property: "port" and set the value to: "oraclePort" - Then Click on the Macro button of Property: "user" and set the value to: "oracleUsername" - Then Click on the Macro button of Property: "password" and set the value to: "oraclePassword" - Then Click on the Macro button of Property: "database" and set the value to: "oracleDatabase" - Then Enter input plugin property: "referenceName" with value: "targetRef" - Then Replace input plugin property: "tableName" with value: "targetTable" - Then Replace input plugin property: "dbSchemaName" with value: "schema" - Then Select radio button plugin property: "connectionType" with value: "service" - Then Select radio button plugin property: "role" with value: "sysdba" - Then Validate "Oracle2" plugin properties + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then 
Click on the Macro button of Property: "tableName" and set the value to: "mytable" + Then Replace input plugin property: "database" with value: "databaseName" +# Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "invalidImportQuery" for key "CloudMySqlImportQuery" + Then Enter runtime argument value "invalidTable" for key "mytable" + Then Run the preview of pipeline with runtime arguments + Then Verify the preview of pipeline is "Failed" + + Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink using macro arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "CloudMySqlDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "databaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" +# Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Click on the Macro button of Property: "project" and set the value to: "projectId" + Then Click on the Macro button of Property: "datasetProject" and set the value to: "bqDatasetId" + Then Click on the Macro button of Property: "dataset" and set the value to: "dataset" + Then Click on the Macro button of Property: "table" and set the value to: "bqSourceTable" + Then Click on the Macro button of Property: "truncateTable" and set the value to: "bqTruncateTable" + Then Click on the Macro button of Property: "allowSchemaRelaxation" and set the value to: "bqUpdateTableSchema" +# Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "invalidDriverName" for key "oracleDriverName" - Then Enter runtime argument value "invalidHost" for key "oracleHost" - Then Enter runtime argument value "invalidPort" for key "oraclePort" - Then Enter runtime argument value "invalidUserName" for key "oracleUsername" - Then Enter runtime argument value "invalidPassword" for key "oraclePassword" - Then Enter runtime argument value "invalidDatabaseName" for key "oracleDatabase" + Then Enter runtime argument value "CloudMySqlDriverName" for key "CloudMySqlDriverName" + Then Enter runtime argument value from environment variable "name" for key "username" + Then Enter runtime argument value from 
environment variable "pass" for key "password"
+    Then Enter runtime argument value "CloudMySqlImportQuery" for key "CloudMySqlImportQuery"
+    Then Enter runtime argument value "projectId" for key "projectId"
+    Then Enter runtime argument value "projectId" for key "bqDatasetId"
+    Then Enter runtime argument value "dataset" for key "dataset"
+    Then Enter runtime argument value "bqSourceTable" for key "bqSourceTable"
+    Then Enter runtime argument value "bqTargetTable" for key "bqTruncateTable"
+    Then Enter runtime argument value "bqTargetTable" for key "bqUpdateTableSchema"
     Then Run the preview of pipeline with runtime arguments
-    Then Verify the preview of pipeline is "Failed"
\ No newline at end of file
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+#    Then Verify the preview run status of pipeline in the logs is "succeeded"
+#    Then Close the pipeline logs
+#    Then Close the preview
+#    Then Deploy the pipeline
+#    Then Run the Pipeline in Runtime
+#    Then Enter runtime argument value "CloudMySqlDriverName" for key "CloudMySqlDriverName"
+#    Then Enter runtime argument value from environment variable "name" for key "username"
+#    Then Enter runtime argument value from environment variable "pass" for key "password"
+#    Then Enter runtime argument value "CloudMySqlImportQuery" for key "CloudMySqlImportQuery"
+#    Then Enter runtime argument value "projectId" for key "projectId"
+#    Then Enter runtime argument value "projectId" for key "bqDatasetId"
+#    Then Enter runtime argument value "dataset" for key "dataset"
+#    Then Enter runtime argument value "bqSourceTable" for key "bqSourceTable"
+#    Then Enter runtime argument value "bqTargetTable" for key "bqTruncateTable"
+#    Then Enter runtime argument value "bqTargetTable" for key "bqUpdateTableSchema"
+#    Then Run the Pipeline in Runtime with runtime arguments
+#    Then Wait till pipeline is in running state
+#    Then Open and capture logs
+#    Then Verify the pipeline status is "Succeeded"
+#    Then Close the pipeline logs
+#    Then Validate OUT record count is equal to records transferred to target BigQuery table
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java
new file mode 100644
index 000000000..a95ab449d
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java
@@ -0,0 +1,13 @@
+package io.cdap.plugin.CloudMySql;
+
+import io.cdap.e2e.utils.PluginPropertyUtils;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.TimeZone;
+
+// Placeholder test client; the JDBC connection and test-data helpers are filled in by a later commit in this series.
+public class CloudMySqlClient {
+}
+
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties
index b18f51de8..9ea6189bb 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties
@@ -19,6 +19,13 @@ driver=cloudsql-mysql
 table=myTable
 name=NAME
 pass=PASS
+invalidUserName=testUser
+invalidPassword=testPassword
+invalidTable=data
+CloudMySqlDriverName=cloudsql-mysql
+bqTruncateTable=truncateTable
+bqUpdateTableSchema=updateSchema
+invalidDatabaseName=invalidDB%$^%*
 outputDatatypesSchema2=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"},{"key":"COL2","value":"bytes"},\
{"key":"COL3","value":"bytes"},{"key":"COL4","value":"string"},{"key":"COL5","value":"string"},\ {"key":"COL6","value":"bytes"}] From 0bd19aebae02f2a8348bf5a7668816ceb3fd97ab Mon Sep 17 00:00:00 2001 From: isha kaushik Date: Mon, 15 May 2023 11:28:04 +0530 Subject: [PATCH 3/7] e2e Tests for cloudSQL-MySQL --- .../sink/CloudMySqlDesignTime.feature | 4 +- .../CloudMySqlDesignTimeValidation.feature | 12 +- .../CloudMySqlDesignTimeWithMacro.feature | 4 +- .../features/sink/CloudMySqlRunTime.feature | 6 +- .../sink/CloudMySqlRunTimeMacro.feature | 2 +- .../source/CloudMySqlDesignTime.feature | 4 +- .../CloudMySqlDesignTimeVaidation.feature | 20 +- .../CloudMySqlDesignTimeWithMacro.feature | 4 +- .../features/source/CloudMySqlRunTime.feature | 15 +- .../source/CloudMySqlRunTimeMacro.feature | 18 +- .../plugin/CloudMySql/CloudMySqlClient.java | 12 -- .../plugin/CloudMySql/runners/TestRunner.java | 4 + .../runners/TestRunnerRequired.java | 4 + .../CloudMySql/stepsdesign/CloudMySql.java | 4 + .../java/io/cdap/plugin/CloudMySqlClient.java | 175 ++++++++++++++++++ .../common/stepsdesign/TestSetupHooks.java | 140 ++++++++++++++ .../BigQuery/BigQueryCreateTableQuery.txt | 1 + .../BigQuery/BigQueryInsertDataQuery.txt | 4 + .../resources/pluginParameters.properties | 1 + 19 files changed, 375 insertions(+), 59 deletions(-) delete mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunnerRequired.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java create mode 100644 cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt create mode 100644 cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature index 1e585573c..cb8042fab 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature @@ -8,7 +8,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time scenarios Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter input plugin property: "tableName" with value: "mytable" @@ -22,7 +22,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time scenarios Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: 
"cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature index 8939b36fa..ea6f744e2 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature @@ -10,7 +10,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -23,7 +23,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -42,7 +42,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input 
plugin property: "referenceName" with value: "RefName" @@ -55,7 +55,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRefName" Then Enter input plugin property: "database" with value: "TestDatabase" @@ -70,7 +70,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRef" @@ -87,7 +87,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" Then Enter input plugin property: "referenceName" with value: "RefName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature index 752540965..9ffda6fc4 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature @@ -6,7 +6,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Select radio button plugin property: 
"instanceType" with value: "public" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" @@ -23,7 +23,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature index d4156e0fc..9eb62c0e3 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature @@ -10,7 +10,6 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin property: "project" with value: "projectId" Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Override Service account details if set in environment variables Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" @@ -20,7 +19,7 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -53,7 +52,6 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin property: "project" with value: "projectId" Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Override Service account details if set in environment variables Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" @@ -63,7 +61,7 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then 
Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature index 4b49bb0e0..4b610b86f 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -19,7 +19,7 @@ Feature: CloudMySql - Verify data transfer to CloudMySql sink with macro argumen Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature index 01ccc1feb..8ec8b0673 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature @@ -10,7 +10,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter textarea plugin property: "importQuery" with value: "insertQuery" @@ -45,7 +45,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "connection" + Then Enter input plugin property: "connectionName" with value: 
"ConnectionName" Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" Then Enter input plugin property: "referenceName" with value: "referencename" Then Enter input plugin property: "database" with value: "TestDatabase" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature index 0be92bf7d..c30de1516 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature @@ -7,7 +7,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -23,7 +23,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -40,7 +40,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRef" @@ -56,7 +56,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input 
plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "databaseName" @@ -71,7 +71,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "databaseName" @@ -86,7 +86,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -103,7 +103,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -121,7 +121,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for 
Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -138,7 +138,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -155,7 +155,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "project:region:instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature index 5721be421..056b788e4 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature @@ -6,7 +6,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time macro sc When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Select radio button plugin property: "instanceType" with value: "public" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" @@ -24,7 +24,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time macro sc When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Select radio button plugin property: 
"instanceType" with value: "public" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature index c3ddf705f..eebc2a52e 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature @@ -10,7 +10,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -23,7 +23,6 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin property: "project" with value: "projectId" Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Override Service account details if set in environment variables Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" @@ -56,7 +55,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields @@ -102,7 +101,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials 
and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -147,7 +146,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -236,7 +235,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -252,7 +251,6 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin property: "project" with value: "projectId" Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Override Service account details if set in environment variables Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" @@ -285,7 +283,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -298,7 +296,6 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Navigate to the properties page of plugin: "BigQuery" Then Replace input plugin 
property: "project" with value: "projectId" Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Override Service account details if set in environment variables Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature index 3f9368114..f91f5329d 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature @@ -10,7 +10,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -21,7 +21,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -63,7 +63,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -74,7 +74,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: 
"public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Enter input plugin property: "referenceName" with value: "RefName" Then Replace input plugin property: "database" with value: "databaseName" Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" @@ -111,7 +111,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -122,7 +122,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -149,7 +149,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -160,7 +160,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin 
property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -185,7 +185,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "CloudMySqlDriverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1:sql-automation-test-instance" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Enter input plugin property: "referenceName" with value: "RefName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java deleted file mode 100644 index a95ab449d..000000000 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/CloudMySqlClient.java +++ /dev/null @@ -1,12 +0,0 @@ -package io.cdap.plugin.CloudMySql; - -import io.cdap.e2e.utils.PluginPropertyUtils; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.TimeZone; - -public class CloudMySqlClient { - } - diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java new file mode 100644 index 000000000..5d67a20cb --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java @@ -0,0 +1,4 @@ +package io.cdap.plugin.CloudMySql.runners; + +public class TestRunner { +} diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunnerRequired.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunnerRequired.java new file mode 100644 index 000000000..5aa0be2a2 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunnerRequired.java @@ -0,0 +1,4 @@ +package io.cdap.plugin.CloudMySql.runners; + +public class TestRunnerRequired { +} diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java new file mode 100644 index 000000000..e130279b9 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java @@ -0,0 +1,4 @@ +package io.cdap.plugin.CloudMySql.stepsdesign; + +public class CloudMySql { +} diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java new file mode 100644 index 000000000..b3eba687a --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java @@ -0,0 +1,175 @@ +package io.cdap.plugin; + +import breeze.macros.expand; +import com.google.cloud.storage.Acl; +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.junit.Assert; + +import java.sql.*; +import java.util.Date; +import java.util.GregorianCalendar; +import 
 +
 +public class CloudMySqlClient {
 +
 +  public static void main(String[] args) throws SQLException, ClassNotFoundException {
 +    getCloudMysqlConnection();
 +    // createSourceTable("myTable");
 +    createSourceTable("newTable");
 +    String[] tablesToDrop = {"newTable"};
 +    dropTables(tablesToDrop);
 +    System.out.println("done");
 +  }
 +
 +  public static Connection getCloudMysqlConnection() throws SQLException, ClassNotFoundException {
 +    Class.forName("com.google.cloud.sql.mysql.SocketFactory");
 +    // Hardcoded test instance and credentials for the CloudSQL test database.
 +    String instanceConnectionName = "cdf-athena:us-central1:sql-automation-test-instance";
 +    String databaseName = "TestDatabase";
 +    String username = "v";
 +    String password = "v@123";
 +    // The Cloud SQL socket factory connects by instance connection name, so the JDBC URL omits host and port.
 +    String jdbcUrl = String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s", databaseName, instanceConnectionName, username, password);
 +    Connection conn = DriverManager.getConnection(jdbcUrl);
 +    System.out.println("connected to database");
 +    return conn;
 +  }
 +
 +  public static int countRecord(String table) throws SQLException, ClassNotFoundException {
 +    String countQuery = "SELECT COUNT(*) as total FROM " + table;
 +    try (Connection connect = getCloudMysqlConnection();
 +         Statement statement = connect.createStatement();
 +         ResultSet rs = statement.executeQuery(countQuery)) {
 +      int num = 0;
 +      while (rs.next()) {
 +        num = rs.getInt(1);
 +      }
 +      return num;
 +    }
 +  }
 +
 +  public static boolean validateRecordValues(String sourceTable, String targetTable)
 +    throws SQLException, ClassNotFoundException {
 +    String getSourceQuery = "SELECT * FROM " + sourceTable;
 +    String getTargetQuery = "SELECT * FROM " + targetTable;
 +    try (Connection connect = getCloudMysqlConnection()) {
 +      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
 +      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
 +                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
 +      Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
 +                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
 +      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
 +      ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
 +      return compareResultSetData(rsSource, rsTarget);
 +    }
 +  }
 +
 +  /**
 +   * Compares the ResultSet data of the source table and the sink table.
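 +   * Rows are compared positionally, column by column; TIMESTAMP columns are read through a
 +   * UTC calendar so both result sets are normalized to the same time zone before comparison.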
+ * @param rsSource result set of the source table data + * @param rsTarget result set of the target table data + * @return true if rsSource matches rsTarget + */ + public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException { + ResultSetMetaData mdSource = rsSource.getMetaData(); + ResultSetMetaData mdTarget = rsTarget.getMetaData(); + int columnCountSource = mdSource.getColumnCount(); + int columnCountTarget = mdTarget.getColumnCount(); + Assert.assertEquals("Number of columns in source and target are not equal", + columnCountSource, columnCountTarget); + while (rsSource.next() && rsTarget.next()) { + int currentColumnCount = 1; + while (currentColumnCount <= columnCountSource) { + String columnTypeName = mdSource.getColumnTypeName(currentColumnCount); + int columnType = mdSource.getColumnType(currentColumnCount); + String columnName = mdSource.getColumnName(currentColumnCount); + if (columnType == Types.TIMESTAMP) { + GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC")); + gc.setGregorianChange(new Date(Long.MIN_VALUE)); + Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc); + Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc); + Assert.assertTrue(String.format("Different values found for column : %s", columnName), + sourceTS.equals(targetTS)); + } else { + String sourceString = rsSource.getString(currentColumnCount); + String targetString = rsTarget.getString(currentColumnCount); + Assert.assertTrue(String.format("Different values found for column : %s", columnName), + String.valueOf(sourceString).equals(String.valueOf(targetString))); + } + currentColumnCount++; + } + } + Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table", + rsSource.next()); + Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table", + rsTarget.next()); + return true; + } + + public static void createSourceTable(String sourceTable) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudMysqlConnection(); + Statement statement = connect.createStatement()) { + String createSourceTableQuery = "CREATE TABLE IF NOT EXISTS " + sourceTable + + "(id int, lastName varchar(255), PRIMARY KEY (id))"; + statement.executeUpdate(createSourceTableQuery); + + // Truncate table to clean the data of last failure run. + String truncateSourceTableQuery = "TRUNCATE TABLE " + sourceTable; + statement.executeUpdate(truncateSourceTableQuery); + + // Insert dummy data. + statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" + + "VALUES (1, 'Ankit')"); + statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" + + "VALUES (2, 'Isha')"); + statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" + + "VALUES (3, 'Vipin')"); + + + } + } + + public static void createTargetTable(String targetTable) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudMysqlConnection(); + Statement statement = connect.createStatement()) { + String createTargetTableQuery = "CREATE TABLE IF NOT EXISTS " + targetTable + + "(id int, lastName varchar(255), PRIMARY KEY (id))"; + statement.executeUpdate(createTargetTableQuery); +// Truncate table to clean the data of last failure run. 
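 +      // TRUNCATE removes all rows in place, so a rerun after a failed test always
 +      // starts from an empty target table.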
+ String truncateTargetTableQuery = "TRUNCATE TABLE " + targetTable; + statement.executeUpdate(truncateTargetTableQuery); + } + } + + public static void createSourceDatatypesTable(String sourceTable) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudMysqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns; + statement.executeUpdate(createSourceTableQuery); + + // Insert dummy data. + String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues"); + String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList"); + statement.executeUpdate("INSERT INTO " + sourceTable + " " + datatypesColumnsList + " " + datatypesValues); + } + } + + public static void createTargetDatatypesTable(String targetTable) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudMysqlConnection(); + Statement statement = connect.createStatement()) { + String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns"); + String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns; + statement.executeUpdate(createTargetTableQuery); + } + } + + public static void dropTables(String[] tables) throws SQLException, ClassNotFoundException { + try (Connection connect = getCloudMysqlConnection(); + Statement statement = connect.createStatement()) { + for (String table : tables) { + String dropTableQuery = "Drop Table " + table; + statement.executeUpdate(dropTableQuery); + } + } + } + } + diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java new file mode 100644 index 000000000..cc85fe8a6 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -0,0 +1,140 @@ +package io.cdap.plugin.common.stepsdesign; + +import com.google.cloud.bigquery.BigQueryException; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.PluginPropertyUtils; +import io.cdap.plugin.CloudMySqlClient; +import io.cucumber.java.After; +import io.cucumber.java.Before; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.commons.lang3.StringUtils; +import org.junit.Assert; +import stepsdesign.BeforeActions; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.SQLException; +import java.util.NoSuchElementException; +import java.util.UUID; + +public class TestSetupHooks { + private static void setTableName() { + String randomString = RandomStringUtils.randomAlphabetic(10); + String sourceTableName = String.format("SourceTable_%s", randomString); + String targetTableName = String.format("TargetTable_%s", randomString); + PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName); + PluginPropertyUtils.addPluginProp("targetTable", targetTableName); + PluginPropertyUtils.addPluginProp("selectQuery", String.format("select * from %s", sourceTableName)); + } + + @Before(order = 1) + public static void initializeDBProperties() { + String username = System.getenv("username"); + if (username != null && !username.isEmpty()) { + PluginPropertyUtils.addPluginProp("username", username); + } + String password = 
System.getenv("password"); + if (password != null && !password.isEmpty()) { + PluginPropertyUtils.addPluginProp("password", password); + } + TestSetupHooks.setTableName(); + } + + @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_TEST") + public static void createTables() throws SQLException, ClassNotFoundException { + CloudMySqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable")); + CloudMySqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable")); + } + + + @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_DATATYPES_TEST") + public static void createDatatypesTable() throws SQLException, ClassNotFoundException { + CloudMySqlClient.createSourceDatatypesTable(PluginPropertyUtils.pluginProp("sourceTable")); + CloudMySqlClient.createTargetDatatypesTable(PluginPropertyUtils.pluginProp("targetTable")); + } + + @After(order = 2, value = "@CLOUDMYSQL_SINK_TEST") + public static void dropTables() throws SQLException, ClassNotFoundException { + CloudMySqlClient.dropTables(new String[]{PluginPropertyUtils.pluginProp("sourceTable"), + PluginPropertyUtils.pluginProp("targetTable")}); + } + + @Before(order = 1, value = "@BQ_SINK_TEST") + public static void setTempTargetBQTableName() { + String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName); + } + + @After(order = 1, value = "@BQ_SINK_TEST") + public static void deleteTempTargetBQTable() throws IOException, InterruptedException { + String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable"); + try { + BigQueryClient.dropBqQuery(bqTargetTableName); + BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqTargetTable"); + } catch (BigQueryException e) { + if (e.getMessage().contains("Not found: Table")) { + BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist"); + } else { + Assert.fail(e.getMessage()); + } + } + } + + @Before(order = 1, value = "@BQ_SOURCE_TEST") + public static void createTempSourceBQTable() throws IOException, InterruptedException { + createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"), + PluginPropertyUtils.pluginProp("InsertBQDataQueryFile")); + } + + @After(order = 1, value = "@BQ_SOURCE_TEST") + public static void deleteTempSourceBQTable() throws IOException, InterruptedException { + String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable"); + BigQueryClient.dropBqQuery(bqSourceTable); + BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqSourceTable"); + } + private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) + throws IOException, InterruptedException { + String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_"); + + String createTableQuery = StringUtils.EMPTY; + try { + createTableQuery = new String(Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource + ("/" + bqCreateTableQueryFile).toURI())) + , StandardCharsets.UTF_8); + createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + 
bqCreateTableQueryFile + " - " + e.getMessage()); + Assert.fail("Exception in BigQuery testdata prerequisite setup " + + "- error in reading create table query file " + e.getMessage()); + } + + String insertDataQuery = StringUtils.EMPTY; + try { + insertDataQuery = new String(Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource + ("/" + bqInsertDataQueryFile).toURI())) + , StandardCharsets.UTF_8); + insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage()); + Assert.fail("Exception in BigQuery testdata prerequisite setup " + + "- error in reading insert data query file " + e.getMessage()); + } + BigQueryClient.getSoleQueryResult(createTableQuery); + try { + BigQueryClient.getSoleQueryResult(insertDataQuery); + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); + } + +} diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt new file mode 100644 index 000000000..56a1eda2d --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt @@ -0,0 +1 @@ +create table `DATASET.TABLE_NAME` (ID STRING, LASTNAME STRING) diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt new file mode 100644 index 000000000..e12d1d88b --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt @@ -0,0 +1,4 @@ +insert into `DATASET.TABLE_NAME` (ID, LASTNAME) values +('1','Shelby'), +('2','Simpson'), +('3','Williams'); diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties index 9ea6189bb..3cf20075f 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -6,6 +6,7 @@ invalidImportQuery=select numberOfSplits=2 invalidRef=invalidRef&^*&&* zeroValue=0 +ConnectionName=cdf-athena:us-central1:sql-automation-test-instance zeroSplits=isha insertQuery= select * from mytable CloudMySqlImportQuery=select * from HR.SOURCETABLE_SANYNMSPDZ where $CONDITIONS From 9fcc646a4e1edba166117df286a65f2e52781969 Mon Sep 17 00:00:00 2001 From: priyabhatnagar Date: Thu, 25 May 2023 11:47:30 +0530 Subject: [PATCH 4/7] CloudSql My Sql e2e tests --- .../sink/CloudMySqlDesignTime.feature | 14 + .../features/sink/CloudMySqlRunTime.feature | 92 +++-- .../sink/CloudMySqlRunTimeMacro.feature | 65 ++-- .../source/CloudMySqlDesignTime.feature | 35 +- .../CloudMySqlDesignTimeVaidation.feature | 44 ++- .../CloudMySqlDesignTimeWithMacro.feature | 26 +- .../features/source/CloudMySqlRunTime.feature | 329 +++++++---------- .../source/CloudMySqlRunTimeMacro.feature | 333 +++++++++++------- .../plugin/CloudMySql/runners/TestRunner.java | 31 ++ .../java/io/cdap/plugin/CloudMySqlClient.java | 51 ++- 
.../common/stepsdesign/TestSetupHooks.java | 9 +
 .../BigQuery/BigQueryCreateTableQuery.txt | 3 +-
 .../BigQuery/BigQueryInsertDataQuery.txt | 7 +-
 .../resources/errorMessage.properties | 3 +-
 .../pluginDataCyAttributes.properties | 12 +
 .../resources/pluginParameters.properties | 51 ++-
 pom.xml | 2 +-
 17 files changed, 646 insertions(+), 461 deletions(-)
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties

diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
index cb8042fab..ad17579d3 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
@@ -1,3 +1,17 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
 @CloudMySql
 Feature: CloudMySql sink- Verify CloudMySql sink plugin design time scenarios

diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature
index 9eb62c0e3..96d1fd42b 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature
@@ -1,21 +1,38 @@
-Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql sink
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
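+#
+# Note: the runtime scenarios below expect a reachable CloudSQL MySQL test instance;
+# values such as "ConnectionName", "username" and "password" are resolved from
+# pluginParameters.properties and environment variables when the tests run.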
+@CloudMySql +Feature: CloudMySql Sink - Run time scenarios + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page When Expand Plugin group in the LHS plugins list: "Sink" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection - Then Navigate to the properties page of plugin: "BigQuery" - Then Replace input plugin property: "project" with value: "projectId" - Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Enter input plugin property: "referenceName" with value: "BQReferenceName" - Then Enter input plugin property: "dataset" with value: "dataset" - Then Enter input plugin property: "table" with value: "bqTargetTable" -# Then Click on the Get Schema button -# Then Validate "BigQuery" plugin properties - Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" @@ -23,25 +40,25 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Enter input plugin property: "database" with value: "TestDatabase" - Then Enter input plugin property: "tableName" with value: "mytable" -# Then Validate "CloudSQL MySQL" plugin properties + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline Then Wait till pipeline preview is in running state Then Open and capture pipeline preview logs -# Then Verify the preview run status of pipeline in the logs is "succeeded" -# Then Close the pipeline logs -# Then Close the preview -# Then Deploy the pipeline -# Then Run the Pipeline in Runtime -# Then Wait till pipeline is in running state -# Then Open and capture logs -# Then Verify the pipeline status is "Succeeded" -# Then Close the pipeline logs -# Then Validate OUT record count is equal to records transferred to target BigQuery table + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then 
Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + @BQ_SOURCE_TEST @CLOUDMYSQL_SOURCE_TEST Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully when connection arguments are set Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -55,8 +72,8 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then Enter input plugin property: "referenceName" with value: "BQReferenceName" Then Enter input plugin property: "dataset" with value: "dataset" Then Enter input plugin property: "table" with value: "bqTargetTable" -# Then Click on the Get Schema button -# Then Validate "BigQuery" plugin properties + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" @@ -66,24 +83,23 @@ Feature: CloudMySql - Verify data transfer from BigQuery source to CloudMySql si Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" - Then Enter input plugin property: "tableName" with value: "mytable" + Then Replace input plugin property: "tableName" with value: "targetTable" Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" -# Then Validate "CloudSQL MySQL" plugin properties + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline Then Wait till pipeline preview is in running state -# Then Open and capture pipeline preview logs -# Then Verify the preview run status of pipeline in the logs is "succeeded" -# Then Close the pipeline logs -# Then Close the preview -# Then Deploy the pipeline -# Then Run the Pipeline in Runtime -# Then Wait till pipeline is in running state -# Then Open and capture logs -# Then Verify the pipeline status is "Succeeded" -# Then Close the pipeline logs -# Then Validate OUT record count is equal to records transferred to target BigQuery table + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature index 4b610b86f..a6100e8c8 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -1,6 +1,21 @@ -Feature: CloudMySql - Verify data transfer to CloudMySql sink with macro arguments +# Copyright © 2023 Cask Data, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. - Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments + +Feature: CloudMySql Sink - Run time scenarios (macro) + + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "BigQuery" from the plugins list as: "Source" @@ -13,8 +28,8 @@ Feature: CloudMySql - Verify data transfer to CloudMySql sink with macro argumen Then Click on the Macro button of Property: "datasetProject" and set the value to: "bqDatasetProjectId" Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" Then Click on the Macro button of Property: "table" and set the value to: "bqTargetTable" -# Then Click on the Get Schema button -# Then Validate "BigQuery" plugin properties + Then Click on the Get Schema button + Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" @@ -25,7 +40,7 @@ Feature: CloudMySql - Verify data transfer to CloudMySql sink with macro argumen Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" -# Then Validate "CloudSQL MySQL" plugin properties + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline @@ -39,24 +54,24 @@ Feature: CloudMySql - Verify data transfer to CloudMySql sink with macro argumen Then Enter runtime argument value "table" for key "mytable" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state -# Then Open and capture pipeline preview logs -# Then Verify the preview run status of pipeline in the logs is "succeeded" -# Then Close the pipeline logs -# Then Close the preview -# Then Deploy the pipeline -# Then Run the Pipeline in Runtime -# Then Enter runtime argument value "projectId" for key "projectId" -# Then Enter runtime argument value "bqDatasetId" for key "bqDatasetProjectId" -# Then Enter runtime argument value "dataset" for key "bqDataset" -# Then Enter runtime argument value "bqSourceTable" for key "bqTargetTable" -# Then Enter runtime argument value "driver" for key "cloudsql-mysql" -# Then Enter runtime argument value from environment variable "name" for key "username" -# Then Enter runtime argument value from environment variable "pass" for key "password" -# Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" -# Then Enter runtime argument value "table" for key "mytable" -# Then Run the Pipeline 
in Runtime with runtime arguments
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "projectId" for key "projectId"
+ Then Enter runtime argument value "bqDatasetId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "bqSourceTable" for key "bqTargetTable"
+ Then Enter runtime argument value "driver" for key "cloudsql-mysql"
+ Then Enter runtime argument value from environment variable "name" for key "username"
+ Then Enter runtime argument value from environment variable "pass" for key "password"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Enter runtime argument value "table" for key "mytable"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Close the pipeline logs

diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
index 8ec8b0673..6041602f2 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
@@ -1,3 +1,17 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
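+#
+# The design time scenarios below validate the plugin configuration and compare the
+# schema returned by the Get Schema button against the expected output schema.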
+
 @CloudMySql
 Feature: CloudMySql source- Verify CloudMySql source plugin design time scenarios
@@ -7,12 +21,12 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
-    Then Click on the Macro button of Property: "user" and set the value to: "username"
-    Then Click on the Macro button of Property: "password" and set the value to: "password"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Enter input plugin property: "referenceName" with value: "RefName"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
     Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Get Schema button
     Then Verify the Output Schema matches the Expected Schema: "outputSchema"
@@ -26,13 +40,12 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "connection"
+    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
-    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
-    Then Enter input plugin property: "referenceName" with value: "refName"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Get Schema button
     Then Verify the Output Schema matches the Expected Schema: "outputSchema"
     Then Validate "CloudSQL MySQL" plugin properties
@@ -46,10 +59,12 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
     Then Enter input plugin property: "referenceName" with value: "referencename"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Get Schema button
     Then Verify the Output Schema matches the Expected Schema: "outputSchema"
     Then Validate "CloudSQL MySQL" plugin properties
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
index c30de1516..e1494327b 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
@@ -1,4 +1,18 @@
-Feature: CloudMySql source- Verify ,Mysql source plugin design time validation scenarios
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+Feature: CloudMySql source- Verify CloudMySql source plugin design time validation scenarios
 
   Scenario: To verify CloudMySql source plugin validation error message with invalid database
     Given Open Datafusion Project to configure pipeline
@@ -27,7 +41,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
     Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
     Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
     Then Click on the Validate button
@@ -44,8 +58,8 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "invalidRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudMySqlInvalidReferenceName"
@@ -59,8 +73,8 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername"
@@ -74,8 +88,8 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Validate button
     Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header
@@ -91,7 +105,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
     Then Replace input plugin property: "database" with value: "DatabaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Replace input plugin property: "fetchSize" with value: "zeroValue"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize"
@@ -108,7 +122,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
     Then Replace input plugin property: "database" with value: "DatabaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy"
@@ -126,7 +140,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
     Then Replace input plugin property: "database" with value: "DatabaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Replace input plugin property: "numSplits" with value: "zeroValue"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits"
@@ -143,7 +157,7 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
     Then Replace input plugin property: "database" with value: "DatabaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Replace input plugin property: "numSplits" with value: "zeroSplits"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber"
@@ -159,9 +173,9 @@ Feature: CloudMySql source- Verify ,Mysql source plugin design time validation s
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "invalidDatabaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
     Then Click on the Validate button
     Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery"
     Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplits"
-    Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery"
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
index 056b788e4..216a14c28 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
@@ -1,3 +1,18 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@CloudMySql
 Feature: CloudMySql source- Verify CloudMySql source plugin design time macro scenarios
 
   Scenario: To verify CloudMySql source plugin validation with macro enabled fields for connection section
@@ -5,17 +20,16 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time macro sc
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
-    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "DriverName"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Click on the Macro button of Property: "user" and set the value to: "username"
     Then Click on the Macro button of Property: "password" and set the value to: "password"
     Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList"
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
     Then Click on the Validate button
-# Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
 
   Scenario: To verify cloudsql source plugin validation with macro enabled fields for basic section
@@ -29,9 +43,9 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time macro sc
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "TestDatabase"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
     Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery"
-# Then Validate "CloudSQL MySQL" plugin properties
+    Then Click on the Validate button
     Then Close the Plugin Properties page
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
index eebc2a52e..4214abe5d 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
@@ -1,5 +1,23 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
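At run time, each macro-enabled field set above becomes a `${key}` placeholder that CDAP resolves from the runtime arguments the later scenarios supply. A minimal sketch of that substitution step, with invented keys and values; this illustrates the concept, not the CDAP resolver itself:

```java
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/** Illustrative only: expands ${key} placeholders from a runtime-argument map. */
public class MacroSketch {
  private static final Pattern MACRO = Pattern.compile("\\$\\{([^}]+)}");

  static String expand(String value, Map<String, String> runtimeArgs) {
    Matcher m = MACRO.matcher(value);
    StringBuffer out = new StringBuffer();
    while (m.find()) {
      // Unknown keys are left untouched rather than failing, for simplicity.
      String replacement = runtimeArgs.getOrDefault(m.group(1), m.group(0));
      m.appendReplacement(out, Matcher.quoteReplacement(replacement));
    }
    m.appendTail(out);
    return out.toString();
  }

  public static void main(String[] args) {
    Map<String, String> runtimeArgs = Map.of(
        "username", "test_user",
        "CloudMySqlImportQuery", "select * from mytable");
    System.out.println(expand("${username}", runtimeArgs));
    System.out.println(expand("${CloudMySqlImportQuery}", runtimeArgs));
  }
}
```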
+
+@CloudMySql
-Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery sink
+Feature: CloudMySql Source - Run time scenarios
 
+  @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
   Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
@@ -14,11 +32,11 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
-# Then Click on the Get Schema button
-# Then Verify the Output Schema matches the Expected Schema: "outputSchema"
-# Then Validate "CloudSQL MySQL" plugin properties
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
     Then Navigate to the properties page of plugin: "BigQuery"
     Then Replace input plugin property: "project" with value: "projectId"
@@ -26,26 +44,25 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
     Then Enter input plugin property: "dataset" with value: "dataset"
     Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
+    Then Validate "BigQuery" plugin properties
     Then Close the Plugin Properties page
     Then Save the pipeline
     Then Preview and run the pipeline
     Then Wait till pipeline preview is in running state
     Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    #Then Validate the values of records transferred to target Big Query table is equal to the values from source table
 
-  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using long datatypes
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST @BQ_SINK_TEST
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully with all datatypes
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
@@ -56,15 +73,13 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
-    Then Select radio button plugin property: "instanceType" with value: "public"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
-# Then Click on the Get Schema button
-# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema2"
-# Then Validate "CloudSQL MySQL" plugin properties
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
     Then Navigate to the properties page of plugin: "BigQuery"
     Then Replace input plugin property: "project" with value: "projectId"
@@ -72,214 +87,115 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
     Then Enter input plugin property: "dataset" with value: "dataset"
     Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
+    Then Validate "BigQuery" plugin properties
     Then Close the Plugin Properties page
     Then Save the pipeline
     Then Preview and run the pipeline
     Then Wait till pipeline preview is in running state
     Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    #Then Validate the values of records transferred to target Big Query table is equal to the values from source table
 
-  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using long raw datatypes
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST
+  Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql sink successfully
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
     When Expand Plugin group in the LHS plugins list: "Sink"
-    When Select plugin: "BigQuery" from the plugins list as: "Sink"
-    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
-    Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
     Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
-# Then Click on the Get Schema button
-# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema3"
-# Then Validate "CloudSQL MySQL" plugin properties
-    Then Close the Plugin Properties page
-    Then Navigate to the properties page of plugin: "BigQuery"
-    Then Replace input plugin property: "project" with value: "projectId"
-    Then Enter input plugin property: "datasetProject" with value: "projectId"
-    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
-    Then Enter input plugin property: "dataset" with value: "dataset"
-    Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
+    Then Click on the Get Schema button
+# Then Verify the Output Schema matches the Expected Schema: "OutputSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
-    Then Save the pipeline
-    Then Preview and run the pipeline
-    Then Wait till pipeline preview is in running state
-    Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
-
-  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using long Varchar datatypes
-    Given Open Datafusion Project to configure pipeline
-    When Expand Plugin group in the LHS plugins list: "Source"
-    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
-    When Expand Plugin group in the LHS plugins list: "Sink"
-    When Select plugin: "BigQuery" from the plugins list as: "Sink"
-    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
-    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
-    Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
-    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
-# Then Click on the Get Schema button
-# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema4"
-# Then Validate "Oracle" plugin properties
-    Then Close the Plugin Properties page
-    Then Navigate to the properties page of plugin: "BigQuery"
-    Then Replace input plugin property: "project" with value: "projectId"
-    Then Enter input plugin property: "datasetProject" with value: "projectId"
-    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
-    Then Enter input plugin property: "dataset" with value: "dataset"
-    Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
     Then Close the Plugin Properties page
     Then Save the pipeline
     Then Preview and run the pipeline
-# Then Wait till pipeline preview is in running state
-# Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
 
-  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully when connection arguments are set
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST
+  Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully when connection arguments are set
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
     When Expand Plugin group in the LHS plugins list: "Sink"
-    When Select plugin: "BigQuery" from the plugins list as: "Sink"
-    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
-    Then Enter input plugin property: "referenceName" with value: "sourceRef"
-    Then Replace input plugin property: "database" with value: "databaseName"
     Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
     Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
-# Then Click on the Get Schema button
-# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema4"
-# Then Validate "Oracle" plugin properties
+    Then Click on the Get Schema button
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
-    Then Navigate to the properties page of plugin: "BigQuery"
-    Then Replace input plugin property: "project" with value: "projectId"
-    Then Enter input plugin property: "datasetProject" with value: "projectId"
-    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
-    Then Enter input plugin property: "dataset" with value: "dataset"
-    Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
-    Then Close the Plugin Properties page
-    Then Save the pipeline
-    Then Preview and run the pipeline
-    Then Wait till pipeline preview is in running state
-    Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
-
-  Scenario: To verify the pipeline fails while preview with invalid bounding query setting the required split-By field
-    Given Open Datafusion Project to configure pipeline
-    When Expand Plugin group in the LHS plugins list: "Source"
-    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
-    When Expand Plugin group in the LHS plugins list: "Sink"
-    When Select plugin: "BigQuery" from the plugins list as: "Sink"
-    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
-    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
-# Then Click on the Get Schema button
-    Then Replace input plugin property: "splitBy" with value: "splitBy"
-    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
-    Then Enter textarea plugin property: "boundingQuery" with value: "invalidBoundingQuery"
-# Then Verify the Output Schema matches the Expected Schema: "outputSchema"
-# Then Validate "CloudSQL MySQL" plugin properties
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
     Then Close the Plugin Properties page
-    Then Navigate to the properties page of plugin: "BigQuery"
-    Then Replace input plugin property: "project" with value: "projectId"
-    Then Enter input plugin property: "datasetProject" with value: "projectId"
-    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
-    Then Enter input plugin property: "dataset" with value: "dataset"
-    Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
-    Then Close the Plugin Properties page
     Then Save the pipeline
     Then Preview and run the pipeline
-    Then Wait till pipeline preview is in running state
-    Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
 
-  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using different datatypes
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST
+  Scenario: Verify user should not be able to deploy and run the pipeline when plugin is configured with invalid bounding query
    Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
     When Expand Plugin group in the LHS plugins list: "Sink"
-    When Select plugin: "BigQuery" from the plugins list as: "Sink"
-    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
@@ -287,37 +203,32 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
-    Then Enter input plugin property: "database" with value: "TestDatabase"
-    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
-# Then Click on the Get Schema button
-# Then Verify the Output Schema matches the Expected Schema: "outputDatatypesSchema1"
-# Then Validate "CloudSQL MySQL" plugin properties
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "boundingQuery" with value: "invalidboundQuery"
+    Then Enter textarea plugin property: "importQuery" with value: "cloudsqlimportQuery"
+    Then Replace input plugin property: "splitBy" with value: "splitby"
+    Then Replace input plugin property: "numSplits" with value: "numbersplitsgenerate"
+    Then Click on the Get Schema button
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
-    Then Navigate to the properties page of plugin: "BigQuery"
-    Then Replace input plugin property: "project" with value: "projectId"
-    Then Enter input plugin property: "datasetProject" with value: "projectId"
-    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
-    Then Enter input plugin property: "dataset" with value: "dataset"
-    Then Enter input plugin property: "table" with value: "bqTargetTable"
-    Then Click plugin property: "truncateTable"
-    Then Click plugin property: "allowSchemaRelaxation"
-# Then Validate "BigQuery" plugin properties
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
     Then Close the Plugin Properties page
     Then Save the pipeline
-    Then Preview and run the pipeline
-    Then Wait till pipeline preview is in running state
-# Then Open and capture pipeline preview logs
-# Then Verify the preview run status of pipeline in the logs is "succeeded"
-# Then Close the pipeline logs
-# Then Close the preview
-# Then Deploy the pipeline
-# Then Run the Pipeline in Runtime
-# Then Wait till pipeline is in running state
-# Then Open and capture logs
-# Then Verify the pipeline status is "Succeeded"
-# Then Close the pipeline logs
-# Then Validate OUT record count is equal to records transferred to target BigQuery table
-
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    And Verify the pipeline status is "Failed"
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
index f91f5329d..516b59d96 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
@@ -1,5 +1,21 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@CloudMySql
 Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro arguments
 
+  @CLOUDMYSQL_SOURCE_TEST
   Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in connection section
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
@@ -8,51 +24,50 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
     Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
-    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName"
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Enter input plugin property: "connectionName" with value: "ConnectionName"
-    Then Click on the Macro button of Property: "user" and set the value to: "username"
-    Then Click on the Macro button of Property: "password" and set the value to: "password"
+    Then Click on the Macro button of Property: "user" and set the value to: "Username"
+    Then Click on the Macro button of Property: "password" and set the value to: "Password"
     Then Enter input plugin property: "referenceName" with value: "RefName"
-    Then Replace input plugin property: "database" with value: "databaseName"
-    Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery"
-# Then Validate "CloudSQL MySQL" plugin properties
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
-    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Click on the Macro button of Property: "user" and set the value to: "username" - Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Enter input plugin property: "CloudMySqlImportQuery" with value: "mytable" -# Then Validate "CloudSQL MySQL2" plugin properties + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL2" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "driver" for key "cloudsql-mysql" - Then Enter runtime argument value from environment variable "name" for key "username" - Then Enter runtime argument value from environment variable "pass" for key "password" - Then Enter runtime argument value "CloudMySqlImportQuery" for key "CloudMySqlImportQuery" + Then Enter runtime argument value "driverName" for key "driverName" + Then Enter runtime argument value "username" for key "Username" + Then Enter runtime argument value "password" for key "Password" Then Run the preview of pipeline with runtime arguments -# Then Wait till pipeline preview is in running state -# Then Open and capture pipeline preview logs -# Then Verify the preview run status of pipeline in the logs is "succeeded" -# Then Close the pipeline logs -# Then Close the preview -# Then Deploy the pipeline -# Then Run the Pipeline in Runtime -# Then Enter runtime argument value "driver" for key "cloudsql-mysql" -# Then Enter runtime argument value from environment variable "name" for key "username" -# Then Enter runtime argument value from environment variable "pass" for key "password" -# Then Run the Pipeline in Runtime with runtime arguments -# Then Wait till pipeline is in running state -# Then Open and capture logs -# Then Verify the pipeline status is "Succeeded" -# Then Close the pipeline logs -# Then Validate the values of records transferred to target table is equal to the values from source table + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driver" for key "driverName" + Then Enter runtime argument value "username" for key "Username" + Then Enter runtime argument value "password" for key "Password" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + @CLOUDMYSQL_SOURCE_TEST Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in basic section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -69,113 +84,187 
@@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Enter input plugin property: "referenceName" with value: "RefName" Then Replace input plugin property: "database" with value: "databaseName" Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" -# Then Validate "CloudSQL MySQL" plugin properties + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" -# Then Validate "CloudSQL MySQL2" plugin properties + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL2" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline Then Enter runtime argument value "insertQuery" for key "CloudMySqlImportQuery" - Then Enter runtime argument value "table" for key "mytable" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state Then Open and capture pipeline preview logs -# Then Verify the preview run status of pipeline in the logs is "succeeded" -# Then Close the pipeline logs -# Then Close the preview -# Then Deploy the pipeline -# Then Run the Pipeline in Runtime -# Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" -# Then Enter runtime argument value "table" for key "mytable" -# Then Run the Pipeline in Runtime with runtime arguments -# Then Wait till pipeline is in running state -# Then Open and capture logs -# Then Verify the pipeline status is "Succeeded" -# Then Close the pipeline logs -# Then Validate the values of records transferred to target table is equal to the values from source table + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs - Scenario: To verify pipeline preview fails when invalid connection details provided using macro arguments + @CLOUDMYSQL_SOURCE_TEST + Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in advance section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" When Expand 
Plugin group in the LHS plugins list: "Sink" - When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Click on the Macro button of Property: "user" and set the value to: "username" - Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" -# Then Validate "CloudSQL MySQL" plugin properties + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + And Click on the Macro button of Property: "fetchSize" and set the value to: "fetchSize" + And Click on the Macro button of Property: "splitBy" and set the value to: "SplitBy" + And Click on the Macro button of Property: "numSplits" and set the value to: "NumSplits" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Click on the Macro button of Property: "user" and set the value to: "username" - Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" - Then Replace input plugin property: "database" with value: "databaseName" -# Then Validate "CloudSQL MySQL2" plugin properties + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL2" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "invalidTable" for key "mytable" - Then Enter runtime argument value "invalidUserName" for key "username" - Then Enter runtime argument value "invalidPassword" for key "password" - Then Enter runtime argument value "invalidImportQuery" for key "CloudMySqlImportQuery" + Then Enter 
runtime argument value "fetchSize" for key "fetchSize" + Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Enter runtime argument value "numSplits" for key "NumSplits" Then Run the preview of pipeline with runtime arguments - Then Verify the preview of pipeline is "Failed" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "fetchSize" for key "fetchSize" + Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Enter runtime argument value "numSplits" for key "NumSplits" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs - Scenario: To verify pipeline preview fails when invalid basic details provided using macro arguments - Given Open Datafusion Project to configure pipeline + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST + Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in connection section When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection Then Navigate to the properties page of plugin: "CloudSQL MySQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "Username" + Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" -# Then Validate "CloudSQL MySQL" plugin properties + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: 
"referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "driverName" + Then Enter runtime argument value "username" for key "Username" + Then Enter runtime argument value "password" for key "Password" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driver" for key "driverName" + Then Enter runtime argument value "username" for key "Username" + Then Enter runtime argument value "password" for key "Password" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST + Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" Then Replace input plugin property: "database" with value: "databaseName" -# Then Validate "CloudSQL MySQL2" plugin properties + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value 
"invalidImportQuery" for key "CloudMySqlImportQuery" - Then Enter runtime argument value "invalidTable" for key "mytable" + Then Enter runtime argument value "insertQuery" for key "CloudMySqlImportQuery" Then Run the preview of pipeline with runtime arguments - Then Verify the preview of pipeline is "Failed" + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs - Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink using macro arguments + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST + Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in advance section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" @@ -183,59 +272,45 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument When Select plugin: "BigQuery" from the plugins list as: "Sink" Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection Then Navigate to the properties page of plugin: "CloudSQL MySQL" - Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "CloudMySqlDriverName" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Click on the Macro button of Property: "user" and set the value to: "username" - Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" - Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" -# Then Validate "CloudSQL MySQL" plugin properties + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + And Click on the Macro button of Property: "fetchSize" and set the value to: "fetchSize" + And Click on the Macro button of Property: "splitBy" and set the value to: "SplitBy" + And Click on the Macro button of Property: "numSplits" and set the value to: "NumSplits" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" Then Enter input plugin property: "referenceName" 
with value: "BQReferenceName" - Then Click on the Macro button of Property: "project" and set the value to: "projectId" - Then Click on the Macro button of Property: "datasetProject" and set the value to: "bqDatasetId" - Then Click on the Macro button of Property: "dataset" and set the value to: "dataset" - Then Click on the Macro button of Property: "table" and set the value to: "bqSourceTable" - Then Click on the Macro button of Property: "truncateTable" and set the value to: "bqTruncateTable" - Then Click on the Macro button of Property: "allowSchemaRelaxation" and set the value to: "bqUpdateTableSchema" -# Then Validate "BigQuery" plugin properties + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "CloudMySqlDriverName" for key "CloudMySqlDriverName" - Then Enter runtime argument value from environment variable "name" for key "username" - Then Enter runtime argument value from environment variable "pass" for key "password" - Then Enter runtime argument value "CloudMySqlImportQuery" for key "CloudMySqlImportQuery" - Then Enter runtime argument value "projectId" for key "projectId" - Then Enter runtime argument value "projectId" for key "bqDatasetId" - Then Enter runtime argument value "dataset" for key "dataset" - Then Enter runtime argument value "bqSourceTable" for key "bqSourceTable" - Then Enter runtime argument value "bqTargetTable" for key "bqTruncateTable" - Then Enter runtime argument value "bqTargetTable" for key "bqUpdateTableSchema" + Then Enter runtime argument value "fetchSize" for key "fetchSize" + Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Enter runtime argument value "numSplits" for key "NumSplits" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state Then Open and capture pipeline preview logs -# Then Verify the preview run status of pipeline in the logs is "succeeded" -# Then Close the pipeline logs -# Then Close the preview -# Then Deploy the pipeline -# Then Run the Pipeline in Runtime -# Then Enter runtime argument value "CloudMySqlDriverName" for key "CloudMySqlDriverName" -# Then Enter runtime argument value from environment variable "name" for key "username" -# Then Enter runtime argument value from environment variable "pass" for key "password" -# Then Enter runtime argument value "CloudMySqlImportQuery" for key "CloudMySqlImportQuery" -# Then Enter runtime argument value "projectId" for key "projectId" -# Then Enter runtime argument value "projectId" for key "bqDatasetId" -# Then Enter runtime argument value "dataset" for key "dataset" -# Then Enter runtime argument value "bqSourceTable" for key "bqSourceTable" -# Then Enter runtime argument value "bqTargetTable" for key "bqTruncateTable" -# Then Enter runtime argument value "bqTargetTable" for key "bqUpdateTableSchema" -# Then Run the Pipeline in Runtime with runtime arguments -# Then Wait till pipeline is in running state -# Then Open and capture logs -# Then Verify the pipeline status is "Succeeded" -# Then Close the pipeline logs -# Then Validate OUT record count is equal to records transferred to target BigQuery table + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the 
pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "fetchSize" for key "fetchSize" + Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Enter runtime argument value "numSplits" for key "NumSplits" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java index 5d67a20cb..2d8ac7c4e 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java @@ -1,4 +1,35 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ package io.cdap.plugin.CloudMySql.runners; +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute Mysql plugin test cases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@CloudMySql"}, + plugin = {"pretty", "html:target/cucumber-html-report/CloudMySql", + "json:target/cucumber-reports/cucumber-mysql.json", + "junit:target/cucumber-reports/cucumber-mysql.xml"} +) public class TestRunner { } diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java index b3eba687a..81027d43d 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java @@ -1,7 +1,5 @@ package io.cdap.plugin; -import breeze.macros.expand; -import com.google.cloud.storage.Acl; import io.cdap.e2e.utils.PluginPropertyUtils; import org.junit.Assert; @@ -12,24 +10,28 @@ public class CloudMySqlClient { + private static final String database = PluginPropertyUtils.pluginProp("DatabaseName"); + private static final String connectionName = PluginPropertyUtils.pluginProp("ConnectionName"); + public static void main(String[] args) throws SQLException, ClassNotFoundException { getCloudMysqlConnection(); //createSourceTable("myTable"); - createSourceTable("newTable"); - String[] tablesToDrop = {"newTable"}; - dropTables(tablesToDrop); - System.out.println("done"); +// createSourceTable("newTable"); +// String[] tablesToDrop = {"newTable"}; +// dropTables(tablesToDrop); + //System.out.println("done"); } + public static Connection getCloudMysqlConnection() throws SQLException, ClassNotFoundException { Class.forName("com.google.cloud.sql.mysql.SocketFactory"); - String 
instanceConnectionName="cdf-athena:us-central1:sql-automation-test-instance"; - String databaseName="TestDatabase"; - String Username="v"; - String Password="v@123"; - String jdbcUrl=String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s",databaseName,instanceConnectionName,Username,Password); + String instanceConnectionName = "cdf-athena:us-central1:sql-automation-test-instance"; + String databaseName = "TestDatabase"; + String Username = "v"; + String Password = "v@123"; + String jdbcUrl = String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s", databaseName, instanceConnectionName, Username, Password); Connection conn = DriverManager.getConnection(jdbcUrl); - System.out.println("connected to database"); + //System.out.println("connected to database"); return conn; } @@ -64,6 +66,7 @@ public static boolean validateRecordValues(String sourceTable, String targetTabl /** * Compares the result Set data in source table and sink table.. + * * @param rsSource result set of the source table data * @param rsTarget result set of the target table data * @return true if rsSource matches rsTarget @@ -111,17 +114,17 @@ public static void createSourceTable(String sourceTable) throws SQLException, Cl "(id int, lastName varchar(255), PRIMARY KEY (id))"; statement.executeUpdate(createSourceTableQuery); - // Truncate table to clean the data of last failure run. + // Truncate table to clean the data of last failure run. String truncateSourceTableQuery = "TRUNCATE TABLE " + sourceTable; statement.executeUpdate(truncateSourceTableQuery); // Insert dummy data. statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" + - "VALUES (1, 'Ankit')"); + "VALUES (1, 'Priya')"); statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" + - "VALUES (2, 'Isha')"); + "VALUES (2, 'Shubhangi')"); statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" + - "VALUES (3, 'Vipin')"); + "VALUES (3, 'Shorya')"); } @@ -147,7 +150,7 @@ public static void createSourceDatatypesTable(String sourceTable) throws SQLExce statement.executeUpdate(createSourceTableQuery); // Insert dummy data. 
-    String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValues");
+    String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValue1");
     String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
     statement.executeUpdate("INSERT INTO " + sourceTable + " " + datatypesColumnsList + " " + datatypesValues);
   }
@@ -162,6 +165,17 @@ public static void createTargetDatatypesTable(String targetTable) throws SQLExce
     }
   }

+  public static void createTargetCloudMysqlTable(String targetTable) throws SQLException,
+    ClassNotFoundException {
+    try (Connection connect = getCloudMysqlConnection();
+         Statement statement = connect.createStatement()) {
+      // Use the CloudSQL MySQL column definitions; "SqlServerDatatypesColumns" was a copy-paste
+      // slip and is not defined in pluginParameters.properties.
+      String datatypesColumns = PluginPropertyUtils.pluginProp("CloudMySqlDatatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
   public static void dropTables(String[] tables) throws SQLException, ClassNotFoundException {
     try (Connection connect = getCloudMysqlConnection();
          Statement statement = connect.createStatement()) {
@@ -171,5 +185,4 @@ public static void dropTables(String[] tables) throws SQLException, ClassNotFoun
       }
     }
   }
-  }
-
+}
\ No newline at end of file
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
index cc85fe8a6..1572abc71 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -19,6 +19,10 @@
 import java.util.NoSuchElementException;
 import java.util.UUID;

+/**
+ * CloudSQL MySQL test hooks.
+ */ + public class TestSetupHooks { private static void setTableName() { String randomString = RandomStringUtils.randomAlphabetic(10); @@ -61,6 +65,11 @@ public static void dropTables() throws SQLException, ClassNotFoundException { PluginPropertyUtils.pluginProp("targetTable")}); } + @Before(order = 2, value = "@CLOUDMYSQL_TEST_TABLE") + public static void createCloudMysqlTestTable() throws SQLException, ClassNotFoundException { + CloudMySqlClient.createTargetCloudMysqlTable(PluginPropertyUtils.pluginProp("targetTable")); + } + @Before(order = 1, value = "@BQ_SINK_TEST") public static void setTempTargetBQTableName() { String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt index 56a1eda2d..54fd6ef5e 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt @@ -1 +1,2 @@ -create table `DATASET.TABLE_NAME` (ID STRING, LASTNAME STRING) +create table `DATASET.TABLE_NAME` (col1 BYTES, col2 STRING, col3 DATE, col4 FLOAT64, col6 TIMESTAMP, +col8 BOOL, col9 INT64, col10 TIME) diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt index e12d1d88b..bdccb0ea8 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt @@ -1,4 +1,3 @@ -insert into `DATASET.TABLE_NAME` (ID, LASTNAME) values -('1','Shelby'), -('2','Simpson'), -('3','Williams'); +insert into `DATASET.TABLE_NAME` (col1, col2, col3, col4, col6, col8, col9, col10) values +(b'01011011','priya','2021-01-28',1.110,'2019-03-10 04:50:01 UTC',false,92233720,'21:26:00'), +(b'01011011','surya','2021-01-21',1.110000001,'2018-03-10 04:50:01 UTC',true,92233729,'20:26:00'); diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties index 763b82d88..27437b5f0 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties @@ -14,4 +14,5 @@ errorMessageInvalidSinkDatabase=Exception while trying to validate schema of dat errorMessageInvalidTableName=Exception while trying to validate schema of database table '"table"' for connection \ 'jdbc:oracle:thin:@xe' with IO Error: Unknown host specified errorMessageConnectionName=Connection Name must be in the format :: to connect to a public CloudSQL PostgreSQL instance. - +validationSuccessMessage=No errors found. 
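# A sketch, not part of this patch, of how the two validation keys above are typically consumed in
# step definitions. The errorProp helper is an assumption about the cdap-e2e-framework API; if this
# framework version only exposes pluginProp (used elsewhere in this patch), that call works the same way:
#   String expected = PluginPropertyUtils.errorProp("validationSuccessMessage");
#   Assert.assertEquals(expected, "No errors found.");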
+validationErrorMessage=COUNT ERROR found diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties new file mode 100644 index 000000000..74a4271fb --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties @@ -0,0 +1,12 @@ +referenceName=referenceName +enableQuotedValues=switch-enableQuotedValues +inputsToLoadMemory=inMemoryInputs +projectId=project +datasetProjectId=datasetProject +dataset=dataset +table=table +truncateTable=switch-truncateTable +truncateTableMacroInput=truncateTable +updateTableSchema=switch-allowSchemaRelaxation +updateTableSchemaMacroInput=allowSchemaRelaxation +outputSchemaMacroInput=Output Schema-macro-input diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties index 3cf20075f..0e43a9e82 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -1,7 +1,9 @@ driverName=cloudsql-mysql username=v password=v@123 -connectionArgumentsList=[{"key":"name","value":"isha"}] +DatabaseName=TestDatabase +connectionArgumentsList=[{"key":"numSplits","value":"1"}] +invalidconnectionArgumentsList=[{"key":"numSplits","value":"%$^&#"}] invalidImportQuery=select numberOfSplits=2 invalidRef=invalidRef&^*&&* @@ -9,7 +11,10 @@ zeroValue=0 ConnectionName=cdf-athena:us-central1:sql-automation-test-instance zeroSplits=isha insertQuery= select * from mytable -CloudMySqlImportQuery=select * from HR.SOURCETABLE_SANYNMSPDZ where $CONDITIONS +CloudMySqlImportQuery=select * from mytable +fetchSize=1000 +NumSplits=1 +SplitBy=ID projectId=cdf-athena BQReferenceName=reference bqTargetTable=mytable @@ -27,6 +32,13 @@ CloudMySqlDriverName=cloudsql-mysql bqTruncateTable=truncateTable bqUpdateTableSchema=updateSchema invalidDatabaseName=invalidDB%$^%* +invalidboundQuery=SELECT MIN(id),MAX(id) FROM table +cloudsqlimportQuery=where $CONDITIONS; +splitby=ID +numbersplitsgenerate=2 +outputSchema=[{"key":"fname","value":"string"},{"key":"lname","value":"string"},{"key":"cost","value":"double"},\ + {"key":"zipcode","value":"int"}] +OutputSchema=[{"key":"id","value":"int"},{"key":"lastName","value":"string"}] outputDatatypesSchema2=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"},{"key":"COL2","value":"bytes"},\ {"key":"COL3","value":"bytes"},{"key":"COL4","value":"string"},{"key":"COL5","value":"string"},\ {"key":"COL6","value":"bytes"}] @@ -37,4 +49,37 @@ invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table outputDatatypesSchema1=[{"key":"COL23","value":"double"},{"key":"COL28","value":"timestamp"},\ {"key":"COL29","value":"timestamp"},{"key":"COL30","value":"string"},{"key":"COL31","value":"string"},\ {"key":"COL32","value":"string"},{"key":"COL33","value":"timestamp"},{"key":"COL34","value":"float"},\ - {"key":"COL35","value":"double"}] \ No newline at end of file + {"key":"COL35","value":"double"}] + +datatypesColumns=(ID varchar(100) PRIMARY KEY, COL1 bit(1), COL2 tinyint(20), COL3 boolean, COL4 smallint(10), \ + COL5 blob, COL6 mediumint(10), COL7 int(11), COL8 bigint(1), COL9 float, COL10 date, COL11 datetime, \ + COL12 decimal(10,0), COL13 double, COL14 enum('A','B','C'), COL15 time, COL16 timestamp, COL18 char(1),\ + COL19 binary(1), COL20 tinytext, COL21 varbinary(100), COL22 tinyblob, COL23 mediumblob, \ + COL24 blob, 
COL25 text, COL26 mediumtext, COL27 longblob, COL28 longtext, COL29 set('X','y','Z')) +datatypesColumnsList=(ID,COL1,COL2,COL3,COL4,COL5,COL6,COL7,COL8,COL9,COL10,COL11,COL12,COL13,COL14,COL15,COL16,\ + COL18,COL19,COL20,COL21,COL22,COL23,COL24,COL25,COL26,COL27,COL28,COL29) +datatypesValue1=VALUES ('User1',1,-1,true,-32768,HEX('27486920546869732069732061206C6F6E6720746578742E27'),0,25,\ +-9223372036854775808,22.0,'2023-01-01','2023-01-01 00:00:00',1234,1234.5678,'A','00:00:00','2023-01-01 00:00:00',\ + 'P',1,'This is a test message',1,HEX('27486920546869732069732061206C6F6E6720746578742E27'),\ + HEX('27486920546869732069732061206C6F6E6720746578742E27'),HEX('27486920546869732069732061206C6F6E6720746578742E27'),\ + 'This is a test message','This is a test message',HEX('27486920546869732069732061206C6F6E6720746578742E27'),\ + 'This is a test message to check ','X') +datatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"boolean"},{"key":"COL2","value":"int"},\ + {"key":"COL3","value":"boolean"},{"key":"COL4","value":"int"},{"key":"COL5","value":"bytes"},\ + {"key":"COL6","value":"double"},{"key":"COL7","value":"int"},{"key":"COL8","value":"long"},\ + {"key":"COL9","value":"float"},{"key":"COL10","value":"date"},{"key":"COL11","value":"timestamp"},\ + {"key":"COL12","value":"decimal"},{"key":"COL13","value":"double"},{"key":"COL14","value":"string"},\ + {"key":"COL15","value":"time"},{"key":"COL16","value":"timestamp"},\ + {"key":"COL18","value":"string"},{"key":"COL19","value":"bytes"},{"key":"COL20","value":"string"},\ + {"key":"COL21","value":"bytes"},{"key":"COL22","value":"bytes"},{"key":"COL23","value":"bytes"},\ + {"key":"COL24","value":"bytes"},{"key":"COL25","value":"string"},{"key":"COL26","value":"time"},\ + {"key":"COL27","value":"bytes"},{"key":"COL28","value":"string"},{"key":"COL29","value":"string"}] + +bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\ + {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col6","value":"timestamp"},\ + {"key":"col8","value":"boolean"},{"key":"col9","value":"long"},{"key":"col10","value":"time"}] +CloudMySqlDatatypesColumns=(COL1 VARBINARY(100) , COL2 VARCHAR(100), COL3 DATE, COL4 DOUBLE,\ + COL6 TIMESTAMP, COL8 BIT, COL9 BIGINT, COL10 TIME) +#bq queries file path +CreateBQTableQueryFile=testData/BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=testData/BigQuery/BigQueryInsertDataQuery.txt diff --git a/pom.xml b/pom.xml index 35a5f4161..97b6825f7 100644 --- a/pom.xml +++ b/pom.xml @@ -715,7 +715,7 @@ io.cdap.tests.e2e cdap-e2e-framework - 0.3.0-SNAPSHOT + 0.2.0-SNAPSHOT test From 70ee2bc20670e925e2808a0cca4f1efa14730363 Mon Sep 17 00:00:00 2001 From: isha kaushik Date: Thu, 25 May 2023 15:50:38 +0530 Subject: [PATCH 5/7] bq validation --- .../features/source/CloudMySqlRunTime.feature | 8 +- .../cdap/plugin/CloudMySql/BQValidation.java | 154 ++++++++++++++++++ .../CloudMySql/stepsdesign/CloudMySql.java | 1 + .../java/io/cdap/plugin/CloudMySqlClient.java | 3 +- .../common/stepsdesign/TestSetupHooks.java | 6 + .../resources/pluginParameters.properties | 2 - pom.xml | 2 +- 7 files changed, 169 insertions(+), 7 deletions(-) create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature index 4214abe5d..cb4e62e08 100644 --- 
a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature @@ -17,7 +17,7 @@ Feature: CloudMySql Source - Run time scenarios Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery sink - @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST + @CLOUDMYSQL_SOURCE_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -29,8 +29,10 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "user" with value: "username" + Then Enter input plugin property: "password" with value: "password" +# Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields +# Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "DatabaseName" Then Enter textarea plugin property: "importQuery" with value: "selectQuery" diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java new file mode 100644 index 000000000..850ea08a1 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java @@ -0,0 +1,154 @@ +package io.cdap.plugin.CloudMySql; + +import com.google.cloud.bigquery.TableResult; +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import io.cdap.e2e.utils.BigQueryClient; +import io.cdap.e2e.utils.PluginPropertyUtils; +import io.cdap.plugin.CloudMySqlClient; +import org.junit.Assert; + +import java.io.IOException; +import java.sql.*; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Date; +import java.util.List; + +/** + * BQValidation + */ + +public class BQValidation { + public static void main(String[] args) { +// validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable) + } + + /** + * Extracts entire data from source and target tables. 
+   * @param schema      name of the database (schema) containing the source table
+   * @param sourceTable table at the source side
+   * @param targetTable table at the sink side
+   * @return true if the values in source and target side are equal
+   */
+  public static boolean validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, ParseException, IOException, InterruptedException {
+    List<JsonObject> jsonResponse = new ArrayList<>();
+    List<Object> bigQueryRows = new ArrayList<>();
+    getBigQueryTableData(targetTable, bigQueryRows);
+    for (Object rows : bigQueryRows) {
+      JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class);
+      jsonResponse.add(json);
+    }
+    String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
+    try (Connection connect = CloudMySqlClient.getCloudMysqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      return compareResultSetData(rsSource, jsonResponse);
+    }
+  }
+
+  /**
+   * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects.
+   *
+   * @param table        The name of the BigQuery table to fetch data from.
+   * @param bigQueryRows The list to store the fetched BigQuery data.
+   */
+  private static void getBigQueryTableData(String table, List<Object> bigQueryRows)
+    throws IOException, InterruptedException {
+    String projectId = PluginPropertyUtils.pluginProp("projectId");
+    String dataset = PluginPropertyUtils.pluginProp("dataset");
+    String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + table + "` AS t";
+    TableResult result = BigQueryClient.getQueryResult(selectQuery);
+    result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue()));
+  }
+
+  /**
+   * Compares the data in the result set obtained from the CloudSQL MySQL database with the provided
+   * BigQuery JSON objects.
+   *
+   * @param rsSource     The result set obtained from the CloudSQL MySQL database.
+   * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data.
+   * @return True if the result set data matches the BigQuery data, false otherwise.
+   * @throws SQLException   If an SQL error occurs during the result set operations.
+   * @throws ParseException If an error occurs while parsing the data.
+   */
+  public static boolean compareResultSetData(ResultSet rsSource, List<JsonObject> bigQueryData)
+    throws SQLException, ParseException {
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    boolean result = false;
+    int columnCountSource = mdSource.getColumnCount();
+
+    if (bigQueryData == null) {
+      Assert.fail("bigQueryData is null");
+      return result;
+    }
+
+    // Get the column count of the first JsonObject in bigQueryData.
+    int columnCountTarget = 0;
+    if (bigQueryData.size() > 0) {
+      columnCountTarget = bigQueryData.get(0).entrySet().size();
+    }
+    // Compare the number of columns in the source and target.
+    Assert.assertEquals("Number of columns in source and target are not equal",
+                        columnCountSource, columnCountTarget);
+
+    // 'jsonObjectIdx' tracks the index of the current JsonObject in the bigQueryData list.
+    int jsonObjectIdx = 0;
+    while (rsSource.next()) {
+      int currentColumnCount = 1;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        // Perform different comparisons based on column type.
+        switch (columnType) {
+          // Binary columns are compared as Base64 strings, matching BigQuery's JSON encoding of BYTES.
+          case Types.BLOB:
+          case Types.VARBINARY:
+          case Types.LONGVARBINARY:
+            String sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount)));
+            String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                sourceB64String, targetB64String);
+            break;
+
+          case Types.NUMERIC:
+            long sourceVal = rsSource.getLong(currentColumnCount);
+            long targetVal = Long.parseLong(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceVal).equals(String.valueOf(targetVal)));
+            break;
+
+          case Types.TIMESTAMP:
+            Timestamp sourceTS = rsSource.getTimestamp(columnName);
+            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+            Date parsedDate = dateFormat.parse(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Timestamp targetTs = new Timestamp(parsedDate.getTime());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceTS).equals(String.valueOf(targetTs)));
+            break;
+
+          default:
+            String sourceString = rsSource.getString(currentColumnCount);
+            String targetString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            Assert.assertEquals(String.format("Different %s values found for column : %s", columnTypeName, columnName),
+                                String.valueOf(sourceString), String.valueOf(targetString));
+        }
+        currentColumnCount++;
+      }
+      jsonObjectIdx++;
+    }
+    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+                       rsSource.next());
+    return true;
+  }
+}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java
index e130279b9..f3d96427e 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java
@@ -1,4 +1,5 @@
 package io.cdap.plugin.CloudMySql.stepsdesign;

 public class CloudMySql {
+
 }
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java
index 81027d43d..90942ceb3 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java
@@ -31,7 +31,7 @@ public static Connection getCloudMysqlConnection() throws SQLException, ClassNot
     String Password = "v@123";
     String jdbcUrl = String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s", databaseName, instanceConnectionName, Username, Password);
     Connection conn = DriverManager.getConnection(jdbcUrl);
-    //System.out.println("connected to database");
+    System.out.println("connected to database");
     return conn;
   }
@@ -148,6 +148,7 @@ public static void createSourceDatatypesTable(String sourceTable) throws SQLExce
     String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
     String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns;
     statement.executeUpdate(createSourceTableQuery);
+    System.out.println(createSourceTableQuery);
     // Insert dummy data.
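// A minimal sketch, not part of this patch: wiring BQValidation into a Cucumber step definition.
// The step text, method name, and property keys are illustrative assumptions; it requires
// io.cucumber.java.en.Then, org.junit.Assert, and the BQValidation class added above.
@Then("Validate the values of records transferred to target BigQuery table")
public void validateRecordsTransferredToBigQuery() throws Exception {
  boolean recordsMatched = BQValidation.validateBQAndDBRecordValues(
    PluginPropertyUtils.pluginProp("DatabaseName"),   // schema (database) of the source table
    PluginPropertyUtils.pluginProp("sourceTable"),    // CloudSQL MySQL source table
    PluginPropertyUtils.pluginProp("bqTargetTable")); // BigQuery target table
  Assert.assertTrue("Values of records transferred to the target table should match the source table",
                    recordsMatched);
}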
String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValue1"); diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index 1572abc71..abaf9f4aa 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -24,6 +24,10 @@ */ public class TestSetupHooks { + public static void main(String[] args) throws SQLException, ClassNotFoundException { + setTableName(); + createDatatypesTable(); + } private static void setTableName() { String randomString = RandomStringUtils.randomAlphabetic(10); String sourceTableName = String.format("SourceTable_%s", randomString); @@ -31,6 +35,8 @@ private static void setTableName() { PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName); PluginPropertyUtils.addPluginProp("targetTable", targetTableName); PluginPropertyUtils.addPluginProp("selectQuery", String.format("select * from %s", sourceTableName)); + System.out.println(sourceTableName); + } @Before(order = 1) diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties index 0e43a9e82..5e8d38d1a 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -23,8 +23,6 @@ dataset=sql bqSourceTable=mysql driver=cloudsql-mysql table=myTable -name=NAME -pass=PASS invalidUserName=testUser invalidPassword=testPassword invalidTable=data diff --git a/pom.xml b/pom.xml index 97b6825f7..35a5f4161 100644 --- a/pom.xml +++ b/pom.xml @@ -715,7 +715,7 @@ io.cdap.tests.e2e cdap-e2e-framework - 0.2.0-SNAPSHOT + 0.3.0-SNAPSHOT test From 67e6e491100779b4b8b6e9d31366acfdfa29ff00 Mon Sep 17 00:00:00 2001 From: priyabhatnagar Date: Thu, 25 May 2023 23:37:38 +0530 Subject: [PATCH 6/7] CloudSql My Sql Sink Run Time Scenarios e2e tests --- .../features/sink/CloudMySqlRunTime.feature | 43 ----- .../sink/CloudMySqlRunTimeMacro.feature | 159 +++++++++++++++--- .../features/source/CloudMySqlRunTime.feature | 3 - .../source/CloudMySqlRunTimeMacro.feature | 39 ++--- .../java/io/cdap/plugin/CloudMySqlClient.java | 3 +- .../resources/pluginParameters.properties | 16 +- 6 files changed, 156 insertions(+), 107 deletions(-) diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature index 96d1fd42b..fb6cc5374 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature @@ -58,49 +58,6 @@ Feature: CloudMySql Sink - Run time scenarios Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs - @BQ_SOURCE_TEST @CLOUDMYSQL_SOURCE_TEST - Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully when connection arguments are set - Given Open Datafusion Project to configure pipeline - When Expand Plugin group in the LHS plugins list: "Source" - When Select plugin: "BigQuery" from the plugins list as: "Source" - When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" - Then Connect plugins: 
"BigQuery" and "CloudSQL MySQL" to establish connection - Then Navigate to the properties page of plugin: "BigQuery" - Then Replace input plugin property: "project" with value: "projectId" - Then Enter input plugin property: "datasetProject" with value: "projectId" - Then Enter input plugin property: "referenceName" with value: "BQReferenceName" - Then Enter input plugin property: "dataset" with value: "dataset" - Then Enter input plugin property: "table" with value: "bqTargetTable" - Then Click on the Get Schema button - Then Validate "BigQuery" plugin properties - Then Close the Plugin Properties page - Then Navigate to the properties page of plugin: "CloudSQL MySQL" - Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" - Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields - Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields - Then Enter input plugin property: "referenceName" with value: "RefName" - Then Enter input plugin property: "database" with value: "TestDatabase" - Then Replace input plugin property: "tableName" with value: "targetTable" - Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" - Then Validate "CloudSQL MySQL" plugin properties - Then Close the Plugin Properties page - Then Save the pipeline - Then Preview and run the pipeline - Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs - Then Close the preview - Then Deploy the pipeline - Then Run the Pipeline in Runtime - Then Wait till pipeline is in running state - Then Open and capture logs - Then Verify the pipeline status is "Succeeded" - Then Close the pipeline logs - diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature index a6100e8c8..97d537c70 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -15,6 +15,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -24,34 +25,28 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection Then Navigate to the properties page of plugin: "BigQuery" Then Enter input plugin property: "referenceName" with value: "BQReferenceName" - Then Click on the Macro button of Property: "project" and set the value to: "projectId" - Then Click on the Macro button of Property: "datasetProject" and set the value to: "bqDatasetProjectId" - Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" - Then Click on the Macro button of Property: "table" and set the value to: "bqTargetTable" - Then Click on the 
Get Schema button + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" Then Validate "BigQuery" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL" - Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" Then Enter input plugin property: "connectionName" with value: "ConnectionName" - Then Click on the Macro button of Property: "user" and set the value to: "username" - Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Click on the Macro button of Property: "user" and set the value to: "Username" + Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" - Then Enter input plugin property: "database" with value: "TestDatabase" - Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "projectId" for key "projectId" - Then Enter runtime argument value "bqDatasetId" for key "bqDatasetProjectId" - Then Enter runtime argument value "dataset" for key "bqDataset" - Then Enter runtime argument value "bqSourceTable" for key "bqTargetTable" - Then Enter runtime argument value "driver" for key "cloudsql-mysql" - Then Enter runtime argument value from environment variable "name" for key "username" - Then Enter runtime argument value from environment variable "pass" for key "password" - Then Enter runtime argument value "table" for key "mytable" + Then Enter runtime argument value "driver" for key "driverName" + Then Enter runtime argument value "username" for key "Username" + Then Enter runtime argument value "password" for key "Password" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state Then Open and capture pipeline preview logs @@ -60,18 +55,128 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime - Then Enter runtime argument value "projectId" for key "projectId" - Then Enter runtime argument value "bqDatasetId" for key "bqDatasetProjectId" - Then Enter runtime argument value "dataset" for key "bqDataset" - Then Enter runtime argument value "bqSourceTable" for key "bqTargetTable" - Then Enter runtime argument value "driver" for key "cloudsql-mysql" - Then Enter runtime argument value from environment variable "name" for key "username" - Then Enter runtime argument value from environment variable "pass" for key "password" - Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset" - Then Enter runtime argument value "table" for key "mytable" + Then Enter runtime argument value "driver" for key "driverName" + 
Then Enter runtime argument value "username" for key "Username" + Then Enter runtime argument value "password" for key "Password" Then Run the Pipeline in Runtime with runtime arguments Then Wait till pipeline is in running state Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "Tablename" + Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "targetTable" for key "Tablename" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "targetTable" for key "Tablename" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + Scenario: Verify pipeline failure message in logs when user provides invalid Table Name of plugin with Macros + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + 
Then Navigate to the properties page of plugin: "BigQuery" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "invalidTablename" + Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "invalidTablename" for key "invalidTablename" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + And Verify the pipeline status is "Failed" + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + Scenario: Verify pipeline failure message in logs when user provides invalid credentials of plugin with Macros + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Click on the Macro button of Property: "user" and set the value to: "Username" + Then Click on the Macro button of Property: "password" and set the value to: "Password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close 
the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "invalidUserName" for key "Username" + Then Enter runtime argument value "invalidPassword" for key "Password" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + And Verify the pipeline status is "Failed" + diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature index 4214abe5d..2d2176fa5 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature @@ -229,6 +229,3 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si Then Wait till pipeline is in running state Then Open and capture logs And Verify the pipeline status is "Failed" - - - diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature index 516b59d96..ada6144bc 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature @@ -82,7 +82,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "database" with value: "DatabaseName" Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page @@ -99,12 +99,10 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "insertQuery" for key "CloudMySqlImportQuery" + Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs + Then Verify the preview of pipeline is "success" Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime @@ -121,8 +119,8 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" - When Select plugin: "BigQuery" from the plugins list as: "Sink" - Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection Then Navigate to the properties 
page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" @@ -134,7 +132,6 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Enter textarea plugin property: "importQuery" with value: "selectQuery" And Click on the Macro button of Property: "fetchSize" and set the value to: "fetchSize" And Click on the Macro button of Property: "splitBy" and set the value to: "SplitBy" - And Click on the Macro button of Property: "numSplits" and set the value to: "NumSplits" Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" @@ -152,18 +149,14 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Preview and run the pipeline Then Enter runtime argument value "fetchSize" for key "fetchSize" Then Enter runtime argument value "splitBy" for key "SplitBy" - Then Enter runtime argument value "numSplits" for key "NumSplits" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs + Then Verify the preview of pipeline is "success" Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime Then Enter runtime argument value "fetchSize" for key "fetchSize" Then Enter runtime argument value "splitBy" for key "SplitBy" - Then Enter runtime argument value "numSplits" for key "NumSplits" Then Run the Pipeline in Runtime with runtime arguments Then Wait till pipeline is in running state Then Open and capture logs @@ -172,6 +165,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in connection section + Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" When Expand Plugin group in the LHS plugins list: "Sink" @@ -203,9 +197,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Enter runtime argument value "password" for key "Password" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs + Then Verify the preview of pipeline is "success" Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime @@ -233,7 +225,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" - Then Replace input plugin property: "database" with value: "databaseName" + Then Replace input plugin property: "database" with value: "DatabaseName" Then Click on the Macro button of Property: "importQuery" 
and set the value in textarea: "CloudMySqlImportQuery" Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page @@ -247,12 +239,10 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Close the Plugin Properties page Then Save the pipeline Then Preview and run the pipeline - Then Enter runtime argument value "insertQuery" for key "CloudMySqlImportQuery" + Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs + Then Verify the preview of pipeline is "success" Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime @@ -282,7 +272,6 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Enter textarea plugin property: "importQuery" with value: "selectQuery" And Click on the Macro button of Property: "fetchSize" and set the value to: "fetchSize" And Click on the Macro button of Property: "splitBy" and set the value to: "SplitBy" - And Click on the Macro button of Property: "numSplits" and set the value to: "NumSplits" Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "BigQuery" @@ -297,18 +286,14 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument Then Preview and run the pipeline Then Enter runtime argument value "fetchSize" for key "fetchSize" Then Enter runtime argument value "splitBy" for key "SplitBy" - Then Enter runtime argument value "numSplits" for key "NumSplits" Then Run the preview of pipeline with runtime arguments Then Wait till pipeline preview is in running state - Then Open and capture pipeline preview logs - Then Verify the preview run status of pipeline in the logs is "succeeded" - Then Close the pipeline logs + Then Verify the preview of pipeline is "success" Then Close the preview Then Deploy the pipeline Then Run the Pipeline in Runtime Then Enter runtime argument value "fetchSize" for key "fetchSize" Then Enter runtime argument value "splitBy" for key "SplitBy" - Then Enter runtime argument value "numSplits" for key "NumSplits" Then Run the Pipeline in Runtime with runtime arguments Then Wait till pipeline is in running state Then Open and capture logs diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java index 81027d43d..bccbe983b 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java @@ -31,7 +31,6 @@ public static Connection getCloudMysqlConnection() throws SQLException, ClassNot String Password = "v@123"; String jdbcUrl = String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s", databaseName, instanceConnectionName, Username, Password); Connection conn = DriverManager.getConnection(jdbcUrl); - //System.out.println("connected to database"); return conn; } @@ -169,7 +168,7 @@ public static void createTargetCloudMysqlTable(String targetTable) throws SQLExc ClassNotFoundException { try (Connection connect = getCloudMysqlConnection(); Statement statement 
= connect.createStatement()) { - String datatypesColumns = PluginPropertyUtils.pluginProp("SqlServerDatatypesColumns"); + String datatypesColumns = PluginPropertyUtils.pluginProp("CloudMySqlDatatypesColumns"); String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns; statement.executeUpdate(createTargetTableQuery); } diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties index 0e43a9e82..116a58500 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -10,12 +10,15 @@ invalidRef=invalidRef&^*&&* zeroValue=0 ConnectionName=cdf-athena:us-central1:sql-automation-test-instance zeroSplits=isha -insertQuery= select * from mytable -CloudMySqlImportQuery=select * from mytable +insertQuery= select * from mytable; +CloudMySqlImportQuery=select * from mytable; +Tablename=mytable fetchSize=1000 NumSplits=1 SplitBy=ID +splitBy=ID projectId=cdf-athena +datasetprojectId=cdf-athena BQReferenceName=reference bqTargetTable=mytable bqDatasetId=1234 @@ -25,6 +28,7 @@ driver=cloudsql-mysql table=myTable name=NAME pass=PASS +invalidTablename=table123 invalidUserName=testUser invalidPassword=testPassword invalidTable=data @@ -44,7 +48,7 @@ outputDatatypesSchema2=[{"key":"ID","value":"string"},{"key":"COL1","value":"str {"key":"COL6","value":"bytes"}] outputDatatypesSchema3=[{"key":"ID","value":"string"},{"key":"COL1","value":"bytes"}] outputDatatypesSchema4=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"}] -splitBy=column name + invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table outputDatatypesSchema1=[{"key":"COL23","value":"double"},{"key":"COL28","value":"timestamp"},\ {"key":"COL29","value":"timestamp"},{"key":"COL30","value":"string"},{"key":"COL31","value":"string"},\ @@ -78,8 +82,10 @@ datatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"boolean"}, bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\ {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col6","value":"timestamp"},\ {"key":"col8","value":"boolean"},{"key":"col9","value":"long"},{"key":"col10","value":"time"}] + CloudMySqlDatatypesColumns=(COL1 VARBINARY(100) , COL2 VARCHAR(100), COL3 DATE, COL4 DOUBLE,\ COL6 TIMESTAMP, COL8 BIT, COL9 BIGINT, COL10 TIME) + #bq queries file path -CreateBQTableQueryFile=testData/BigQuery/BigQueryCreateTableQuery.txt -InsertBQDataQueryFile=testData/BigQuery/BigQueryInsertDataQuery.txt +CreateBQTableQueryFile=BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=BigQuery/BigQueryInsertDataQuery.txt From b6c555b5f19974d2b06ebeb8708160feaa988973 Mon Sep 17 00:00:00 2001 From: priyabhatnagar Date: Fri, 26 May 2023 13:52:41 +0530 Subject: [PATCH 7/7] bq validation --- .../sink/CloudMySqlDesignTime.feature | 18 +- .../CloudMySqlDesignTimeValidation.feature | 43 ++- .../CloudMySqlDesignTimeWithMacro.feature | 23 +- .../features/sink/CloudMySqlRunTime.feature | 96 +++++- .../sink/CloudMySqlRunTimeMacro.feature | 65 +++- .../source/CloudMySqlDesignTime.feature | 1 + .../CloudMySqlDesignTimeVaidation.feature | 20 +- .../CloudMySqlDesignTimeWithMacro.feature | 13 +- .../features/source/CloudMySqlRunTime.feature | 42 +-- .../source/CloudMySqlRunTimeMacro.feature | 30 +- .../cdap/plugin/CloudMySql/BQValidation.java | 320 +++++++++++------- 
 .../plugin/CloudMySql/runners/TestRunner.java |   7 +-
 .../CloudMySql/runners/package-info.java      |  21 ++
 .../CloudMySql/stepsdesign/CloudMySql.java    |   5 -
 .../CloudMySql/stepsdesign/CloudMysql.java    |  66 ++++
 .../CloudMySql/stepsdesign/package-info.java  |  21 ++
 .../java/io/cdap/plugin/CloudMySqlClient.java | 292 +++++++---------
 .../common/stepsdesign/TestSetupHooks.java    | 235 +++++++------
 .../BigQuery/BigQueryCreateTableQuery.txt     |   4 +-
 .../BigQuery/BigQueryInsertDataQuery.txt      |   6 +-
 .../resources/errorMessage.properties         |   5 +
 .../resources/pluginParameters.properties     |  25 +-
 22 files changed, 854 insertions(+), 504 deletions(-)
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java
 delete mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java
 create mode 100644 cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java

diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
index ad17579d3..cda75f202 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature
@@ -46,5 +46,20 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time scenarios
     Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
-
+  Scenario: To verify CloudMySql sink plugin validation when setting up connection arguments
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "referencename"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter input plugin property: "tableName" with value: "mytable"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature
index ea6f744e2..7039062d4 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature
@@ -1,4 +1,19 @@
-Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scenarios
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@CloudMySql
+Feature: CloudMySql sink - Verify CloudSQL MySQL sink plugin design time validation scenarios

   Scenario: To verify CloudMySql sink plugin validation error message with invalid database
     Given Open Datafusion Project to configure pipeline
@@ -10,15 +25,15 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
     Then Enter input plugin property: "database" with value: "TestDatabase"
     Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
-#   Then Click on the Get Schema button
-#   Then Verify the Output Schema matches the Expected Schema: "outputSchema"
-#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "outputSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
@@ -30,7 +45,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena
     Then Enter input plugin property: "database" with value: "invalidDatabaseName"
     Then Enter input plugin property: "tableName" with value: "mytable"
     Then Click on the Validate button
-#   Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header
+    Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header

   Scenario: To verify CloudMySql sink plugin validation error message with invalid tablename
     Given Open Datafusion Project to configure pipeline
@@ -42,26 +57,26 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization
related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter textarea plugin property: "importQuery" with value: "insertQuery" -# Then Click on the Get Schema button -# Then Verify the Output Schema matches the Expected Schema: "outputSchema" -# Then Validate "CloudSQL MySQL" plugin properties + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "outputSchema" + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page Then Navigate to the properties page of plugin: "CloudSQL MySQL2" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRefName" Then Enter input plugin property: "database" with value: "TestDatabase" Then Enter input plugin property: "tableName" with value: "Invalidtable" Then Click on the Validate button -# Then Verify that the Plugin Property: "table" is displaying an in-line error message: "errorMessageInvalidTableName" + Then Verify that the Plugin Property: "table" is displaying an in-line error message: "errorMessageInvalidTableName" Scenario: To verify CloudMySql sink plugin validation error message with invalid reference Name Given Open Datafusion Project to configure pipeline @@ -70,7 +85,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRef" @@ -87,7 +102,7 @@ Feature: CloudMySql sink- Verify ,Mysql sink plugin design time validation scena Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" 
for Credentials and Authorization related fields
     Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
     Then Enter input plugin property: "referenceName" with value: "RefName"
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature
index 9ffda6fc4..a2de617c0 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature
@@ -1,3 +1,18 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@CloudMySql
 Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenarios

   Scenario: To verify CloudMySql sink plugin validation with macro enabled fields for connection section
@@ -6,7 +21,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudsql-mysql"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Click on the Macro button of Property: "user" and set the value to: "username"
     Then Click on the Macro button of Property: "password" and set the value to: "password"
@@ -14,7 +29,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
     Then Replace input plugin property: "database" with value: "TestDatabase"
     Then Click on the Validate button
-#   Then Validate "CloudSQL MySQL" plugin properties
+    Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page

   Scenario: To verify cloudsql sink plugin validation with macro enabled fields for basic section
@@ -23,7 +38,7 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar
     When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Select radio button plugin property: "instanceType" with value: "public"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin
property: "password" with value: "password" for Credentials and Authorization related fields @@ -31,5 +46,5 @@ Feature: CloudMySql sink- Verify CloudMySql sink plugin design time macro scenar Then Replace input plugin property: "database" with value: "TestDatabase" Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" Then Click on the Validate button -# Then Validate "CloudSQL MySQL" plugin properties + Then Validate "CloudSQL MySQL" plugin properties Then Close the Plugin Properties page \ No newline at end of file diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature index fb6cc5374..450686e5f 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature @@ -15,7 +15,7 @@ @CloudMySql Feature: CloudMySql Sink - Run time scenarios - @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -24,7 +24,7 @@ Feature: CloudMySql Sink - Run time scenarios And Enter input plugin property: "referenceName" with value: "Reference" And Replace input plugin property: "project" with value: "projectId" And Enter input plugin property: "datasetProject" with value: "datasetprojectId" - And Enter input plugin property: "dataset" with value: "dataset" + And Replace input plugin property: "dataset" with value: "dataset" And Enter input plugin property: "table" with value: "bqSourceTable" Then Click on the Get Schema button Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" @@ -36,7 +36,7 @@ Feature: CloudMySql Sink - Run time scenarios Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -57,7 +57,97 @@ Feature: CloudMySql Sink - Run time scenarios Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" 
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "datasetprojectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table
+
+  @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526
+  Scenario: To verify data is getting transferred from BigQuery source to CloudSQL MySQL sink with Advanced property Connection timeout
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    And Navigate to the properties page of plugin: "BigQuery"
+    And Enter input plugin property: "referenceName" with value: "Reference"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "datasetprojectId"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list
as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature index 97d537c70..be729b396 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -15,7 +15,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) - @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -34,7 +34,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Click on the Macro button of Property: "user" and set the value to: "Username" Then Click on the Macro button of Property: "password" and set the value to: "Password" Then Enter input plugin property: "referenceName" with value: "RefName" @@ -63,8 +63,9 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target 
CloudSQLMySql table is equal to the values from source BigQuery table - @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526 Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in basic section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" @@ -82,7 +83,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -107,6 +108,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Open and capture logs Then Verify the pipeline status is "Succeeded" Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE Scenario: Verify pipeline failure message in logs when user provides invalid Table Name of plugin with Macros @@ -126,7 +128,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "RefName" @@ -142,6 +144,9 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Wait till pipeline is in running state Then Open and capture logs And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidTableName | @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE Scenario: Verify pipeline failure message in logs when user provides invalid credentials of plugin with Macros @@ -162,7 +167,7 @@ Feature: CloudMySql Sink - Run time scenarios (macro) Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields 
     Then Click on the Macro button of Property: "user" and set the value to: "Username"
     Then Click on the Macro button of Property: "password" and set the value to: "Password"
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -179,4 +184,52 @@
     Then Wait till pipeline is in running state
     Then Open and capture logs
     And Verify the pipeline status is "Failed"
+    Then Open Pipeline logs and verify Log entries having below listed Level and Message:
+      | Level | Message                            |
+      | ERROR | errorLogsMessageInvalidCredentials |
+
+  @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1526
+  Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in advanced section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery"
+    And Replace input plugin property: "project" with value: "projectId"
+    And Enter input plugin property: "datasetProject" with value: "datasetprojectId"
+    And Enter input plugin property: "dataset" with value: "dataset"
+    And Enter input plugin property: "table" with value: "bqSourceTable"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Click on the Macro button of Property: "connectionTimeout" and set the value to: "ConnectionTimeout"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "connectionTimeout" for key "ConnectionTimeout"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "connectionTimeout" for key "ConnectionTimeout"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table
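The "Validate the values of records transferred to target CloudSQLMySql table..." steps above are implemented by the reworked BQValidation.java listed in this patch's diffstat, whose body is not shown in the diff. The sketch below is therefore only an illustration of that kind of source-versus-target check, under stated assumptions: the class and helper names (BQValidationSketch, getBigQueryRows, getCloudSqlRows), the compared column (col2 in BigQuery, COL2 in MySQL, taken from the test schema in pluginParameters.properties), and the environment variable names are hypothetical, not the patch's actual code. The JDBC URL shape is the one used by CloudMySqlClient.getCloudMysqlConnection() earlier in this series.

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

/**
 * Illustrative sketch only: compares one column of the BigQuery source table
 * against the CloudSQL MySQL target table after a pipeline run.
 */
public class BQValidationSketch {

  public static boolean validateRecordValues(String bqTable, String mysqlTable) throws Exception {
    // Order both sides by the same key so an ordered list comparison suffices.
    List<String> bqValues =
      getBigQueryRows("SELECT col2 FROM `" + bqTable + "` ORDER BY col2");
    List<String> mysqlValues =
      getCloudSqlRows("SELECT COL2 FROM " + mysqlTable + " ORDER BY COL2");
    return bqValues.equals(mysqlValues);
  }

  private static List<String> getBigQueryRows(String query) throws Exception {
    // Runs a synchronous query and collects the first column of each row.
    BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService();
    TableResult result = bigQuery.query(QueryJobConfiguration.newBuilder(query).build());
    List<String> values = new ArrayList<>();
    for (FieldValueList row : result.iterateAll()) {
      values.add(row.get(0).getStringValue());
    }
    return values;
  }

  private static List<String> getCloudSqlRows(String query) throws Exception {
    // URL shape as in CloudMySqlClient.getCloudMysqlConnection(); the
    // environment variable names here are assumptions for the sketch.
    String jdbcUrl = String.format(
      "jdbc:mysql:///%s?cloudSqlInstance=%s"
        + "&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s",
      System.getenv("CLOUDSQL_DATABASE"), System.getenv("CLOUDSQL_INSTANCE"),
      System.getenv("CLOUDSQL_USER"), System.getenv("CLOUDSQL_PASSWORD"));
    List<String> values = new ArrayList<>();
    try (Connection connection = DriverManager.getConnection(jdbcUrl);
         Statement statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery(query)) {
      while (resultSet.next()) {
        values.add(resultSet.getString(1));
      }
    }
    return values;
  }
}

Sorting both result sets on the same key keeps the comparison a simple ordered-list equality check; a production validator would compare every column and normalize type formatting (timestamps, bytes, decimals) before comparing.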
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
index 6041602f2..0822fc381 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature
@@ -23,6 +23,6 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time scenario
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
     Then Enter input plugin property: "referenceName" with value: "RefName"
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
index e1494327b..d21495919 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature
@@ -21,7 +21,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -37,7 +37,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "sourceRef"
@@ -54,7 +54,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property:
"select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "invalidRef" @@ -70,7 +70,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" @@ -85,7 +85,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" @@ -100,7 +100,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -117,7 +117,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: 
"connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -135,7 +135,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -152,7 +152,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" @@ -169,7 +169,7 @@ Feature: CloudMySql source- Verify CloudMySql source plugin design time validati Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" Then Select radio button plugin property: "instanceType" with value: "public" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature index 216a14c28..a24e45227 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature @@ -15,38 +15,37 @@ @CloudMySql Feature: CloudMySql 
source- Verify CloudMySql source plugin design time macro scenarios + @CLOUDMYSQL_SOURCE_TEST Scenario: To verify CloudMySql source plugin validation with macro enabled fields for connection section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "DriverName" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Click on the Macro button of Property: "user" and set the value to: "username" Then Click on the Macro button of Property: "password" and set the value to: "password" Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList" Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" - Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Validate button Then Close the Plugin Properties page + @CLOUDMYSQL_SOURCE_TEST Scenario: To verify cloudsql source plugin validation with macro enabled fields for basic section Given Open Datafusion Project to configure pipeline When Expand Plugin group in the LHS plugins list: "Source" When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" Then Navigate to the properties page of plugin: "CloudSQL MySQL" Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql" - Then Enter input plugin property: "connectionName" with value: "ConnectionName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields Then Select radio button plugin property: "instanceType" with value: "public" Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields Then Enter input plugin property: "referenceName" with value: "sourceRef" Then Replace input plugin property: "database" with value: "DatabaseName" - Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" Then Click on the Validate button Then Close the Plugin Properties page - - - diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature index ad700116c..5700f2900 100644 --- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature @@ -17,7 +17,7 @@ Feature: CloudMySql Source - Run time scenarios Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery sink - @CLOUDMYSQL_SOURCE_TEST + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST Scenario: To verify data is getting transferred from 
CloudMySql source to BigQuery sink successfully
    Given Open Datafusion Project to configure pipeline
    When Expand Plugin group in the LHS plugins list: "Source"
@@ -28,11 +28,9 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Enter input plugin property: "user" with value: "username"
     Then Enter input plugin property: "password" with value: "password"
-#    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
-#    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
     Then Enter input plugin property: "database" with value: "DatabaseName"
     Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
@@ -50,10 +48,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Close the Plugin Properties page
     Then Save the pipeline
     Then Preview and run the pipeline
-    Then Wait till pipeline preview is in running state
-    Then Open and capture pipeline preview logs
-    Then Verify the preview run status of pipeline in the logs is "succeeded"
-    Then Close the pipeline logs
+    Then Verify the preview of pipeline is "success"
     Then Close the preview
     Then Deploy the pipeline
     Then Run the Pipeline in Runtime
@@ -61,8 +56,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
-    #Then Validate the values of records transferred to target Big Query table is equal to the values from source table
-
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table

   @CLOUDMYSQL_SOURCE_DATATYPES_TEST @BQ_SINK_TEST
   Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully with all datatypes
     Given Open Datafusion Project to configure pipeline
@@ -74,7 +68,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -104,9 +98,10 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
-    #Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table

-  @CLOUDMYSQL_SOURCE_DATATYPES_TEST
+
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_SINK_TEST @PLUGIN-20670
   Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql sink successfully
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
@@ -117,20 +112,20 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
     Then Enter input plugin property: "database" with value: "DatabaseName"
     Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
     Then Click on the Get Schema button
-#    Then Verify the Output Schema matches the Expected Schema: "OutputSchema"
+    Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
     Then Validate "CloudSQL MySQL" plugin properties
     Then Close the Plugin Properties page
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -147,8 +142,9 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Wait till pipeline is in running state
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to target table is equal to the values from source table

-  @CLOUDMYSQL_SOURCE_DATATYPES_TEST
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_SINK_TEST @PLUGIN-20670
   Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql successfully when connection arguments are set
     Given Open Datafusion Project to configure pipeline
     When Expand Plugin group in the LHS plugins list: "Source"
@@ -159,7 +155,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
@@ -172,7 +168,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -189,6 +185,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Wait till pipeline is in running state
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to target table is equal to the values from source table

   @CLOUDMYSQL_SOURCE_DATATYPES_TEST
   Scenario: Verify user should not be able to deploy and run the pipeline when plugin is configured with invalid bounding query
     Given Open Datafusion Project to configure pipeline
@@ -201,7 +198,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -217,7 +214,7 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -231,3 +228,6 @@ Feature: CloudMySql - Verify data transfer from CloudMySql source to BigQuery si
     Then Wait till pipeline is in running state
     Then Open and capture logs
     And Verify the pipeline status is "Failed"
+    Then Open Pipeline logs and verify Log entries having below listed Level and Message:
+      | Level | Message                              |
+      | ERROR | errorLogsMessageInvalidBoundingQuery |
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
index ada6144bc..69d29c030 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature
@@ -26,7 +26,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Click on the Macro button of Property: "user" and set the value to: "Username"
     Then Click on the Macro button of Property: "password" and set the value to: "Password"
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -37,7 +37,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -66,6 +66,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table

   @CLOUDMYSQL_SOURCE_TEST
   Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in basic section
     Given Open Datafusion Project to configure pipeline
@@ -78,7 +79,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -89,7 +90,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -112,6 +113,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table

   @CLOUDMYSQL_SOURCE_TEST
   Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in advance section
     Given Open Datafusion Project to configure pipeline
@@ -124,7 +126,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -137,7 +139,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -148,7 +150,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Save the pipeline
     Then Preview and run the pipeline
     Then Enter runtime argument value "fetchSize" for key "fetchSize"
-    Then Enter runtime argument value "splitBy" for key "SplitBy"
+    Then Enter runtime argument value "splitby" for key "SplitBy"
     Then Run the preview of pipeline with runtime arguments
     Then Wait till pipeline preview is in running state
     Then Verify the preview of pipeline is "success"
@@ -156,12 +158,13 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Deploy the pipeline
     Then Run the Pipeline in Runtime
     Then Enter runtime argument value "fetchSize" for key "fetchSize"
-    Then Enter runtime argument value "splitBy" for key "SplitBy"
+    Then Enter runtime argument value "splitby" for key "SplitBy"
     Then Run the Pipeline in Runtime with runtime arguments
     Then Wait till pipeline is in running state
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table

   @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
   Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in connection section
     Given Open Datafusion Project to configure pipeline
@@ -174,7 +177,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Click on the Macro button of Property: "user" and set the value to: "Username"
     Then Click on the Macro button of Property: "password" and set the value to: "Password"
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -209,6 +212,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table

   @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
   Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in basic section
     Given Open Datafusion Project to configure pipeline
@@ -221,7 +225,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -252,6 +256,8 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
   @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
   Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in advance section
     Given Open Datafusion Project to configure pipeline
@@ -264,7 +270,7 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Navigate to the properties page of plugin: "CloudSQL MySQL"
     Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "cloudsql-mysql"
     Then Select radio button plugin property: "instanceType" with value: "public"
-    Then Enter input plugin property: "connectionName" with value: "ConnectionName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
     Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
     Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
     Then Enter input plugin property: "referenceName" with value: "RefName"
@@ -299,3 +305,5 @@ Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro argument
     Then Open and capture logs
     Then Verify the pipeline status is "Succeeded"
     Then Close the pipeline logs
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
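Note on the macro scenarios above: the quoted values in steps such as 'Enter runtime argument value "splitby" for key "SplitBy"' are property keys, not literal values; the e2e framework resolves them from the plugin's properties resources, which is why the key casing had to change from "splitBy" to "splitby" to match the file. A minimal illustrative sketch of that lookup, assuming keys named "splitby" and "fetchSize" exist in a pluginParameters.properties resource (the class name and direct Properties loading here are hypothetical stand-ins for the framework's PluginPropertyUtils):

    import java.io.InputStream;
    import java.util.Properties;

    public class PluginPropsSketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Load the e2e resources file from the classpath (assumed location).
        try (InputStream in = PluginPropsSketch.class.getResourceAsStream("/pluginParameters.properties")) {
          props.load(in);
        }
        // The step's quoted key must match the properties file exactly, including case.
        System.out.println("SplitBy -> " + props.getProperty("splitby"));
        System.out.println("fetchSize -> " + props.getProperty("fetchSize"));
      }
    }
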
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java
index 850ea08a1..b4e3311eb 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/BQValidation.java
@@ -6,149 +6,219 @@
 import io.cdap.e2e.utils.BigQueryClient;
 import io.cdap.e2e.utils.PluginPropertyUtils;
 import io.cdap.plugin.CloudMySqlClient;
+import io.cdap.plugin.common.stepsdesign.TestSetupHooks;
 import org.junit.Assert;
 import java.io.IOException;
 import java.sql.*;
+import java.sql.Date;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Base64;
-import java.util.Date;
-import java.util.List;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
+import org.apache.spark.sql.types.Decimal;
 
 /**
  * BQValidation
  */
 public class BQValidation {
 
-  public static void main(String[] args) {
-//    validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable)
-  }
-  /**
-   * Extracts entire data from source and target tables.
-   * @param sourceTable table at the source side
-   * @param targetTable table at the sink side
-   * @return true if the values in source and target side are equal
-   */
-
-  public static boolean validateBQAndDBRecordValues(String schema, String sourceTable, String targetTable)
-    throws SQLException, ClassNotFoundException, ParseException, IOException, InterruptedException {
-    List<JsonObject> jsonResponse = new ArrayList<>();
-    List<Object> bigQueryRows = new ArrayList<>();
-    getBigQueryTableData(targetTable, bigQueryRows);
-    for (Object rows : bigQueryRows) {
-      JsonObject json = new Gson().fromJson(String.valueOf(rows), JsonObject.class);
-      jsonResponse.add(json);
-    }
-    String getSourceQuery = "SELECT * FROM " + schema + "." + sourceTable;
-    try (Connection connect = CloudMySqlClient.getCloudMysqlConnection()) {
-      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
-      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
-                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
-
-      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
-      return compareResultSetData(rsSource, jsonResponse);
-    }
-  }
+  public static List<JsonObject> bigQueryResponse = new ArrayList<>();
+  public static List<Object> bigQueryRows = new ArrayList<>();
+  public static Gson gson = new Gson();
+
+  /**
+   * Extracts entire data from source and target tables.
+   *
+   * @param sourceTable table at the source side
+   * @param targetTable table at the sink side
+   * @return true if the values in source and target side are equal
+   */
+
+  public static boolean validateBQAndDBRecordValues(String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException {
+    getBigQueryTableData(sourceTable, bigQueryRows);
+    for (Object rows : bigQueryRows) {
+      JsonObject json = gson.fromJson(String.valueOf(rows), JsonObject.class);
+      bigQueryResponse.add(json);
+    }
 
-  /**
-   * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects.
-   *
-   * @param table The name of the BigQuery table to fetch data from.
-   * @param bigQueryRows The list to store the fetched BigQuery data.
-   *
-   */
-
-  private static void getBigQueryTableData(String table, List<Object> bigQueryRows)
-    throws IOException, InterruptedException {
-
-    String projectId = PluginPropertyUtils.pluginProp("projectId");
-    String dataset = PluginPropertyUtils.pluginProp("dataset");
-    String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + table + "` AS t";
-    TableResult result = BigQueryClient.getQueryResult(selectQuery);
-    result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue()));
-  }
+    // BigQuery is the source here, so the JDBC query reads the sink-side table.
+    String getTargetQuery = "SELECT * FROM " + targetTable;
+    try (Connection connect = CloudMySqlClient.getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsTarget = statement1.executeQuery(getTargetQuery);
+      return compareResultSetWithJsonData(rsTarget, bigQueryResponse);
+    }
+  }
 
-  /**
-   * Compares the data in the result set obtained from the Oracle database with the provided BigQuery JSON objects.
-   *
-   * @param rsSource The result set obtained from the Oracle database.
-   * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data.
-   *
-   * @return True if the result set data matches the BigQuery data, false otherwise.
-   * @throws SQLException If an SQL error occurs during the result set operations.
-   * @throws ParseException If an error occurs while parsing the data.
-   */
-
-  public static boolean compareResultSetData(ResultSet rsSource, List<JsonObject> bigQueryData) throws SQLException,
-    ParseException {
-    ResultSetMetaData mdSource = rsSource.getMetaData();
-    boolean result = false;
-    int columnCountSource = mdSource.getColumnCount();
-
-    if (bigQueryData == null) {
-      Assert.fail("bigQueryData is null");
-      return result;
-    }
-
-    // Get the column count of the first JsonObject in bigQueryData
-    int columnCountTarget = 0;
-    if (bigQueryData.size() > 0) {
-      columnCountTarget = bigQueryData.get(0).entrySet().size();
-    }
-    // Compare the number of columns in the source and target
-    Assert.assertEquals("Number of columns in source and target are not equal",
-                        columnCountSource, columnCountTarget);
-
-    //Variable 'jsonObjectIdx' to track the index of the current JsonObject in the bigQueryData list,
-    int jsonObjectIdx = 0;
-    while (rsSource.next()) {
-      int currentColumnCount = 1;
-      while (currentColumnCount <= columnCountSource) {
-        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
-        int columnType = mdSource.getColumnType(currentColumnCount);
-        String columnName = mdSource.getColumnName(currentColumnCount);
-        // Perform different comparisons based on column type
-        switch (columnType) {
-          // Since we skip BFILE in Oracle Sink, we are not comparing the BFILE source and sink values
-          case Types.BLOB:
-          case Types.VARBINARY:
-          case Types.LONGVARBINARY:
-            String sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount)));
-            String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
-            Assert.assertEquals("Different values found for column : %s",
-                                sourceB64String, targetB64String);
-            break;
-
-          case Types.NUMERIC:
-            long sourceVal = rsSource.getLong(currentColumnCount);
-            long targetVal = Long.parseLong(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
-            Assert.assertTrue("Different values found for column : %s",
-                              String.valueOf(sourceVal).equals(String.valueOf(targetVal)));
-            break;
-
-          case Types.TIMESTAMP:
-            Timestamp sourceTS = rsSource.getTimestamp(columnName);
-            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'hh:mm:ss'Z'");
-            Date parsedDate = dateFormat.parse(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
-            Timestamp targetTs = new Timestamp(parsedDate.getTime());
-            Assert.assertEquals("Different values found for column : %s",
-                                String.valueOf(sourceTS).equals(String.valueOf(targetTs)));
-            break;
-          default:
-            String sourceString = rsSource.getString(currentColumnCount);
-            String targetString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
-            Assert.assertEquals(String.format("Different %s values found for column : %s", columnTypeName, columnName),
-                                String.valueOf(sourceString), String.valueOf(targetString));
-        }
-        currentColumnCount++;
-      }
-      jsonObjectIdx++;
-    }
-    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
-                       rsSource.next());
-    return true;
-  }
+  public static boolean validateDBAndBQRecordValues(String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException {
+    getBigQueryTableData(targetTable, bigQueryRows);
+    for (Object rows : bigQueryRows) {
+      JsonObject json = gson.fromJson(String.valueOf(rows), JsonObject.class);
+      bigQueryResponse.add(json);
+    }
+    // BigQuery is the sink here, so the JDBC query reads the source-side table.
+    String getSourceQuery = "SELECT * FROM " + sourceTable;
+    try (Connection connect = CloudMySqlClient.getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      return compareResultSetWithJsonData(rsSource, bigQueryResponse);
+    }
+  }
+
+  /**
+   * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects.
+   *
+   * @param table The name of the BigQuery table to fetch data from.
+   * @param bigQueryRows The list to store the fetched BigQuery data.
+   */
+
+  private static void getBigQueryTableData(String table, List<Object> bigQueryRows)
+    throws IOException, InterruptedException {
+    String projectId = PluginPropertyUtils.pluginProp("projectId");
+    String dataset = PluginPropertyUtils.pluginProp("dataset");
+    String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + table + "` AS t";
+    TableResult result = BigQueryClient.getQueryResult(selectQuery);
+    result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue()));
+  }
+
+  /**
+   * Compares the data in the result set obtained from the CloudSql MySql database with the provided BigQuery JSON
+   * objects.
+   *
+   * @param rsSource The result set obtained from the CloudSql MySql database.
+   * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data.
+   * @return True if the result set data matches the BigQuery data, false otherwise.
+   * @throws SQLException If an SQL error occurs during the result set operations.
+   */
+
+  public static boolean compareResultSetWithJsonData(ResultSet rsSource, List<JsonObject> bigQueryData) throws
+    SQLException {
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    boolean result = false;
+    int columnCountSource = mdSource.getColumnCount();
+
+    if (bigQueryData == null) {
+      Assert.fail("bigQueryData is null");
+      return result;
+    }
+
+    // Get the column count of the first JsonObject in bigQueryData
+    int columnCountTarget = 0;
+    if (bigQueryData.size() > 0) {
+      columnCountTarget = bigQueryData.get(0).entrySet().size();
+    }
+    // Compare the number of columns in the source and target
+    Assert.assertEquals("Number of columns in source and target are not equal",
+                        columnCountSource, columnCountTarget);
+
+    //Variable 'jsonObjectIdx' to track the index of the current JsonObject in the bigQueryData list,
+    int jsonObjectIdx = 0;
+    while (rsSource.next()) {
+      int currentColumnCount = 2;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        // Perform different comparisons based on column type
+        switch (columnType) {
+          case Types.BIT:
+            Boolean sourceBit = rsSource.getBoolean(currentColumnCount);
+            Boolean targetBit = Boolean.parseBoolean(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceBit).equals(String.valueOf(targetBit)));
+            break;
+
+          case Types.SMALLINT:
+          case Types.INTEGER:
+          case Types.TINYINT:
+            Integer sourceTinyInt = rsSource.getInt(currentColumnCount);
+            Integer targetTinyInt = Integer.parseInt(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceTinyInt).equals(String.valueOf(targetTinyInt)));
+            break;
+
+          case Types.REAL:
+            Float sourceFloat = rsSource.getFloat(currentColumnCount);
+            Float targetFloat = Float.parseFloat(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceFloat).equals(String.valueOf(targetFloat)));
+            break;
+
+          case Types.DOUBLE:
+            Double sourceDouble = rsSource.getDouble(currentColumnCount);
+            Double targetDouble = Double.parseDouble(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceDouble).equals(String.valueOf(targetDouble)));
+            break;
+
+          case Types.DATE:
+            Date sourceDate = rsSource.getDate(currentColumnCount);
+            Date targetDate = java.sql.Date.valueOf(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceDate).equals(String.valueOf(targetDate)));
+            break;
+
+          case Types.TIME:
+            Time sourceTime = rsSource.getTime(currentColumnCount);
+            Time targetTime = Time.valueOf(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceTime).equals(String.valueOf(targetTime)));
+            break;
+
+          case Types.DECIMAL:
+            Decimal sourceDecimal = Decimal.fromDecimal(rsSource.getBigDecimal(currentColumnCount));
+            Decimal targetDecimal = Decimal.fromDecimal(bigQueryData.get(jsonObjectIdx).get(columnName).getAsBigDecimal());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                sourceDecimal, targetDecimal);
+            break;
+
+          case Types.BLOB:
+          case Types.VARBINARY:
+          case Types.LONGVARBINARY:
+          case Types.BINARY:
+            String sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount)));
+            String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                sourceB64String, targetB64String);
+            break;
+
+          case Types.NUMERIC:
+            long sourceVal = rsSource.getLong(currentColumnCount);
+            long targetVal = Long.parseLong(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceVal).equals(String.valueOf(targetVal)));
+            break;
+
+          case Types.TIMESTAMP:
+            // BigQuery renders timestamps in ISO-8601; normalize to Timestamp.toString() form before comparing.
+            String sourceTS = String.valueOf(rsSource.getTimestamp(currentColumnCount));
+            String targetTS = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            LocalDateTime timestamp = LocalDateTime.parse(targetTS, DateTimeFormatter.ISO_DATE_TIME);
+            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S");
+            String formattedTimestamp = timestamp.format(formatter);
+            Assert.assertEquals(sourceTS, formattedTimestamp);
+            break;
+
+          default:
+            String sourceString = rsSource.getString(currentColumnCount);
+            String targetString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            Assert.assertEquals(String.format("Different %s values found for column : %s", columnTypeName, columnName),
+                                String.valueOf(sourceString), String.valueOf(targetString));
+            break;
+        }
+        currentColumnCount++;
+      }
+      jsonObjectIdx++;
+    }
+    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+                       rsSource.next());
+    return true;
+  }
 }
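Note on the TIMESTAMP branch above: the comparison only works because both sides are first reduced to one textual form; BigQuery's TO_JSON renders timestamps in ISO-8601, while java.sql.Timestamp.toString() uses the "yyyy-MM-dd HH:mm:ss.f..." form. A small self-contained sketch of that normalization (the example value below is invented for illustration):

    import java.sql.Timestamp;
    import java.time.LocalDateTime;
    import java.time.format.DateTimeFormatter;

    public class TimestampCompareSketch {
      public static void main(String[] args) {
        // A value as BigQuery's TO_JSON would render it (assumed example).
        String bigQueryValue = "2023-01-15T10:30:45Z";
        // The same instant as JDBC would return it from MySQL.
        Timestamp jdbcValue = Timestamp.valueOf("2023-01-15 10:30:45.0");

        // Parse the ISO-8601 string, then re-render it in Timestamp.toString() layout.
        LocalDateTime parsed = LocalDateTime.parse(bigQueryValue, DateTimeFormatter.ISO_DATE_TIME);
        String normalized = parsed.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S"));

        // Both sides now share one textual form, so a plain string comparison suffices.
        System.out.println(normalized.equals(jdbcValue.toString())); // prints: true
      }
    }
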
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java
index 2d8ac7c4e..05772562d 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/TestRunner.java
@@ -25,8 +25,11 @@
 @RunWith(Cucumber.class)
 @CucumberOptions(
   features = {"src/e2e-test/features"},
-  glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"},
-  tags = {"@CloudMySql"},
+  glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.CloudMySql.stepsdesign"},
+  tags = {"@CloudMySql and not @PLUGIN-20670 and not @PLUGIN-1526"},
+  /* TODO: Enable tests once the following issues are fixed https://cdap.atlassian.net/browse/PLUGIN-20670,
+     https://cdap.atlassian.net/browse/PLUGIN-1526
+   */
   plugin = {"pretty", "html:target/cucumber-html-report/CloudMySql",
     "json:target/cucumber-reports/cucumber-mysql.json",
     "junit:target/cucumber-reports/cucumber-mysql.xml"}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java
new file mode 100644
index 000000000..ac35d14dc
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/runners/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright © 2022 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the runners for CloudMySql features.
+ */
+
+package io.cdap.plugin.CloudMySql.runners;
\ No newline at end of file
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java
deleted file mode 100644
index f3d96427e..000000000
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMySql.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package io.cdap.plugin.CloudMySql.stepsdesign;
-
-public class CloudMySql {
-
-}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java
new file mode 100644
index 000000000..3faf14cd7
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/CloudMysql.java
@@ -0,0 +1,66 @@
+package io.cdap.plugin.CloudMySql.stepsdesign;
+
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.CloudMySqlClient;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+import io.cdap.plugin.CloudMySql.BQValidation;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.text.ParseException;
+
+/**
+ * CloudSqlMySql Plugin related step design.
+ */
+public class CloudMysql implements CdfHelper {
+  @Then("Validate the values of records transferred to target table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetTableIsEqualToTheValuesFromSourceTable()
+    throws SQLException, ClassNotFoundException {
+    int countRecords = CloudMySqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertEquals("Number of records transferred should be equal to records out ",
+                        countRecords, recordOut());
+    BeforeActions.scenario.write(" ******** Number of records transferred ********:" + countRecords);
+    boolean recordsMatched = CloudMySqlClient.validateRecordValues(PluginPropertyUtils.pluginProp("sourceTable"),
+                                                                   PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
+    int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable"));
+    BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount);
+    Assert.assertEquals("Out records should match with target BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateDBAndBQRecordValues(
+      PluginPropertyUtils.pluginProp("sourceTable"),
+      PluginPropertyUtils.pluginProp("bqTargetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source " +
+    "BigQuery table")
+  public void validateTheValuesOfRecordsTransferredToTargetCloudSQLMySqlTableIsEqualToTheValuesFromSourceBigQueryTable()
+    throws IOException, InterruptedException, SQLException, ClassNotFoundException, ParseException {
+    int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable"));
+    BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount);
+    Assert.assertEquals("Out records should match with target CloudSQLMySql table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateBQAndDBRecordValues(
+      PluginPropertyUtils.pluginProp("bqSourceTable"),
+      PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+}
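Note on the step designs above: every validation step follows one pattern, i.e. resolve table names through PluginPropertyUtils, delegate the comparison to CloudMySqlClient or BQValidation, and assert on the result. A hypothetical extra step written in that same pattern (the class name and step text below are illustrative only, not part of this patch; countRecord and pluginProp are the real helpers used above):

    import io.cdap.e2e.utils.PluginPropertyUtils;
    import io.cdap.plugin.CloudMySqlClient;
    import io.cucumber.java.en.Then;
    import org.junit.Assert;

    public class ValidationStepsSketch {
      @Then("Validate the target table has the same record count as the source table")
      public void validateRecordCounts() throws Exception {
        // Table names are generated per run, so always resolve them via plugin properties.
        int sourceCount = CloudMySqlClient.countRecord(PluginPropertyUtils.pluginProp("sourceTable"));
        int targetCount = CloudMySqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable"));
        Assert.assertEquals("Source and target record counts should match", sourceCount, targetCount);
      }
    }
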
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java
new file mode 100644
index 000000000..56ff78143
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySql/stepsdesign/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the step designs for CloudMySql features.
+ */
+
+package io.cdap.plugin.CloudMySql.stepsdesign;
\ No newline at end of file
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java
index c78b7aee0..bce60717f 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java
@@ -2,7 +2,6 @@
 
 import io.cdap.e2e.utils.PluginPropertyUtils;
 import org.junit.Assert;
-
 import java.sql.*;
 import java.util.Date;
 import java.util.GregorianCalendar;
@@ -10,180 +9,151 @@
 public class CloudMySqlClient {
 
-    private static final String database = PluginPropertyUtils.pluginProp("DatabaseName");
-    private static final String connectionName = PluginPropertyUtils.pluginProp("ConnectionName");
-
-    public static void main(String[] args) throws SQLException, ClassNotFoundException {
-        getCloudMysqlConnection();
-        //createSourceTable("myTable");
-//        createSourceTable("newTable");
-//        String[] tablesToDrop = {"newTable"};
-//        dropTables(tablesToDrop);
-        //System.out.println("done");
-    }
-
-    public static Connection getCloudMysqlConnection() throws SQLException, ClassNotFoundException {
-        Class.forName("com.google.cloud.sql.mysql.SocketFactory");
-        String instanceConnectionName = "cdf-athena:us-central1:sql-automation-test-instance";
-        String databaseName = "TestDatabase";
-        String Username = "v";
-        String Password = "v@123";
-        String jdbcUrl = String.format("jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s", databaseName, instanceConnectionName, Username, Password);
-        Connection conn = DriverManager.getConnection(jdbcUrl);
-        System.out.println("connected to database");
-        return conn;
-    }
-
-    public static int countRecord(String table) throws SQLException, ClassNotFoundException {
-        String countQuery = "SELECT COUNT(*) as total FROM " + table;
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement();
-             ResultSet rs = statement.executeQuery(countQuery)) {
-            int num = 0;
-            while (rs.next()) {
-                num = (rs.getInt(1));
-            }
-            return num;
-        }
+  private static final String database = PluginPropertyUtils.pluginProp("DatabaseName");
+  private static final String connectionName = PluginPropertyUtils.pluginProp("ConnectionName");
+
+  public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException {
+    Class.forName("com.google.cloud.sql.mysql.SocketFactory");
+    // Credentials and instance name come from the environment; the JDBC URL template comes from plugin properties.
+    String instanceConnectionName = System.getenv("CLOUDSQLMYSQL_CONNECTIONNAME");
+    String database = PluginPropertyUtils.pluginProp("DatabaseName");
+    String username = System.getenv("CLOUDSQLMYSQL_USERNAME");
+    String password = System.getenv("CLOUDSQLMYSQL_PASSWORD");
+    String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("jdbcURL"), database, instanceConnectionName,
+                                   username, password);
+    Connection conn = DriverManager.getConnection(jdbcUrl);
+    return conn;
+  }
+
+  public static int countRecord(String table) throws SQLException, ClassNotFoundException {
+    String countQuery = "SELECT COUNT(*) as total FROM " + table;
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement();
+         ResultSet rs = statement.executeQuery(countQuery)) {
+      int num = 0;
+      while (rs.next()) {
+        num = (rs.getInt(1));
+      }
+      return num;
     }
-
-    public static boolean validateRecordValues(String sourceTable, String targetTable)
-        throws SQLException, ClassNotFoundException {
-        String getSourceQuery = "SELECT * FROM " + sourceTable;
-        String getTargetQuery = "SELECT * FROM " + targetTable;
-        try (Connection connect = getCloudMysqlConnection()) {
-            connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
-            Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
-                                                           ResultSet.HOLD_CURSORS_OVER_COMMIT);
-            Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
-                                                           ResultSet.HOLD_CURSORS_OVER_COMMIT);
-            ResultSet rsSource = statement1.executeQuery(getSourceQuery);
-            ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
-            return compareResultSetData(rsSource, rsTarget);
-        }
-    }
-
-    /**
-     * Compares the result Set data in source table and sink table..
-     *
-     * @param rsSource result set of the source table data
-     * @param rsTarget result set of the target table data
-     * @return true if rsSource matches rsTarget
-     */
-    public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
-        ResultSetMetaData mdSource = rsSource.getMetaData();
-        ResultSetMetaData mdTarget = rsTarget.getMetaData();
-        int columnCountSource = mdSource.getColumnCount();
-        int columnCountTarget = mdTarget.getColumnCount();
-        Assert.assertEquals("Number of columns in source and target are not equal",
-                            columnCountSource, columnCountTarget);
-        while (rsSource.next() && rsTarget.next()) {
-            int currentColumnCount = 1;
-            while (currentColumnCount <= columnCountSource) {
-                String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
-                int columnType = mdSource.getColumnType(currentColumnCount);
-                String columnName = mdSource.getColumnName(currentColumnCount);
-                if (columnType == Types.TIMESTAMP) {
-                    GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
-                    gc.setGregorianChange(new Date(Long.MIN_VALUE));
-                    Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc);
-                    Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc);
-                    Assert.assertTrue(String.format("Different values found for column : %s", columnName),
                                       sourceTS.equals(targetTS));
-                } else {
-                    String sourceString = rsSource.getString(currentColumnCount);
-                    String targetString = rsTarget.getString(currentColumnCount);
-                    Assert.assertTrue(String.format("Different values found for column : %s", columnName),
                                       String.valueOf(sourceString).equals(String.valueOf(targetString)));
-                }
-                currentColumnCount++;
-            }
-        }
-        Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
-                           rsSource.next());
-        Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table",
-                           rsTarget.next());
-        return true;
+  }
+
+  public static boolean validateRecordValues(String sourceTable, String targetTable) throws SQLException,
+    ClassNotFoundException {
+    String getSourceQuery = "SELECT * FROM " + sourceTable;
+    String getTargetQuery = "SELECT * FROM " + targetTable;
+    try (Connection connect = getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
+      return compareResultSetData(rsSource, rsTarget);
     }
-
-    public static void createSourceTable(String sourceTable) throws SQLException, ClassNotFoundException {
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement()) {
-            String createSourceTableQuery = "CREATE TABLE IF NOT EXISTS " + sourceTable +
-                "(id int, lastName varchar(255), PRIMARY KEY (id))";
-            statement.executeUpdate(createSourceTableQuery);
-
-            // Truncate table to clean the data of last failure run.
-            String truncateSourceTableQuery = "TRUNCATE TABLE " + sourceTable;
-            statement.executeUpdate(truncateSourceTableQuery);
-
-            // Insert dummy data.
-            statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" +
-                "VALUES (1, 'Priya')");
-            statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" +
-                "VALUES (2, 'Shubhangi')");
-            statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName)" +
-                "VALUES (3, 'Shorya')");
-
-
-        }
+  }
+
+  /**
+   * Compares the result set data in source table and sink table.
+   *
+   * @param rsSource result set of the source table data
+   * @param rsTarget result set of the target table data
+   * @return true if rsSource matches rsTarget
+   */
+  public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    ResultSetMetaData mdTarget = rsTarget.getMetaData();
+    int columnCountSource = mdSource.getColumnCount();
+    int columnCountTarget = mdTarget.getColumnCount();
+    Assert.assertEquals("Number of columns in source and target are not equal", columnCountSource, columnCountTarget);
+    while (rsSource.next() && rsTarget.next()) {
+      int currentColumnCount = 1;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        if (columnType == Types.TIMESTAMP) {
+          // Compare timestamps in UTC with a pure Gregorian calendar to avoid timezone and Julian-cutover drift.
+          GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
+          gc.setGregorianChange(new Date(Long.MIN_VALUE));
+          Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc);
+          Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc);
+          Assert.assertTrue(String.format("Different values found for column : %s", columnName),
                             sourceTS.equals(targetTS));
+        } else {
+          String sourceString = rsSource.getString(currentColumnCount);
+          String targetString = rsTarget.getString(currentColumnCount);
+          Assert.assertTrue(String.format("Different values found for column : %s", columnName),
                             String.valueOf(sourceString).equals(String.valueOf(targetString)));
+        }
+        currentColumnCount++;
+      }
     }
-
-    public static void createTargetTable(String targetTable) throws SQLException, ClassNotFoundException {
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement()) {
-            String createTargetTableQuery = "CREATE TABLE IF NOT EXISTS " + targetTable +
-                "(id int, lastName varchar(255), PRIMARY KEY (id))";
-            statement.executeUpdate(createTargetTableQuery);
-            // Truncate table to clean the data of last failure run.
-            String truncateTargetTableQuery = "TRUNCATE TABLE " + targetTable;
-            statement.executeUpdate(truncateTargetTableQuery);
-        }
+    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+                       rsSource.next());
+    Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table",
+                       rsTarget.next());
+    return true;
+  }
+
+  public static void createSourceTable(String sourceTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String createSourceTableQuery = "CREATE TABLE IF NOT EXISTS " + sourceTable +
+        " (id int, lastName varchar(255), PRIMARY KEY (id))";
+      statement.executeUpdate(createSourceTableQuery);
+
+      // Truncate table to clean the data of last failure run.
+      String truncateSourceTableQuery = "TRUNCATE TABLE " + sourceTable;
+      statement.executeUpdate(truncateSourceTableQuery);
+
+      // Insert dummy data.
+      statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName) " + "VALUES (1, 'Priya')");
+      statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName) " + "VALUES (2, 'Shubhangi')");
+      statement.executeUpdate("INSERT INTO " + sourceTable + " (id, lastName) " + "VALUES (3, 'Shorya')");
     }
-
-    public static void createSourceDatatypesTable(String sourceTable) throws SQLException, ClassNotFoundException {
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement()) {
-            String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
-            String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns;
-            statement.executeUpdate(createSourceTableQuery);
-            System.out.println(createSourceTableQuery);
-
-            // Insert dummy data.
-            String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValue1");
-            String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
-            statement.executeUpdate("INSERT INTO " + sourceTable + " " + datatypesColumnsList + " " + datatypesValues);
-        }
+  }
+
+  public static void createTargetTable(String targetTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String createTargetTableQuery = "CREATE TABLE IF NOT EXISTS " + targetTable +
+        " (id int, lastName varchar(255), PRIMARY KEY (id))";
+      statement.executeUpdate(createTargetTableQuery);
+      // Truncate table to clean the data of last failure run.
+      String truncateTargetTableQuery = "TRUNCATE TABLE " + targetTable;
+      statement.executeUpdate(truncateTargetTableQuery);
     }
-
-    public static void createTargetDatatypesTable(String targetTable) throws SQLException, ClassNotFoundException {
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement()) {
-            String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
-            String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
-            statement.executeUpdate(createTargetTableQuery);
-        }
+  }
+
+  public static void createSourceDatatypesTable(String sourceTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns;
+      statement.executeUpdate(createSourceTableQuery);
+      // Insert dummy data.
+      String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValue1");
+      String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
+      statement.executeUpdate("INSERT INTO " + sourceTable + " " + datatypesColumnsList + " " + datatypesValues);
     }
-
-    public static void createTargetCloudMysqlTable(String targetTable) throws SQLException,
-        ClassNotFoundException {
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement()) {
-            String datatypesColumns = PluginPropertyUtils.pluginProp("CloudMySqlDatatypesColumns");
-            String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
-            statement.executeUpdate(createTargetTableQuery);
-        }
+  }
+
+  public static void createTargetDatatypesTable(String targetTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
     }
-
-
-    public static void dropTables(String[] tables) throws SQLException, ClassNotFoundException {
-        try (Connection connect = getCloudMysqlConnection();
-             Statement statement = connect.createStatement()) {
-            for (String table : tables) {
-                String dropTableQuery = "Drop Table " + table;
-                statement.executeUpdate(dropTableQuery);
-            }
-        }
+  }
+
+  public static void createTargetCloudMysqlTable(String targetTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("CloudMySqlDatatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
+  public static void dropTables(String[] tables) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      for (String table : tables) {
+        String dropTableQuery = "Drop Table " + table;
+        statement.executeUpdate(dropTableQuery);
+      }
     }
+  }
 }
\ No newline at end of file
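Note on the refactored client above: getCloudSqlConnection() no longer hard-codes credentials; it reads CLOUDSQLMYSQL_CONNECTIONNAME, CLOUDSQLMYSQL_USERNAME and CLOUDSQLMYSQL_PASSWORD from the environment and formats the "jdbcURL" property (which, judging by the removed hard-coded version, resembles "jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s"). A minimal smoke-check sketch under those assumptions (the class name is illustrative only):

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import io.cdap.plugin.CloudMySqlClient;

    public class ConnectionSmokeCheck {
      public static void main(String[] args) throws Exception {
        // Requires the three CLOUDSQLMYSQL_* environment variables and the jdbcURL plugin property.
        try (Connection conn = CloudMySqlClient.getCloudSqlConnection();
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1")) {
          rs.next();
          System.out.println("CloudSQL MySQL reachable, SELECT 1 returned " + rs.getInt(1));
        }
      }
    }
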
-    @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_DATATYPES_TEST")
-    public static void createDatatypesTable() throws SQLException, ClassNotFoundException {
-        CloudMySqlClient.createSourceDatatypesTable(PluginPropertyUtils.pluginProp("sourceTable"));
-        CloudMySqlClient.createTargetDatatypesTable(PluginPropertyUtils.pluginProp("targetTable"));
+    TestSetupHooks.setTableName();
+  }
+
+  @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_TEST")
+  public static void createTables() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"));
+    CloudMySqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"));
+  }
+
+
+  @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_DATATYPES_TEST")
+  public static void createDatatypesTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.createSourceDatatypesTable(PluginPropertyUtils.pluginProp("sourceTable"));
+    CloudMySqlClient.createTargetDatatypesTable(PluginPropertyUtils.pluginProp("targetTable"));
+  }
+
+  @After(order = 2, value = "@CLOUDMYSQL_SINK_TEST")
+  public static void dropTables() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.dropTables(
+      new String[]{PluginPropertyUtils.pluginProp("sourceTable"), PluginPropertyUtils.pluginProp("targetTable")});
+  }
+
+  @Before(order = 2, value = "@CLOUDMYSQL_TEST_TABLE")
+  public static void createCloudMysqlTestTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.createTargetCloudMysqlTable(PluginPropertyUtils.pluginProp("targetTable"));
+  }
+
+  @Before(order = 1, value = "@BQ_SINK_TEST")
+  public static void setTempTargetBQTableName() {
+    String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_");
+    PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName);
+    BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName);
+  }
+
+  @After(order = 1, value = "@BQ_SINK_TEST")
+  public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
+    String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable");
+    try {
+      BigQueryClient.dropBqQuery(bqTargetTableName);
+      BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully");
+      PluginPropertyUtils.removePluginProp("bqTargetTable");
+    } catch (BigQueryException e) {
+      if (e.getMessage().contains("Not found: Table")) {
+        BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist");
+      } else {
+        Assert.fail(e.getMessage());
+      }
     }
-    @After(order = 2, value = "@CLOUDMYSQL_SINK_TEST")
-    public static void dropTables() throws SQLException, ClassNotFoundException {
-        CloudMySqlClient.dropTables(new String[]{PluginPropertyUtils.pluginProp("sourceTable"),
-            PluginPropertyUtils.pluginProp("targetTable")});
+  }
+
+  @Before(order = 1, value = "@BQ_SOURCE_TEST")
+  public static void createTempSourceBQTable() throws IOException, InterruptedException {
+    createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"),
+      PluginPropertyUtils.pluginProp("InsertBQDataQueryFile"));
+  }
+
+  @After(order = 1, value = "@BQ_SOURCE_TEST")
+  public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
+    String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
+    BigQueryClient.dropBqQuery(bqSourceTable);
+    BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
+    PluginPropertyUtils.removePluginProp("bqSourceTable");
+  }
+
+  private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) throws
+    IOException, InterruptedException, NullPointerException {
+    String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0, 5).replaceAll("-", "_");
+
+    String createTableQuery = StringUtils.EMPTY;
+    try {
+      createTableQuery = new String(
+        Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource("/" + bqCreateTableQueryFile).toURI())),
+        StandardCharsets.UTF_8);
+      createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
+        .replace("TABLE_NAME", bqSourceTable);
+    } catch (Exception e) {
+      e.printStackTrace();
+      BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage());
+      Assert.fail(
+        "Exception in BigQuery testdata prerequisite setup " + "- error in reading create table query file " + e.getMessage());
     }
-    @Before(order = 2, value = "@CLOUDMYSQL_TEST_TABLE")
-    public static void createCloudMysqlTestTable() throws SQLException, ClassNotFoundException {
-        CloudMySqlClient.createTargetCloudMysqlTable(PluginPropertyUtils.pluginProp("targetTable"));
-    }
-
-    @Before(order = 1, value = "@BQ_SINK_TEST")
-    public static void setTempTargetBQTableName() {
-        String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_");
-        PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName);
-        BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName);
-    }
-
-    @After(order = 1, value = "@BQ_SINK_TEST")
-    public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
-        String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable");
-        try {
-            BigQueryClient.dropBqQuery(bqTargetTableName);
-            BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully");
-            PluginPropertyUtils.removePluginProp("bqTargetTable");
-        } catch (BigQueryException e) {
-            if (e.getMessage().contains("Not found: Table")) {
-                BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist");
-            } else {
-                Assert.fail(e.getMessage());
-            }
-        }
-    }
-
-    @Before(order = 1, value = "@BQ_SOURCE_TEST")
-    public static void createTempSourceBQTable() throws IOException, InterruptedException {
-        createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"),
-            PluginPropertyUtils.pluginProp("InsertBQDataQueryFile"));
-    }
+    String insertDataQuery = StringUtils.EMPTY;
+    try {
+      insertDataQuery = new String(
+        Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource("/" + bqInsertDataQueryFile).toURI())),
+        StandardCharsets.UTF_8);
+      insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
+        .replace("TABLE_NAME", bqSourceTable);
+    } catch (Exception e) {
+      BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage());
+      Assert.fail(
+        "Exception in BigQuery testdata prerequisite setup " + "- error in reading insert data query file " + e.getMessage());
-    @After(order = 1, value = "@BQ_SOURCE_TEST")
-    public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
-        String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
-        BigQueryClient.dropBqQuery(bqSourceTable);
-        BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
-        PluginPropertyUtils.removePluginProp("bqSourceTable");
     }
-    private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile)
-        throws IOException, InterruptedException {
-        String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
-
-        String createTableQuery = StringUtils.EMPTY;
-        try {
-            createTableQuery = new String(Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource
-                ("/" + bqCreateTableQueryFile).toURI()))
-                , StandardCharsets.UTF_8);
-            createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
-                .replace("TABLE_NAME", bqSourceTable);
-        } catch (Exception e) {
-            BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage());
-            Assert.fail("Exception in BigQuery testdata prerequisite setup " +
-                "- error in reading create table query file " + e.getMessage());
-        }
-
-        String insertDataQuery = StringUtils.EMPTY;
-        try {
-            insertDataQuery = new String(Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource
-                ("/" + bqInsertDataQueryFile).toURI()))
-                , StandardCharsets.UTF_8);
-            insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
-                .replace("TABLE_NAME", bqSourceTable);
-        } catch (Exception e) {
-            BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage());
-            Assert.fail("Exception in BigQuery testdata prerequisite setup " +
-                "- error in reading insert data query file " + e.getMessage());
-        }
-        BigQueryClient.getSoleQueryResult(createTableQuery);
-        try {
-            BigQueryClient.getSoleQueryResult(insertDataQuery);
-        } catch (NoSuchElementException e) {
-            // Insert query does not return any record.
-            // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException
-        }
-        PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
-        BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
+    BigQueryClient.getSoleQueryResult(createTableQuery);
+    try {
+      BigQueryClient.getSoleQueryResult(insertDataQuery);
+    } catch (NoSuchElementException e) {
+      // Insert query does not return any record.
+      // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException
     }
+    PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
+    BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
+  }
 }
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt
index 54fd6ef5e..1188d6591 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt
@@ -1,2 +1,2 @@
-create table `DATASET.TABLE_NAME` (col1 BYTES, col2 STRING, col3 DATE, col4 FLOAT64, col6 TIMESTAMP,
-col8 BOOL, col9 INT64, col10 TIME)
+create table `DATASET.TABLE_NAME` (COL1 BYTES, COL2 STRING, COL3 DATE, COL4 FLOAT64, COL6 TIMESTAMP,
+COL8 BOOL, COL9 INT64, COL10 TIME)
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt
index bdccb0ea8..5b8643fff 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt
@@ -1,3 +1,3 @@
-insert into `DATASET.TABLE_NAME` (col1, col2, col3, col4, col6, col8, col9, col10) values
-(b'01011011','priya','2021-01-28',1.110,'2019-03-10 04:50:01 UTC',false,92233720,'21:26:00'),
-(b'01011011','surya','2021-01-21',1.110000001,'2018-03-10 04:50:01 UTC',true,92233729,'20:26:00');
+insert into `DATASET.TABLE_NAME` (COL1, COL2, COL3, COL4, COL6, COL8, COL9, COL10) values
+(b'01011011','priya','2021-01-27',1.110,'2019-03-10 04:50:01 UTC',false,92233720,'21:26:00'),
+(b'01011011','surya','2021-01-27',1.110000001,'2018-03-10 04:50:01 UTC',true,92233729,'20:26:00');
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
index 27437b5f0..2ad0a8369 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
@@ -16,3 +16,8 @@ errorMessageInvalidTableName=Exception while trying to validate schema of databa
 errorMessageConnectionName=Connection Name must be in the format <PROJECT_ID>:<REGION>:<INSTANCE_NAME> to connect to a public CloudSQL PostgreSQL instance.
 validationSuccessMessage=No errors found.
 validationErrorMessage=COUNT ERROR found
+errorLogsMessageInvalidTableName=Spark program 'phase-1' failed with error: Errors were encountered during validation. \
+  Table 'Table123' does not exist
+errorLogsMessageInvalidCredentials=Spark program 'phase-1' failed with error: Errors were encountered during validation.
+errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: The column index is out of range: 1, \
+  number of columns: 0.. Please check the system logs for more details.
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties
index 0e43a9e82..68b1c41d1 100644
--- a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties
@@ -8,18 +8,20 @@ invalidImportQuery=select
 numberOfSplits=2
 invalidRef=invalidRef&^*&&*
 zeroValue=0
-ConnectionName=cdf-athena:us-central1:sql-automation-test-instance
+connectionName=CLOUDSQLMYSQL_CONNECTIONNAME
 zeroSplits=isha
 insertQuery= select * from mytable
 CloudMySqlImportQuery=select * from mytable
 fetchSize=1000
 NumSplits=1
 SplitBy=ID
+jdbcURL=jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s
 projectId=cdf-athena
+datasetprojectId=cdf-athena
 BQReferenceName=reference
-bqTargetTable=mytable
+targetTable=mytable5
 bqDatasetId=1234
-dataset=sql
+dataset=test_automation
 bqSourceTable=mysql
 driver=cloudsql-mysql
 table=myTable
@@ -28,6 +30,7 @@ pass=PASS
 invalidUserName=testUser
 invalidPassword=testPassword
 invalidTable=data
+jdbcURL=jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s
 CloudMySqlDriverName=cloudsql-mysql
 bqTruncateTable=truncateTable
 bqUpdateTableSchema=updateSchema
@@ -36,6 +39,8 @@ invalidboundQuery=SELECT MIN(id),MAX(id) FROM table
 cloudsqlimportQuery=where $CONDITIONS;
 splitby=ID
 numbersplitsgenerate=2
+connectionTimeout=100
+invalidTablename=Table123
 outputSchema=[{"key":"fname","value":"string"},{"key":"lname","value":"string"},{"key":"cost","value":"double"},\
   {"key":"zipcode","value":"int"}]
 OutputSchema=[{"key":"id","value":"int"},{"key":"lastName","value":"string"}]
@@ -66,20 +71,20 @@ datatypesValue1=VALUES ('User1',1,-1,true,-32768,HEX('27486920546869732069732061
   'This is a test message to check ','X')
 datatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"boolean"},{"key":"COL2","value":"int"},\
   {"key":"COL3","value":"boolean"},{"key":"COL4","value":"int"},{"key":"COL5","value":"bytes"},\
-  {"key":"COL6","value":"double"},{"key":"COL7","value":"int"},{"key":"COL8","value":"long"},\
+  {"key":"COL6","value":"int"},{"key":"COL7","value":"int"},{"key":"COL8","value":"long"},\
   {"key":"COL9","value":"float"},{"key":"COL10","value":"date"},{"key":"COL11","value":"timestamp"},\
   {"key":"COL12","value":"decimal"},{"key":"COL13","value":"double"},{"key":"COL14","value":"string"},\
   {"key":"COL15","value":"time"},{"key":"COL16","value":"timestamp"},\
   {"key":"COL18","value":"string"},{"key":"COL19","value":"bytes"},{"key":"COL20","value":"string"},\
   {"key":"COL21","value":"bytes"},{"key":"COL22","value":"bytes"},{"key":"COL23","value":"bytes"},\
-  {"key":"COL24","value":"bytes"},{"key":"COL25","value":"string"},{"key":"COL26","value":"time"},\
+  {"key":"COL24","value":"bytes"},{"key":"COL25","value":"string"},{"key":"COL26","value":"string"},\
   {"key":"COL27","value":"bytes"},{"key":"COL28","value":"string"},{"key":"COL29","value":"string"}]
-bqOutputMultipleDatatypesSchema= [{"key":"col1","value":"bytes"},{"key":"col2","value":"string"},\
-  {"key":"col3","value":"date"},{"key":"col4","value":"double"},{"key":"col6","value":"timestamp"},\
-  {"key":"col8","value":"boolean"},{"key":"col9","value":"long"},{"key":"col10","value":"time"}]
+bqOutputMultipleDatatypesSchema= [{"key":"COL1","value":"bytes"},{"key":"COL2","value":"string"},\
+  {"key":"COL3","value":"date"},{"key":"COL4","value":"double"},{"key":"COL6","value":"timestamp"},\
+  {"key":"COL8","value":"boolean"},{"key":"COL9","value":"long"},{"key":"COL10","value":"time"}]
 CloudMySqlDatatypesColumns=(COL1 VARBINARY(100) , COL2 VARCHAR(100), COL3 DATE, COL4 DOUBLE,\
   COL6 TIMESTAMP, COL8 BIT, COL9 BIGINT, COL10 TIME)
 #bq queries file path
-CreateBQTableQueryFile=testData/BigQuery/BigQueryCreateTableQuery.txt
-InsertBQDataQueryFile=testData/BigQuery/BigQueryInsertDataQuery.txt
+CreateBQTableQueryFile=BigQuery/BigQueryCreateTableQuery.txt
+InsertBQDataQueryFile=BigQuery/BigQueryInsertDataQuery.txt