diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
new file mode 100644
index 0000000..4048a90
--- /dev/null
+++ b/.github/workflows/e2e.yml
@@ -0,0 +1,95 @@
+# Copyright © 2021 Cask Data, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+# This workflow will build a Java project with Maven
+# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
+# Note: Any changes to this workflow take effect only after they are merged into develop
+name: Build e2e tests
+
+on:
+ push:
+ branches: [ develop ]
+ pull_request:
+ branches: [ develop ]
+ types: [opened, synchronize, reopened, labeled]
+ workflow_dispatch:
+
+jobs:
+ build:
+ runs-on: k8s-runner-e2e
+
+ # We allow builds:
+ # 1) When triggered manually
+ # 2) When it's a merge into a branch
+ # 3) For PRs that are labeled as build and
+ # - It's a code change
+ # - A build label was just added
+ # A bit complex, but prevents builds when other labels are manipulated
+ if: >
+ github.event_name == 'workflow_dispatch'
+ || github.event_name == 'push'
+ || (contains(github.event.pull_request.labels.*.name, 'build')
+ && (github.event.action != 'labeled' || github.event.label.name == 'build')
+ )
+
+ steps:
+ # Pin the checkout action to release v2.3.4
+ - uses: actions/checkout@v2.3.4
+ with:
+ path: plugin
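+ # Check out the shared cdap-e2e-tests framework repo alongside the plugin sources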
+ - name: Checkout e2e test repo
+ uses: actions/checkout@v2.3.4
+ with:
+ repository: cdapio/cdap-e2e-tests
+ path: e2e
+ - name: Cache
+ uses: actions/cache@v2.1.3
+ with:
+ path: ~/.m2/repository
+ key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
+ restore-keys: |
+ ${{ runner.os }}-maven-${{ github.workflow }}
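+ # Run the e2e suite via the shared framework script; AWS credentials come from repository secrets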
+ - name: Run tests
+ run: python3 e2e/src/main/scripts/run_e2e_test.py
+ env:
+ AWS_KEY_ID: ${{ secrets.AWS_KEY_ID }}
+ AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
+ - name: Upload report
+ uses: actions/upload-artifact@v2.2.4
+ if: always()
+ with:
+ name: Cucumber report
+ path: ./plugin/target/cucumber-reports
+ - name: Upload debug files
+ uses: actions/upload-artifact@v2.2.4
+ if: always()
+ with:
+ name: Debug files
+ path: ./**/target/e2e-debug
+ - name: Deploy report
+ uses: peaceiris/actions-gh-pages@v3
+ if: always()
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ publish_dir: ./plugin/target/cucumber-reports
+ destination_dir: cucumber-reports/${{ github.ref }}
+
+ - name: Publish Cucumber report link as commit status
+ uses: Sibz/github-status-action@67af1f4042a5a790681aad83c44008ca6cfab83d
+ if: always()
+ with:
+ authToken: ${{ secrets.GITHUB_TOKEN }}
+ state: success
+ context: Cucumber report
+ target_url: https://data-integrations.github.io/google-cloud/cucumber-reports/${{ github.ref }}/advanced-reports/cucumber-html-reports/overview-features.html
+ sha: ${{github.event.pull_request.head.sha || github.sha}}
+
+
diff --git a/pom.xml b/pom.xml
index 96d5a96..1d19e33 100644
--- a/pom.xml
+++ b/pom.xml
@@ -82,6 +82,7 @@
     <hadoop.version>2.8.0</hadoop.version>
     <aws.sdk.version>1.11.133</aws.sdk.version>
     <main.basedir>${project.basedir}</main.basedir>
+    <testSourceLocation>${project.basedir}/src/test/java/</testSourceLocation>
@@ -238,6 +239,7 @@
+    <testSourceDirectory>${testSourceLocation}</testSourceDirectory>
@@ -289,6 +291,10 @@
           <consoleOutput>true</consoleOutput>
           <failsOnError>true</failsOnError>
           <includeTestSourceDirectory>true</includeTestSourceDirectory>
+          <sourceDirectories>
+            <sourceDirectory>src/test/java</sourceDirectory>
+            <sourceDirectory>src/e2e-test</sourceDirectory>
+          </sourceDirectories>
             <goal>check</goal>
@@ -357,5 +363,95 @@
+    <profile>
+      <id>e2e-tests</id>
+      <properties>
+        <testSourceLocation>src/e2e-test/java</testSourceLocation>
+      </properties>
+      <build>
+        <testResources>
+          <testResource>
+            <directory>src/e2e-test/resources</directory>
+            <filtering>true</filtering>
+          </testResource>
+        </testResources>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <version>2.18.1</version>
+            <configuration>
+              <skipTests>true</skipTests>
+            </configuration>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-failsafe-plugin</artifactId>
+            <version>3.0.0-M5</version>
+            <configuration>
+              <includes>
+                <include>TestRunner.java</include>
+              </includes>
+            </configuration>
+            <executions>
+              <execution>
+                <goals>
+                  <goal>integration-test</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>net.masterthought</groupId>
+            <artifactId>maven-cucumber-reporting</artifactId>
+            <version>5.5.0</version>
+            <executions>
+              <execution>
+                <id>execution</id>
+                <phase>verify</phase>
+                <goals>
+                  <goal>generate</goal>
+                </goals>
+                <configuration>
+                  <projectName>Cucumber Reports</projectName>
+                  <outputDirectory>target/cucumber-reports/advanced-reports</outputDirectory>
+                  <buildNumber>1</buildNumber>
+                  <skip>false</skip>
+                  <inputDirectory>${project.build.directory}/cucumber-reports</inputDirectory>
+                  <jsonFiles>
+                    <param>**/*.json</param>
+                  </jsonFiles>
+                  <classificationDirectory>${project.build.directory}/cucumber-reports</classificationDirectory>
+                  <checkBuildResult>true</checkBuildResult>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.felix</groupId>
+            <artifactId>maven-bundle-plugin</artifactId>
+            <version>3.5.1</version>
+          </plugin>
+        </plugins>
+      </build>
+      <dependencies>
+        <dependency>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+          <version>31.0.1-jre</version>
+          <scope>compile</scope>
+        </dependency>
+        <dependency>
+          <groupId>io.cdap.tests.e2e</groupId>
+          <artifactId>cdap-e2e-framework</artifactId>
+          <version>0.0.1-SNAPSHOT</version>
+          <scope>test</scope>
+        </dependency>
+      </dependencies>
+    </profile>
diff --git a/src/e2e-test/features/amazonS3.EndtoEnd.feature b/src/e2e-test/features/amazonS3.EndtoEnd.feature
new file mode 100644
index 0000000..430814e
--- /dev/null
+++ b/src/e2e-test/features/amazonS3.EndtoEnd.feature
@@ -0,0 +1,233 @@
+Feature: AmazonS3 End to End records transfer
+
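+ # Each scenario designs a pipeline in the Data Fusion Studio, previews it, deploys it and validates the transferred record counts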
+ @AmazonS3
+ Scenario:Validate successful records transfer from Amazon to GCS
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ When Sink is GCS
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonPath"
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Enter the GCS properties
+ Then Close the GCS Properties
+ Then Connect Source as "Amazon" and sink as "GCS" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for Amazon
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Validate the output record count
+
+ @AmazonS3
+ Scenario:Validate successful records transfer from Amazon to BigQuery using authentication method as IAM
+ Given Open Datafusion Project to configure pipeline
+ Given Delete the table "amazonBqTableDemo"
+ When Source is Amazon
+ When Target is BigQuery
+ Then Open Amazon Properties
+ Then Enter the Amazon properties using IAM for bucket "amazonPath"
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery properties for table "amazonBqTableDemo"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "Amazon" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for Amazon
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the output record count
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "amazonBqTableDemo"
+ Then Validate records out from Amazon is equal to records transferred in BigQuery "amazonBqTableDemo" output records
+
+ @AmazonS3
+ Scenario:Validate successful records transfer from Amazon to BigQuery when Path Filename Only is set to true
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ When Target is BigQuery
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonPath" and path field "amazonPathField" with pathFileName only set "True"
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery properties for table "amazonBqTableDemo"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "Amazon" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for Amazon
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the output record count
+ Then Open the Logs and capture raw logs
+ Then Get Count of no of records transferred to BigQuery in "amazonBqTableDemo"
+ Then Validate records out from Amazon is equal to records transferred in BigQuery "amazonBqTableDemo" output records
+ Then Delete the table "amazonBqTableDemo"
+
+ @AmazonS3
+ Scenario:Validate successful records transfer from Amazon to BigQuery when Path Filename Only is set to False
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ When Target is BigQuery
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonPath" and path field "amazonPathField" with pathFileName only set "False"
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery properties for table "amazonBqTableDemo"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "Amazon" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for Amazon
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the output record count
+ Then Get Count of no of records transferred to BigQuery in "amazonBqTableDemo"
+ Then Validate records out from Amazon is equal to records transferred in BigQuery "amazonBqTableDemo" output records
+ Then Delete the table "amazonBqTableDemo"
+
+ @AmazonS3
+ Scenario:Validate successful records transfer from Amazon to BigQuery when Read Files Recursively is set to true
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ When Target is BigQuery
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonRecursiveDataPath" and field readFilesRecursively
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery properties for table "amazonBqTableDemo"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "Amazon" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for Amazon
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the output record count
+ Then Get Count of no of records transferred to BigQuery in "amazonBqTableDemo"
+ Then Validate records out from Amazon is equal to records transferred in BigQuery "amazonBqTableDemo" output records
+ Then Delete the table "amazonBqTableDemo"
+
+ @AmazonS3
+ Scenario: Verify records transfer from AmazonS3 to BigQuery on using Regex path filter
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ When Target is BigQuery
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonRegexPath" and regex filter "amazonRegexFilter"
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery properties for table "amazonBqTableDemo"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "Amazon" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the output record count
+ Then Get Count of no of records transferred to BigQuery in "amazonBqTableDemo"
+ Then Validate records out from Amazon is equal to records transferred in BigQuery "amazonBqTableDemo" output records
+ Then Delete the table "amazonBqTableDemo"
+
+ @AmazonS3
+ Scenario Outline: Verify output records are encoded while transferring data from Amazon to BigQuery using different file encoding
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ When Target is BigQuery
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonPath" and selecting "<amazonDifferentEncodings>" File encoding
+ Then Capture output schema
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Open BigQuery Target Properties
+ Then Enter the BigQuery properties for table "amazonBqTableDemo"
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Connect Source as "Amazon" and sink as "BigQuery" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for Amazon
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the output record count
+ Then Get Count of no of records transferred to BigQuery in "amazonBqTableDemo"
+ Then Validate records out from Amazon is equal to records transferred in BigQuery "amazonBqTableDemo" output records
+ Then Delete the table "amazonBqTableDemo"
+ Examples:
+ | amazonDifferentEncodings |
+ | amazonLatinEncoding |
+ | amazonEuropeanEncoding |
+ | amazonTurkishEncoding |
+
+ @AmazonS3
+ Scenario:Validate successful records transfer from BigQuery to Amazon as sink
+ Given Open Datafusion Project to configure pipeline
+ When Source is BigQuery
+ When Target is Amazon
+ Then Open BigQuery Properties
+ Then Enter the BigQuery properties for source table "amazonBqTableName"
+ Then Capture output schema
+ Then Validate Bigquery properties
+ Then Close the BigQuery properties
+ Then Open Amazon Target Properties
+ Then Enter the Amazon properties for sink bucket "amazonSinkBucket"
+ Then Validate Amazon properties
+ Then Close the Amazon properties
+ Then Connect Source as "BigQuery" and sink as "Amazon" to establish connection
+ Then Add pipeline name
+ Then Preview and run the pipeline
+ Then Verify the preview of pipeline is "success"
+ Then Click on PreviewData for BigQuery
+ Then Verify Preview output schema matches the outputSchema captured in properties
+ Then Close the Preview and deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Verify the pipeline status is "Succeeded"
+ Then Open the Logs and capture raw logs
+ Then Validate the output record count
diff --git a/src/e2e-test/features/amazonS3.error.feature b/src/e2e-test/features/amazonS3.error.feature
new file mode 100644
index 0000000..977652f
--- /dev/null
+++ b/src/e2e-test/features/amazonS3.error.feature
@@ -0,0 +1,37 @@
+Feature: AmazonS3 error validations
+
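+ # Negative scenarios asserting the validation errors raised for invalid Amazon S3 source configurations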
+ @AmazonS3
+ Scenario Outline:Verify Amazon Source properties validation errors for mandatory fields
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ Then Open Amazon Properties
+ Then Enter the Amazon Properties with blank property "<property>"
+ Then Validate mandatory property error for "<property>"
+ Examples:
+ | property |
+ | referenceName |
+ | path |
+
+ @AmazonS3
+ Scenario:Verify Error message for incorrect Amazon bucket path
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonIncorrectPath"
+ Then Verify invalid amazon bucket path error
+
+ @AmazonS3
+ Scenario:Verify Error message for incorrect Maximum Split Size
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonPath" and incorrect split size "amazonIncorrectSplitSize"
+ Then Verify invalid split size error
+
+ @AmazonS3
+ Scenario:Verify Error message for incorrect path field
+ Given Open Datafusion Project to configure pipeline
+ When Source is Amazon
+ Then Open Amazon Properties
+ Then Enter the Amazon properties for bucket "amazonPath" and incorrect path field "amazonIncorrectPathField"
+ Then Verify invalid path field error
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/actions/AmazonS3Actions.java b/src/e2e-test/java/io/cdap/plugin/amazons3/actions/AmazonS3Actions.java
new file mode 100644
index 0000000..c117ed9
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/actions/AmazonS3Actions.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.amazons3.actions;
+
+import io.cdap.e2e.pages.actions.CdfStudioActions;
+import io.cdap.e2e.utils.SeleniumDriver;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.amazons3.locators.AmazonS3Locators;
+import org.openqa.selenium.By;
+
+/**
+ * StepActions for AmazonS3.
+ */
+public class AmazonS3Actions {
+
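+ // Initialize the @FindBy locators declared in AmazonS3Locators before any action method is used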
+ static {
+ SeleniumHelper.getPropertiesLocators(AmazonS3Locators.class);
+ }
+
+ public static void selectS3() {
+ SeleniumHelper.waitAndClick(AmazonS3Locators.amazonS3Bucket);
+ }
+
+ public static void clickAmazonProperties() {
+ AmazonS3Locators.amazonS3Properties.click();
+ }
+
+ public static void enterReferenceName(String referenceName) {
+ AmazonS3Locators.referenceName.sendKeys(referenceName);
+ }
+
+ public static void closeButton() {
+ AmazonS3Locators.closeButton.click();
+ }
+
+ public static void enterBucketPath(String recursiveReadPath) {
+ AmazonS3Locators.amazonBucketPath.sendKeys(recursiveReadPath);
+ }
+
+ public static void readFilesRecursively() {
+ AmazonS3Locators.readFilesRecursively.click();
+ }
+
+ public static void allowEmptyInput() {
+ AmazonS3Locators.allowEmptyInput.click();
+ }
+
+ public static void fileEncoding(String encoding) throws InterruptedException {
+ AmazonS3Locators.fileEncoding.click();
+ SeleniumHelper.waitAndClick(SeleniumDriver.getDriver().findElement(By.xpath(
+ "//*[contains(text(),'" + encoding + "')]")));
+ CdfStudioActions.clickValidateButton();
+ }
+
+ public static void clickPreviewData() {
+ SeleniumHelper.waitElementIsVisible(AmazonS3Locators.previewData);
+ AmazonS3Locators.previewData.click();
+ }
+
+ public static void sinkAmazon() {
+ AmazonS3Locators.sink.click();
+ AmazonS3Locators.amazon.click();
+ }
+
+ public static void amazonProperties() {
+ AmazonS3Locators.amazonProperties.click();
+ }
+
+ public static void selectFormat(String formatType) throws InterruptedException {
+ AmazonS3Locators.format.click();
+ SeleniumHelper.waitAndClick(SeleniumDriver.getDriver().findElement
+ (By.xpath("//li[@data-value='" + formatType + "']")));
+ }
+
+ public static void accessID(String accessID) {
+ AmazonS3Locators.accessID.sendKeys(accessID);
+ }
+
+ public static void accessKey(String accessKey) {
+ AmazonS3Locators.accessKey.sendKeys(accessKey);
+ }
+
+ public static void enterPathField(String fieldInOutputSchema) {
+ AmazonS3Locators.pathField.sendKeys(fieldInOutputSchema);
+ }
+
+ public static void enterRegexFilter(String regexFilter) {
+ AmazonS3Locators.regexPathFilter.sendKeys(regexFilter);
+ }
+
+ public static void enterMaxSplitSize(String splitsize) {
+ AmazonS3Locators.maximumSplitSize.sendKeys(splitsize);
+ }
+
+ public static void clickIAM() {
+ AmazonS3Locators.authenticationMethodIAM.click();
+ }
+
+ public static void selectPathFilename() {
+ AmazonS3Locators.pathFileName.click();
+ }
+
+ public static void toggleSkipHeader() {
+ AmazonS3Locators.skipHeader.click();
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/actions/package-info.java b/src/e2e-test/java/io/cdap/plugin/amazons3/actions/package-info.java
new file mode 100644
index 0000000..90d0228
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/actions/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * This package contains actions for AmazonS3.
+ */
+package io.cdap.plugin.amazons3.actions;
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/locators/AmazonS3Locators.java b/src/e2e-test/java/io/cdap/plugin/amazons3/locators/AmazonS3Locators.java
new file mode 100644
index 0000000..a601f4c
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/locators/AmazonS3Locators.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.amazons3.locators;
+
+import org.openqa.selenium.WebElement;
+import org.openqa.selenium.support.FindBy;
+import org.openqa.selenium.support.How;
+/**
+ * StepLocators for AmazonS3.
+ */
+public class AmazonS3Locators {
+
+ @FindBy(how = How.XPATH, using = "//*[@title=\"Amazon S3\"]//following-sibling::div")
+ public static WebElement amazonS3Properties;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='plugin-S3-batchsource']")
+ public static WebElement amazonS3Bucket;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-cy='path']")
+ public static WebElement amazonBucketPath;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='plugin-properties-validate-btn']")
+ public static WebElement validate;
+
+ @FindBy(how = How.XPATH, using = "//*[@class='fa fa-remove']")
+ public static WebElement closeButton;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-cy='maxSplitSize']")
+ public static WebElement maximumSplitSize;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-cy='fileRegex']")
+ public static WebElement regexPathFilter;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='referenceName' and @class='MuiInputBase-input']")
+ public static WebElement referenceName;
+
+ @FindBy(how = How.XPATH, using = "//*[@name='filenameOnly' and@value='true']")
+ public static WebElement pathFileName;
+
+ @FindBy(how = How.XPATH, using = "//*[@name='recursive' and@value='true']")
+ public static WebElement readFilesRecursively;
+
+ @FindBy(how = How.XPATH, using = "//*[@name='ignoreNonExistingFolders' and@value='true']")
+ public static WebElement allowEmptyInput;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='select-fileEncoding']")
+ public static WebElement fileEncoding;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='S3-preview-data-btn' and @class='node-preview-data-btn ng-scope']")
+ public static WebElement previewData;
+
+ @FindBy(how = How.XPATH, using = "//*[text()='Sink ']")
+ public static WebElement sink;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='plugin-S3-batchsink']")
+ public static WebElement amazon;
+
+ @FindBy(how = How.XPATH, using = "//*[@title=\"Amazon S3\"]//following-sibling::div")
+ public static WebElement amazonProperties;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-cy='accessID']")
+ public static WebElement accessID;
+
+ @FindBy(how = How.XPATH, using = "//input[@data-cy='accessKey']")
+ public static WebElement accessKey;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy=\"select-format\"]")
+ public static WebElement format;
+
+ @FindBy(how = How.XPATH, using = "//*[@data-cy='pathField']//input")
+ public static WebElement pathField;
+
+ @FindBy(how = How.XPATH, using = "//input[@value='IAM']")
+ public static WebElement authenticationMethodIAM;
+
+ @FindBy(how = How.XPATH, using = "//*[@class='MuiInputBase-input' and @data-cy='skipHeader']")
+ public static WebElement skipHeader;
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/locators/package-info.java b/src/e2e-test/java/io/cdap/plugin/amazons3/locators/package-info.java
new file mode 100644
index 0000000..1d2a18c
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/locators/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * This package contains locators for AmazonS3.
+ */
+package io.cdap.plugin.amazons3.locators;
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/stepsdesign/AmazonS3.java b/src/e2e-test/java/io/cdap/plugin/amazons3/stepsdesign/AmazonS3.java
new file mode 100644
index 0000000..3ef485d
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/stepsdesign/AmazonS3.java
@@ -0,0 +1,615 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.amazons3.stepsdesign;
+
+import io.cdap.e2e.pages.actions.CdfBigQueryPropertiesActions;
+import io.cdap.e2e.pages.actions.CdfGcsActions;
+import io.cdap.e2e.pages.actions.CdfLogActions;
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.pages.actions.CdfStudioActions;
+import io.cdap.e2e.pages.locators.CdfBigQueryPropertiesLocators;
+import io.cdap.e2e.pages.locators.CdfStudioLocators;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.GcpClient;
+import io.cdap.e2e.utils.SeleniumDriver;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.amazons3.actions.AmazonS3Actions;
+import io.cdap.plugin.amazons3.locators.AmazonS3Locators;
+import io.cdap.plugin.utils.E2ETestUtils;
+import io.cucumber.java.en.Given;
+import io.cucumber.java.en.Then;
+import io.cucumber.java.en.When;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import org.openqa.selenium.By;
+import org.openqa.selenium.WebElement;
+import org.openqa.selenium.support.ui.ExpectedConditions;
+import org.openqa.selenium.support.ui.WebDriverWait;
+import stepsdesign.BeforeActions;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_AMAZON_INVALID_PATH;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_AMAZON_INVALID_PATH_FIELD;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_COLOR;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_ERROR_FOUND_VALIDATION;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_INCORRECT_TABLE;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_VALIDATION;
+
+/**
+ * StepDesigns for AmazonS3.
+ */
+public class AmazonS3 implements CdfHelper {
+ GcpClient gcpClient = new GcpClient();
+
+ static PrintWriter out;
+ static String rawLog;
+ static int countRecords;
+ List<String> propertiesOutputSchema = new ArrayList<>();
+
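+ // Writer over the scenario log file exposed by BeforeActions; captured raw pipeline logs are written to it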
+ static {
+ try {
+ out = new PrintWriter(BeforeActions.myObj);
+ } catch (FileNotFoundException e) {
+ BeforeActions.scenario.write(e.toString());
+ }
+ }
+
+ @When("Source is Amazon")
+ public void sourceIsAmazon() throws InterruptedException {
+ AmazonS3Actions.selectS3();
+ }
+
+ @When("Target is BigQuery")
+ public void targetIsBigQuery() {
+ CdfStudioActions.sinkBigQuery();
+
+ }
+
+ @Given("Open Datafusion Project to configure pipeline")
+ public void openDatafusionProjectToConfigurePipeline() throws IOException, InterruptedException {
+ openCdf();
+ }
+
+ @When("Sink is GCS")
+ public void sinkIsGCS() {
+ CdfStudioActions.sinkGcs();
+ }
+
+ @Then("Close the Amazon properties")
+ public void closeTheAmazonProperties() {
+ AmazonS3Actions.closeButton();
+ }
+
+ @Then("Enter the GCS properties")
+ public void enterTheGCSProperties() throws InterruptedException, IOException {
+ CdfGcsActions.gcsProperties();
+ CdfGcsActions.enterReferenceName();
+ CdfGcsActions.enterProjectId();
+ CdfGcsActions.getGcsBucket(E2ETestUtils.pluginProp("amazonGcsBucketName"));
+ CdfGcsActions.selectFormat("json");
+ CdfGcsActions.clickValidateButton();
+ }
+
+ @Then("Add pipeline name")
+ public void addPipelineName() throws InterruptedException {
+ CdfStudioActions.pipelineName();
+ CdfStudioActions.pipelineNameIp("Amazon_GCS" + UUID.randomUUID().toString());
+ CdfStudioActions.pipelineSave();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.statusBanner);
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 5);
+ wait.until(ExpectedConditions.invisibilityOf(CdfStudioLocators.statusBanner));
+ }
+
+ @Then("Click the preview")
+ public void clickThePreview() {
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.preview, 400);
+ CdfStudioLocators.preview.click();
+ }
+
+ @Then("Close the GCS Properties")
+ public void closeTheGCSProperties() {
+ CdfGcsActions.closeButton();
+ }
+
+ @Then("Run the Pipeline in Runtime")
+ public void runThePipelineInRuntime() throws InterruptedException {
+ CdfPipelineRunAction.runClick();
+ }
+
+ @Then("Wait till pipeline is in running state")
+ public void waitTillPipelineIsInRunningState() throws InterruptedException {
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 500);
+ wait.until(ExpectedConditions.or(
+ ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[@data-cy='Succeeded']")),
+ ExpectedConditions.visibilityOfElementLocated(By.xpath("//*[@data-cy='Failed']"))));
+ }
+
+ @Then("Verify the pipeline status is {string}")
+ public void verifyThePipelineStatusIs(String status) {
+ Assert.assertTrue(SeleniumHelper.verifyElementPresent("//*[@data-cy='" + status + "']"));
+ }
+
+ @Then("Click on Advance logs and validate the success message")
+ public void clickOnAdvanceLogsAndValidateTheSuccessMessage() {
+ CdfLogActions.goToAdvanceLogs();
+ CdfLogActions.validateSucceeded();
+ }
+
+ @Then("Validate Amazon properties")
+ public void validateAmazonPropertiesForErrorWithoutProvidingMandatoryFields() {
+ CdfStudioActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.validateButton);
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_VALIDATION);
+ String actualErrorMessage = CdfStudioLocators.pluginValidationSuccessMsg.getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ }
+
+ @Then("Enter the Amazon Properties with mandatory and basic fields")
+ public void enterTheAmazonProperties() throws InterruptedException, IOException {
+ AmazonS3Actions.clickAmazonProperties();
+ AmazonS3Actions.accessID(System.getenv("AWS_KEY_ID"));
+ AmazonS3Actions.accessKey(System.getenv("AWS_SECRET_KEY"));
+ AmazonS3Actions.enterReferenceName(E2ETestUtils.pluginProp("referenceName"));
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp("bucketPath"));
+ CdfBigQueryPropertiesActions.getSchema();
+ CdfGcsActions.clickValidateButton();
+ }
+
+ @Then("Open Amazon Properties")
+ public void openAmazonProperties() {
+ CdfStudioActions.clickProperties("Amazon");
+ }
+
+ @Then("Enter the Amazon properties for bucket {string}")
+ public void enterTheAmazonPropertiesForBucket(String path) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp(path));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ AmazonS3Actions.accessID(System.getenv("AWS_KEY_ID"));
+ AmazonS3Actions.accessKey(System.getenv("AWS_SECRET_KEY"));
+ }
+
+ @Then("Enter the Amazon properties for sink bucket {string}")
+ public void enterTheAmazonPropertiesForSinkBucket(String path) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp(path));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ AmazonS3Actions.accessID(System.getenv("AWS_KEY_ID"));
+ AmazonS3Actions.accessKey(System.getenv("AWS_SECRET_KEY"));
+ }
+
+ @Then("Capture output schema")
+ public void captureOutputSchema() {
+ CdfBigQueryPropertiesActions.getSchema();
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 10);
+ wait.until(ExpectedConditions.numberOfElementsToBeMoreThan
+ (By.xpath("//*[@placeholder=\"Field name\"]"), 1));
+ SeleniumHelper.waitElementIsVisible(SeleniumDriver.getDriver().findElement(
+ By.xpath("//div[@data-cy='schema-fields-list']//*[@placeholder='Field name']")), 10L);
+ List<WebElement> propertiesOutputSchemaElements = SeleniumDriver.getDriver().findElements(
+ By.xpath("//div[@data-cy='schema-fields-list']//*[@placeholder='Field name']"));
+ for (WebElement element : propertiesOutputSchemaElements) {
+ propertiesOutputSchema.add(element.getAttribute("value"));
+ }
+ Assert.assertTrue(propertiesOutputSchema.size() >= 2);
+ }
+
+ @Then("Connect Source as {string} and sink as {string} to establish connection")
+ public void connectSourceAsAndSinkAsToEstablishConnection(String source, String sink) {
+ CdfStudioActions.connectSourceAndSink(source, sink);
+ }
+
+ @Then("Preview and run the pipeline")
+ public void previewAndRunThePipeline() {
+ SeleniumHelper.waitAndClick(CdfStudioLocators.preview, 5L);
+ CdfStudioLocators.runButton.click();
+ }
+
+ @Then("Verify the preview of pipeline is {string}")
+ public void verifyThePreviewOfPipelineIs(String previewStatus) {
+ WebDriverWait wait = new WebDriverWait(SeleniumDriver.getDriver(), 180);
+ wait.until(ExpectedConditions.visibilityOf(CdfStudioLocators.statusBanner));
+ Assert.assertTrue(CdfStudioLocators.statusBannerText.getText().contains(previewStatus));
+ if (!previewStatus.equalsIgnoreCase("failed")) {
+ wait.until(ExpectedConditions.invisibilityOf(CdfStudioLocators.statusBanner));
+ }
+ }
+
+ @Then("Click on PreviewData for Amazon")
+ public void clickOnPreviewDataForAmazon() {
+ AmazonS3Actions.clickPreviewData();
+ }
+
+ @Then("Verify Preview output schema matches the outputSchema captured in properties")
+ public void verifyPreviewOutputSchemaMatchesTheOutputSchemaCapturedInProperties() {
+ List<String> previewOutputSchema = new ArrayList<>();
+ List<WebElement> previewOutputSchemaElements = SeleniumDriver.getDriver().findElements(
+ By.xpath("(//h2[text()='Output Records']/parent::div/div/div/div/div)[1]//div[text()!='']"));
+ for (WebElement element : previewOutputSchemaElements) {
+ previewOutputSchema.add(element.getAttribute("title"));
+ }
+ Assert.assertTrue(previewOutputSchema.equals(propertiesOutputSchema));
+
+ }
+
+ @Then("Close the Preview and deploy the pipeline")
+ public void closeThePreviewAndDeployThePipeline() {
+ SeleniumHelper.waitAndClick(CdfStudioLocators.closeButton, 5L);
+ CdfStudioActions.previewSelect();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.pipelineDeploy, 2);
+ CdfStudioActions.pipelineDeploy();
+ }
+
+ @Then("Open the Logs and capture raw logs")
+ public void openTheLogsAndCaptureRawLogs() {
+ CdfPipelineRunAction.logsClick();
+ rawLog = CdfPipelineRunAction.captureRawLogs();
+ SeleniumDriver.getDriver().navigate().refresh();
+ BeforeActions.scenario.write(rawLog);
+ out.println(rawLog);
+ out.close();
+ }
+
+ @Then("Validate successMessage is displayed when pipeline is succeeded")
+ public void validateSuccessMessageIsDisplayedWhenPipelineIsSucceeded() {
+ CdfLogActions.validateSucceeded();
+ }
+
+ @Then("Validate the output record count")
+ public void validateTheOutputRecordCount() {
+ Assert.assertTrue(recordOut() > 0);
+ }
+
+ @Then("Open BigQuery Target Properties")
+ public void openBigQueryTargetProperties() {
+ CdfStudioActions.clickProperties("BigQuery");
+ }
+
+ @Then("Enter the BigQuery properties for table {string}")
+ public void entertheBigQuerypropertiesfortable(String tableName) throws IOException {
+ enterTheBigQueryPropertiesForTable(tableName);
+ CdfBigQueryPropertiesActions.clickUpdateTable();
+ CdfBigQueryPropertiesActions.clickTruncatableSwitch();
+ }
+
+ private void enterTheBigQueryPropertiesForTable(String tableName) throws IOException {
+ CdfBigQueryPropertiesActions.enterProjectId(E2ETestUtils.pluginProp("projectId"));
+ CdfBigQueryPropertiesActions.enterDatasetProjectId(E2ETestUtils.pluginProp("projectId"));
+ CdfBigQueryPropertiesActions.enterBigQueryReferenceName("BQ_Ref_" + UUID.randomUUID().toString());
+ CdfBigQueryPropertiesActions.enterBigQueryDataset(E2ETestUtils.pluginProp("dataset"));
+ CdfBigQueryPropertiesActions.enterBigQueryTable(E2ETestUtils.pluginProp(tableName));
+ }
+
+ @Then("Validate Bigquery properties")
+ public void validateBigqueryProperties() {
+ CdfGcsActions.clickValidateButton();
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_VALIDATION);
+ String actualErrorMessage = CdfStudioLocators.pluginValidationSuccessMsg.getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ }
+
+ @Then("Click on PreviewData for BigQuery")
+ public void clickOnPreviewDataForBigQuery() {
+ CdfBigQueryPropertiesActions.clickPreviewData();
+ }
+
+ @Then("Validate record transferred from Amazon {string} is equal to BigQuery {string} output records")
+ public void validateRecordTransferredFromAmazonIsEqualToBigQueryOutputRecords
+ (String table, String field) throws IOException, InterruptedException {
+ String projectId = (E2ETestUtils.pluginProp("projectId"));
+ String datasetName = (E2ETestUtils.pluginProp("dataset"));
+ String selectQuery = "SELECT count(*) FROM `" + projectId + "." + datasetName + "." + E2ETestUtils.pluginProp
+ (table) + "` WHERE " +
+ E2ETestUtils.pluginProp(field);
+ int count = GcpClient.executeQuery(selectQuery);
+ BeforeActions.scenario.write("number of records transferred with respect to filter:"
+ + count);
+ Assert.assertEquals(count, countRecords);
+ }
+
+ @Then("Enter the Amazon Properties with blank property {string}")
+ public void enterTheAmazonPropertiesWithBlankProperty(String property) {
+ if (property.equalsIgnoreCase("referenceName")) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp("amazonPath"));
+ } else if (property.equalsIgnoreCase("amazonPath")) {
+ AmazonS3Actions.enterReferenceName("Amazon_" + UUID.randomUUID().toString());
+ }
+ }
+
+ @Then("Validate mandatory property error for {string}")
+ public void validateMandatoryPropertyErrorFor(String property) {
+ CdfStudioActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.validateButton, 5L);
+ E2ETestUtils.validateMandatoryPropertyError(property);
+ }
+
+ @Then("Get Count of no of records transferred to BigQuery in {string}")
+ public void getCountOfNoOfRecordsTransferredToBigQueryIn(String tableName) throws IOException, InterruptedException {
+ countRecords = GcpClient.countBqQuery(E2ETestUtils.pluginProp(tableName));
+ BeforeActions.scenario.write("**********No of Records Transferred******************:" + countRecords);
+ Assert.assertTrue(countRecords > 0);
+ }
+
+ @Then("Delete the table {string}")
+ public void deleteTheTable(String table) throws IOException, InterruptedException {
+ try {
+ int existingRecords = GcpClient.countBqQuery(E2ETestUtils.pluginProp(table));
+ if (existingRecords > 0) {
+ GcpClient.dropBqQuery(E2ETestUtils.pluginProp(table));
+ BeforeActions.scenario.write("Table Deleted Successfully");
+ }
+ } catch (Exception e) {
+ BeforeActions.scenario.write(e.toString());
+ }
+ }
+
+ @Then("Enter the Amazon Properties with incorrect property {string} value {string}")
+ public void enterTheAmazonPropertiesWithIncorrectPropertyValue(String field, String value) {
+ if (field.equalsIgnoreCase("path")) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp(value));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ } else if (field.equalsIgnoreCase("Maximum Split Size")) {
+ SeleniumHelper.replaceElementValue(AmazonS3Locators.maximumSplitSize, E2ETestUtils.pluginProp(value));
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp("amazonPath"));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ } else if (field.equalsIgnoreCase("Path Field ")) {
+ SeleniumHelper.replaceElementValue(AmazonS3Locators.pathField, E2ETestUtils.pluginProp(value));
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp("amazonPath"));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ }
+ }
+
+ @Then("Validate incorrect property error for table {string} value {string}")
+ public void validateIncorrectPropertyErrorForTableValue(String property, String value) {
+ CdfBigQueryPropertiesActions.getSchema();
+ SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 5L);
+ String tableFullName = StringUtils.EMPTY;
+ if (property.equalsIgnoreCase("dataset")) {
+ tableFullName = E2ETestUtils.pluginProp("projectId") + ":" + E2ETestUtils.pluginProp(value)
+ + "." + E2ETestUtils.pluginProp("amazonBqTableName");
+ } else if (property.equalsIgnoreCase("table")) {
+ tableFullName = E2ETestUtils.pluginProp("projectId") + ":" + E2ETestUtils.pluginProp("dataset")
+ + "." + E2ETestUtils.pluginProp(value);
+ } else if (property.equalsIgnoreCase("datasetProject")) {
+ tableFullName = E2ETestUtils.pluginProp(value) + ":" + E2ETestUtils.pluginProp("dataset")
+ + "." + E2ETestUtils.pluginProp("amazonBqTableName");
+ }
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_INCORRECT_TABLE)
+ .replaceAll("TABLENAME", tableFullName);
+ String actualErrorMessage = E2ETestUtils.findPropertyErrorElement("table").getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement("table"));
+ String expectedColor = E2ETestUtils.errorProp(ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+ }
+
+ @Then("Enter the Amazon Properties for bucket {string} and format {string} with delimiter field {string}")
+ public void enterTheAmazonPropertiesForBucketAndFormatWithDelimiterField
+ (String bucket, String format, String delimiter) throws InterruptedException {
+ enterTheAmazonPropertiesForBucket(bucket);
+ AmazonS3Actions.selectFormat(E2ETestUtils.pluginProp(format));
+ CdfGcsActions.enterDelimiterField(E2ETestUtils.pluginProp(delimiter));
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and path field {string} with pathFileName only set {string}")
+ public void enterTheAmazonPropertiesForBucketAndPathFieldWithPathFileNameOnlySet
+ (String path, String field, String pathNameOnly) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.enterPathField(E2ETestUtils.pluginProp(field));
+ if (pathNameOnly.equalsIgnoreCase("true")) {
+ AmazonS3Actions.selectPathFilename();
+ }
+ }
+
+ @Then("Enter the Amazon properties for emptyvaluepath {string}")
+ public void enterTheAmazonPropertiesForEmptyvaluepath(String path) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.allowEmptyInput();
+ }
+
+ @Then("Validate the output record count is equal to zero")
+ public void validateTheOutputRecordCountIsEqualToZero() {
+ Assert.assertTrue(recordOut() == 0);
+
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and field readFilesRecursively")
+ public void enterTheAmazonPropertiesForBucketAndField(String path) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.readFilesRecursively();
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and regex filter {string}")
+ public void enterTheAmazonPropertiesForBucketAndRegexFilter(String path, String regexFilter) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.enterRegexFilter(E2ETestUtils.pluginProp(regexFilter));
+ }
+
+ @Then("Verify plugin properties validation fails with error")
+ public void verifyPluginPropertiesValidationFailsWithError() {
+ CdfStudioActions.clickValidateButton();
+ SeleniumHelper.waitElementIsVisible(CdfStudioLocators.validateButton, 5L);
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_ERROR_FOUND_VALIDATION);
+ String actualErrorMessage = CdfStudioLocators.pluginValidationErrorMsg.getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ }
+
+ @Then("Verify invalid amazon bucket path error")
+ public void verifyInvalidAmazonBucketPathError() {
+ CdfStudioActions.clickValidateButton();
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_AMAZON_INVALID_PATH);
+ String actualErrorMessage = E2ETestUtils.findPropertyErrorElement("path").getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement("path"));
+ String expectedColor = E2ETestUtils.errorProp(ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+ }
+
+ @Then("Enter the Amazon properties with blank authentication fields {string}")
+ public void enterTheAmazonPropertiesWithBlankAuthenticationFields(String authenticationMethod) {
+ if (authenticationMethod.equalsIgnoreCase("accessID")) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp("amazonRecursiveDataPath"));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ AmazonS3Actions.accessKey(E2ETestUtils.pluginProp("accessKey"));
+ } else if (authenticationMethod.equalsIgnoreCase("accessKey")) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp("amazonRecursiveDataPath"));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ AmazonS3Actions.accessID(E2ETestUtils.pluginProp("accessID"));
+ }
+ }
+
+ @When("Target is Amazon")
+ public void targetIsAmazon() {
+ AmazonS3Actions.sinkAmazon();
+ }
+
+ @Then("Open BigQuery Properties")
+ public void openBigQueryProperties() {
+ CdfStudioActions.clickProperties("BigQuery");
+ }
+
+ @Then("Open Amazon Target Properties")
+ public void openAmazonTargetProperties() {
+ CdfStudioActions.clickProperties("Amazon");
+ }
+
+ @Then("Validate records out from Amazon is equal to records transferred in BigQuery {string} output records")
+ public void validateRecordsOutFromAmazonIsEqualToRecordsTransferredInBigQueryOutputRecords(String tableName)
+ throws IOException, InterruptedException {
+ int countRecords = gcpClient.countBqQuery(E2ETestUtils.pluginProp(tableName));
+ Assert.assertEquals(countRecords, recordOut());
+ }
+ @Then("Verify output field {string} in target BigQuery table {string} contains path of the amzonbucket {string}")
+ public void verifyOutputFieldInTargetBigQueryTableContainsPathOfTheAmzonbucket
+ (String field, String targetTable, String bucketPath) throws IOException, InterruptedException {
+ Optional<String> result = GcpClient
+ .getSoleQueryResult("SELECT distinct " + E2ETestUtils.pluginProp(field) + " as bucket FROM `"
+ + (E2ETestUtils.pluginProp("projectId")) + "."
+ + (E2ETestUtils.pluginProp("dataset")) + "."
+ + E2ETestUtils.pluginProp(targetTable) + "` ");
+ String pathFromBQTable = StringUtils.EMPTY;
+ if (result.isPresent()) {
+ pathFromBQTable = result.get();
+ }
+ BeforeActions.scenario.write("Amazon bucket path in BQ Table :" + pathFromBQTable);
+ Assert.assertEquals(E2ETestUtils.pluginProp(bucketPath), pathFromBQTable);
+ }
+
+ @When("Source is BigQuery")
+ public void sourceIsBigQuery() throws InterruptedException {
+ CdfStudioActions.selectBQ();
+ }
+
+ @Then("Close the BigQuery properties")
+ public void closeTheBigQueryProperties() {
+ CdfStudioActions.clickCloseButton();
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and incorrect split size {string}")
+ public void enterTheAmazonPropertiesForBucketAndIncorrectSplitSize(String path, String value) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.enterMaxSplitSize(E2ETestUtils.pluginProp(value));
+ }
+
+ @Then("Verify invalid split size error")
+ public void verifyInvalidSplitSizeError() {
+ CdfStudioActions.clickValidateButton();
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_ERROR_FOUND_VALIDATION);
+ String actualErrorMessage = E2ETestUtils.findPropertyErrorElement("maxSplitSize").getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement("maxSplitSize"));
+ String expectedColor = E2ETestUtils.errorProp(ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+
+ }
+
+ @Then("Enter the Amazon properties using IAM for bucket {string}")
+ public void enterTheAmazonPropertiesUsingIAMForBucket(String path) {
+ AmazonS3Actions.enterBucketPath(E2ETestUtils.pluginProp(path));
+ AmazonS3Actions.enterReferenceName("Amazon" + UUID.randomUUID().toString());
+ AmazonS3Actions.clickIAM();
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and format type {string}")
+ public void enterTheAmazonPropertiesForBucketAndFormatType(String path, String format) throws InterruptedException {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.selectFormat(E2ETestUtils.pluginProp(format));
+ }
+
+ @Then("Validate output error is not displayed")
+ public void validateOutputErrorIsNotDisplayed() {
+ Assert.assertFalse(SeleniumHelper.isElementPresent(CdfStudioLocators.pluginValidationErrorMsg));
+ }
+
+ @Then("Validate get schema is loaded without error")
+ public void validateGetSchemaIsLoadedWithoutError() {
+ SeleniumHelper.waitElementIsVisible(CdfBigQueryPropertiesLocators.getSchemaButton, 10);
+ }
+
+ @Then("Enter the Amazon properties using IAM for bucket {string} and format type {string}")
+ public void enterTheAmazonPropertiesUsingIAMForBucketAndFormatType(String path, String format)
+ throws InterruptedException {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.selectFormat(E2ETestUtils.pluginProp(format));
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and selecting {string} File encoding")
+ public void enterTheAmazonPropertiesForBucketAndSelectingFileEncoding(String path, String encoding)
+ throws InterruptedException {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.fileEncoding(E2ETestUtils.pluginProp(encoding));
+ }
+
+ @Then("Enter the BigQuery properties for source table {string}")
+ public void enterTheBigQueryPropertiesForSourceTable(String table) throws IOException {
+ enterTheBigQueryPropertiesForTable(table);
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and incorrect path field {string}")
+ public void enterTheAmazonPropertiesForBucketAndIncorrectPathField(String path, String pathfield) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.enterPathField(E2ETestUtils.pluginProp(pathfield));
+ }
+
+ @Then("Verify invalid path field error")
+ public void verifyInvalidPathFieldError() {
+ CdfStudioActions.clickValidateButton();
+ String expectedErrorMessage = E2ETestUtils.errorProp(ERROR_MSG_AMAZON_INVALID_PATH_FIELD);
+ String actualErrorMessage = E2ETestUtils.findPropertyErrorElement("pathField").getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement("pathField"));
+ String expectedColor = E2ETestUtils.errorProp(ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+ }
+
+ @Then("Enter the Amazon properties for bucket {string} and skip header value {string}")
+ public void enterTheAmazonPropertiesForBucketAndSkipHeaderValue(String path, String arg1) {
+ enterTheAmazonPropertiesForBucket(path);
+ AmazonS3Actions.toggleSkipHeader();
+ }
+
+ @Then("Validate Skip header toggle button is selected")
+ public void validateSkipHeaderToggleButtonIsSelected() {
+ Assert.assertTrue(AmazonS3Locators.skipHeader.isSelected());
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/amazons3/stepsdesign/package-info.java
new file mode 100644
index 0000000..e846caf
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/stepsdesign/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * This package contains stepDesigns for AmazonS3.
+ */
+package io.cdap.plugin.amazons3.stepsdesign;
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/testrunner/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/amazons3/testrunner/TestRunner.java
new file mode 100644
index 0000000..bdc553e
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/testrunner/TestRunner.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.amazons3.testrunner;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute Amazon as Source, Amazon as Sink and related test cases.
+ */
+
+@RunWith(Cucumber.class)
+@CucumberOptions(
+ features = {"src/e2e-test/features"},
+ glue = {"io.cdap.plugin.amazons3.stepsdesign", "stepsdesign"},
+ tags = {"@AmazonS3"},
+ monochrome = true,
+ plugin = {"pretty", "html:target/cucumber-html-report", "json:target/cucumber-reports/cucumber.json",
+ "junit:target/cucumber-reports/cucumber.xml"}
+)
+public class TestRunner {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/amazons3/testrunner/package-info.java b/src/e2e-test/java/io/cdap/plugin/amazons3/testrunner/package-info.java
new file mode 100644
index 0000000..6a0097e
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/amazons3/testrunner/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * This package contains testRunner for AmazonS3.
+ */
+package io.cdap.plugin.amazons3.testrunner;
diff --git a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java
new file mode 100644
index 0000000..1f3012b
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestConstants.java
@@ -0,0 +1,15 @@
+package io.cdap.plugin.utils;
+
+/**
+ * Error message property keys used by the e2e test assertions.
+ */
+public class E2ETestConstants {
+ public static final String ERROR_MSG_COLOR = "errorMessageColor";
+ public static final String ERROR_MSG_MANDATORY = "errorMessageMandatory";
+ public static final String ERROR_MSG_VALIDATION = "errorMessageValidation";
+ public static final String ERROR_MSG_ERROR_FOUND_VALIDATION = "errorMessageErrorFoundValidation";
+ public static final String ERROR_MSG_INCORRECT_TABLE = "errorMessageTable";
+ public static final String ERROR_MSG_AMAZON_INVALID_PATH = "errorMessageAmazonInvalidPath";
+ public static final String ERROR_MSG_AMAZON_INVALID_PATH_FIELD = "errorMessageAmazonIncorrectPathField";
+ public static final String ERROR_MSG_AMAZON_INVALID_SPLIT_SIZE = "errorMessageAmazonIncorrectSplitSize";
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/utils/E2ETestUtils.java b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestUtils.java
new file mode 100644
index 0000000..da70bfa
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/utils/E2ETestUtils.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright © 2021 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.utils;
+
+import io.cdap.e2e.utils.ConstantsUtil;
+import io.cdap.e2e.utils.SeleniumDriver;
+import org.apache.log4j.Logger;
+import org.junit.Assert;
+import org.openqa.selenium.By;
+import org.openqa.selenium.WebElement;
+
+import java.io.IOException;
+import java.util.Properties;
+
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_COLOR;
+import static io.cdap.plugin.utils.E2ETestConstants.ERROR_MSG_MANDATORY;
+
+/**
+ * E2ETestUtils contains the helper functions.
+ */
+public class E2ETestUtils {
+
+ private static Properties pluginProperties = new Properties();
+ private static Properties errorProperties = new Properties();
+ private static final Logger logger = Logger.getLogger(E2ETestUtils.class);
+
+ static {
+
+ try {
+ pluginProperties.load(E2ETestUtils.class.getResourceAsStream("/pluginParameters.properties"));
+ errorProperties.load(E2ETestUtils.class.getResourceAsStream("/errorMessage.properties"));
+ } catch (IOException e) {
+ logger.error("Error while reading properties file" + e);
+ }
+ }
+
+ public static String pluginProp(String property) {
+ return pluginProperties.getProperty(property);
+ }
+
+ public static String errorProp(String property) {
+ return errorProperties.getProperty(property);
+ }
+
+ public static void validateMandatoryPropertyError(String property) {
+ String expectedErrorMessage = errorProp(ERROR_MSG_MANDATORY)
+ .replaceAll("PROPERTY", property);
+ String actualErrorMessage = findPropertyErrorElement(property).getText();
+ Assert.assertEquals(expectedErrorMessage, actualErrorMessage);
+ String actualColor = E2ETestUtils.getErrorColor(E2ETestUtils.findPropertyErrorElement(property));
+ String expectedColor = E2ETestUtils.errorProp(ERROR_MSG_COLOR);
+ Assert.assertEquals(expectedColor, actualColor);
+ }
+
+ public static WebElement findPropertyErrorElement(String property) {
+ return SeleniumDriver.getDriver().findElement(
+ By.xpath("//*[@data-cy='" + property + "']/following-sibling::div[@data-cy='property-row-error']"));
+ }
+
+ public static String getErrorColor(WebElement element) {
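+ // getCssValue returns colours as "rgba(r, g, b, a)"; convert to "#rrggbb" so it can be compared with errorMessageColor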
+ String color = element.getCssValue(ConstantsUtil.COLOR);
+ String[] hexValue = color.replace("rgba(", "").
+ replace(")", "").split(",");
+ int hexValue1 = Integer.parseInt(hexValue[0]);
+ hexValue[1] = hexValue[1].trim();
+ int hexValue2 = Integer.parseInt(hexValue[1]);
+ hexValue[2] = hexValue[2].trim();
+ int hexValue3 = Integer.parseInt(hexValue[2]);
+ return String.format("#%02x%02x%02x", hexValue1, hexValue2, hexValue3);
+ }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/utils/package-info.java b/src/e2e-test/java/io/cdap/plugin/utils/package-info.java
new file mode 100644
index 0000000..4d7941b
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/utils/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the e2e test helpers.
+ */
+package io.cdap.plugin.utils;
diff --git a/src/e2e-test/resources/errorMessage.properties b/src/e2e-test/resources/errorMessage.properties
new file mode 100644
index 0000000..ac8e919
--- /dev/null
+++ b/src/e2e-test/resources/errorMessage.properties
@@ -0,0 +1,12 @@
+errorMessageTemporaryBucket=Bucket name can only contain lowercase letters, numbers, '.', '_', and '-'.
+errorMessageDataset=Required property 'dataset' has no value.
+errorMessageReference=Required property 'referenceName' has no value.
+errorMessageColor=#a40403
+errorMessageValidation=No errors found.
+errorMessageErrorFoundValidation=1 error found
+errorMessageMandatory=Required property 'PROPERTY' has no value.
+errorMessageTable=BigQuery table 'TABLENAME' does not exist. Ensure correct table name is provided.
+errorMessageAmazonInvalidPath=Path must start with s3a:// or s3n://.
+errorMessageAmazonIncorrectPathField=Path field 'aaa' must exist in input schema.
+errorMessageAmazonIncorrectSplitSize=Unable to create config for batchsource S3 'maxSplitSize' is invalid: \
+ Value of field class io.cdap.plugin.format.plugin.AbstractFileSourceConfig.maxSplitSize is expected to be a number.
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
new file mode 100644
index 0000000..042d34a
--- /dev/null
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -0,0 +1,31 @@
+projectId=cdf-athena
+dataset=test_automation
+
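+# Amazon S3 test data paths, formats and expected values referenced by the AmazonS3 feature files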
+amazonPath=s3n://gcp-cdf-qa/unzip_files/10000 Records.csv
+amazonTextFormat=text
+amazonBqTableDemo=DemoCheck1
+amazonBqTableName=test_data3
+amazonRecursiveDataPath=s3n://gcp-cdf-qa/unzip_files/
+amazonEmptyInputPath=s3n://gcp-cdf-qa/file (2).csv
+amazonIncorrectPath=abc
+amazonPathField=body
+amazonDelimitedPath=s3n://gcp-cdf-qa/Delimited_format/2021-12-07-17-58/part-r-00000
+amazonDelimiter=.
+amazonRegexPath=s3n://gcp-cdf-qa/unzip_files/
+amazonRegexFilter=.+.csv
+amazonPathForCsvType=s3n://gcp-cdf-qa/file (2).csv
+amazonReferenceName=Amazon-Demo
+amazonFormatDelimited=delimited
+amazonIncorrectRegexFilter=......
+amazonIncorrectSplitSize=aaa
+amazonIncorrectPathField=aaa
+amazonSinkBucket=s3n://gcp-cdf-qa/sink_demo
+amazonCsvFormat=csv
+amazonParquetPath=s3n://gcp-cdf-qa/BQMTParquet (1)
+amazonFormatParquet=parquet
+amazonEncodingBug=UTF-32
+amazonSinkPath=s3n://gcp-cdf-qa/unzip_files/10000%20Records.csv
+amazonLatinEncoding=ISO-8859-1 (Latin-1 Western European)
+amazonTurkishEncoding=ISO-8859-9 (Latin-5 Turkish)
+amazonEuropeanEncoding=ISO-8859-2 (Latin-2 Central European)
+amazonGcsBucketName=cdf-athena/Employee_work/2021-09-03-21-00