Commit e2e-XmlReader-ITN

AnkitCLI committed May 21, 2024
1 parent 61b1d14 commit 0d89c56
Showing 11 changed files with 405 additions and 0 deletions.
115 changes: 115 additions & 0 deletions core-plugins/src/e2e-test/features/xmlReader/XmlReader.feature
@@ -0,0 +1,115 @@
# Copyright © 2024 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@XmlReader_Source
Feature: File Sink - Verification of successful data transfer from the XmlReader plugin to the File sink

@XmlReader_Source_Required @XMLREADER_TEST @FILE_SINK_TEST
  Scenario: Verify data is transferred from XmlReader to File sink
Given Open Datafusion Project to configure pipeline
When Select plugin: "XML Reader" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "File" from the plugins list as: "Sink"
Then Connect plugins: "XMLReader" and "File" to establish connection
Then Navigate to the properties page of plugin: "XMLReader"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter input plugin property: "path" with value: "xmlTestFile"
Then Enter input plugin property: "nodePath" with value: "node"
Then Select dropdown plugin property: "reprocessingRequired" with option value: "No"
Then Validate "XMLReader" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "File"
Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
Then Enter input plugin property: "path" with value: "fileSinkTargetBucket"
Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm"
Then Select dropdown plugin property: "format" with option value: "csv"
Then Validate "File" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate OUT record count is equal to IN record count
Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "outputForXMLTest"

@XmlReader_Source_Required @XMLREADER_DELETE_TEST @FILE_SINK_TEST
  Scenario: Verify data is transferred from XmlReader to File sink using a pattern and the delete action
Given Open Datafusion Project to configure pipeline
When Select plugin: "XML Reader" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "File" from the plugins list as: "Sink"
Then Connect plugins: "XMLReader" and "File" to establish connection
Then Navigate to the properties page of plugin: "XMLReader"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter input plugin property: "path" with value: "xmlTestFile"
Then Enter input plugin property: "nodePath" with value: "node"
Then Select dropdown plugin property: "reprocessingRequired" with option value: "No"
Then Enter input plugin property: "pattern" with value: "filePattern"
Then Select dropdown plugin property: "ActionAfterProcessingFile" with option value: "Delete"
Then Validate "XMLReader" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "File"
Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
Then Enter input plugin property: "path" with value: "fileSinkTargetBucket"
Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm"
Then Select dropdown plugin property: "format" with option value: "csv"
Then Validate "File" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate OUT record count is equal to IN record count
Then Validate that file gets successfully deleted from the gcs bucket
Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "outputForXMLTest"

@XMLREADER_TEST @FILE_SINK_TEST
  Scenario: Verify data is transferred from XmlReader to File sink using the move action
Given Open Datafusion Project to configure pipeline
When Select plugin: "XML Reader" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "File" from the plugins list as: "Sink"
Then Connect plugins: "XMLReader" and "File" to establish connection
Then Navigate to the properties page of plugin: "XMLReader"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter input plugin property: "path" with value: "xmlTestFile"
Then Enter input plugin property: "nodePath" with value: "node"
Then Select dropdown plugin property: "reprocessingRequired" with option value: "No"
Then Select dropdown plugin property: "ActionAfterProcessingFile" with option value: "Move"
Then Enter input plugin property: "targetFolder" with value: "folder"
Then Validate "XMLReader" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "File"
Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
Then Enter input plugin property: "path" with value: "fileSinkTargetBucket"
Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm"
Then Select dropdown plugin property: "format" with option value: "csv"
Then Validate "File" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate OUT record count is equal to IN record count
Then Validate that file gets successfully moved to the target location
Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "outputForXMLTest"
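Note: the quoted values in these steps (for example "xmlTestFile" and "fileSinkTargetBucket") are property keys, not literals; the framework resolves them at runtime. A minimal sketch of that lookup, assuming the PluginPropertyUtils helper used later in this commit (the class name and printed output are illustrative):

import io.cdap.e2e.utils.PluginPropertyUtils;

public class PropertyLookupSketch {
  public static void main(String[] args) {
    // "xmlTestFile" is registered by a @Before hook (see TestSetupHooks below)
    // as "gs://<bucket>/<object>" before the scenario runs.
    String sourcePath = PluginPropertyUtils.pluginProp("xmlTestFile");
    System.out.println("XMLReader source path: " + sourcePath);
  }
}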
@@ -0,0 +1,76 @@
# Copyright © 2024 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@XmlReader_Source
Feature: File Sink - Verify XML Reader plugin error scenarios

@XMLREADER_DELETE_TEST @FILE_SINK_TEST
  Scenario: Verify pipeline fails when an invalid pattern is entered
Given Open Datafusion Project to configure pipeline
When Select plugin: "XML Reader" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "File" from the plugins list as: "Sink"
Then Connect plugins: "XMLReader" and "File" to establish connection
Then Navigate to the properties page of plugin: "XMLReader"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter input plugin property: "path" with value: "xmlTestFile"
Then Enter input plugin property: "nodePath" with value: "node"
Then Select dropdown plugin property: "reprocessingRequired" with option value: "No"
Then Enter input plugin property: "pattern" with value: "invalidPattern"
Then Validate "XMLReader" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "File"
Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
Then Enter input plugin property: "path" with value: "fileSinkTargetBucket"
Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm"
Then Select dropdown plugin property: "format" with option value: "csv"
Then Validate "File" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Failed"
Then Close the pipeline logs

@XMLREADER_TEST @FILE_SINK_TEST
Scenario: Verify no data is transferred when an invalid node path is entered
Given Open Datafusion Project to configure pipeline
When Select plugin: "XML Reader" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "File" from the plugins list as: "Sink"
Then Connect plugins: "XMLReader" and "File" to establish connection
Then Navigate to the properties page of plugin: "XMLReader"
Then Enter input plugin property: "referenceName" with value: "ReferenceName"
Then Enter input plugin property: "path" with value: "xmlTestFile"
Then Enter input plugin property: "nodePath" with value: "invalidNode"
Then Select dropdown plugin property: "reprocessingRequired" with option value: "No"
Then Validate "XMLReader" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "File"
Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
Then Enter input plugin property: "path" with value: "fileSinkTargetBucket"
Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm"
Then Select dropdown plugin property: "format" with option value: "csv"
Then Validate "File" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate OUT record count is equal to IN record count
@@ -16,13 +16,17 @@
package io.cdap.plugin.common.stepsdesign;

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageException;
import com.google.cloud.storage.StorageOptions;
import io.cdap.e2e.utils.CdfHelper;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.e2e.utils.StorageClient;
import io.cucumber.java.en.Then;
import org.apache.directory.api.util.Strings;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import stepsdesign.BeforeActions;

import java.io.BufferedReader;
@@ -41,6 +45,7 @@
* Core Plugin Common Step Design.
*/
public class CorePlugin implements CdfHelper {
private static final Logger LOG = LoggerFactory.getLogger(CorePlugin.class);
@Then("Verify the CSV Output File matches the Expected Output File: {string} With Expected Partitions: {string}")
public void verifyCSVOutput(String file, String expectedPartitions) {
String gcsTargetBucket = PluginPropertyUtils.pluginProp("gcsTargetBucket");
@@ -126,4 +131,49 @@ public void validateOutputOrcFileGeneratedByFileSinkPluginIsEqualToExpectedOutpu
PluginPropertyUtils.pluginProp(expectedOutputFilePath));
}
}

@Then("Validate that file gets successfully deleted from the gcs bucket")
public static boolean validateThatFileGetsDeletedFromTheGcsBucket() {
String bucketName = TestSetupHooks.fileSourceBucket;
String fileName = PluginPropertyUtils.pluginProp("xmlFileName");
// Instantiate a client for Google Cloud Storage
Storage storage = StorageOptions.newBuilder().setProjectId(PluginPropertyUtils.pluginProp("projectId"))
.build().getService();
// Check if the file exists in the bucket
Blob blob = storage.get(bucketName, fileName);
// If blob is null, the file does not exist
boolean isDeleted = (blob == null);
if (isDeleted) {
LOG.info("The file " + fileName + " has been successfully deleted from the bucket " + bucketName + ".");
} else {
LOG.info("The file " + fileName + " still exists in the bucket " + bucketName + ".");
}
return isDeleted;
}

@Then("Validate that file gets successfully moved to the target location")
public static boolean verifyFileMovedWithinGCSBucket() {
// Instantiate a client for Google Cloud Storage with the specified project ID
Storage storage = StorageOptions.newBuilder().setProjectId(PluginPropertyUtils.pluginProp("projectId"))
.build().getService();
String bucketName = TestSetupHooks.fileSourceBucket;
String fileName = PluginPropertyUtils.pluginProp("xmlFileName");
String targetLocation = PluginPropertyUtils.pluginProp("bucketName");
// Check if the source file exists
Blob sourceBlob = storage.get(bucketName, fileName);

// Check if the target file exists
Blob targetBlob = storage.get(targetLocation, fileName);

// Verify the file has been moved by checking if the source file does not exist and the target file exists
boolean isMoved = sourceBlob == null && targetBlob != null;
if (isMoved) {
LOG.info("The file " + fileName + " was successfully moved to target location in the bucket ");
} else if (sourceBlob != null) {
LOG.info("The source file " + fileName + " still exists in the bucket " + bucketName + ".");
} else {
LOG.info("The target file " + fileName + " does not exist in the bucket " + bucketName + ".");
}
return isMoved;
}
}
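For reference, the existence check both new steps rely on can be exercised outside Cucumber. A minimal standalone sketch using the same google-cloud-storage calls (the project, bucket, and object names below are placeholders):

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

public class GcsBlobCheckSketch {
  public static void main(String[] args) {
    // Placeholder identifiers; substitute a real project, bucket, and object.
    Storage storage = StorageOptions.newBuilder()
        .setProjectId("my-project")
        .build().getService();
    // storage.get(...) returns null when the object does not exist.
    Blob blob = storage.get("my-bucket", "data.xml");
    System.out.println(blob == null ? "object absent (deleted or moved)" : "object present");
  }
}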
@@ -45,6 +45,7 @@ public class TestSetupHooks {
public static String gcsSourceBucketName1 = StringUtils.EMPTY;
public static String gcsSourceBucketName2 = StringUtils.EMPTY;
public static String gcsTargetBucketName = StringUtils.EMPTY;
public static String fileSourceBucket = StringUtils.EMPTY;
public static String fileSourceBucket1 = StringUtils.EMPTY;
public static String fileSourceBucket2 = StringUtils.EMPTY;
public static String fileSourceBucket3 = StringUtils.EMPTY;
@@ -291,6 +292,12 @@ public static void deleteSourceBucketWithFileCSVDataTypeTest1() {
fileSourceBucket1 = StringUtils.EMPTY;
}

@After(order = 1, value = "@XMLREADER_TEST or @XMLREADER_DELETE_TEST")
public static void deleteSourceBucketWithXmlFile() {
deleteGCSBucket(fileSourceBucket);
fileSourceBucket = StringUtils.EMPTY;
}

@Before(order = 1, value = "@CSV_NO_HEADER_FILE")
public static void createBucketWithCSVNoHeaderFile() throws IOException, URISyntaxException {
fileSourceBucket2 = createGCSBucketWithFile(PluginPropertyUtils.pluginProp("csvNoHeaderFile"));
@@ -455,6 +462,12 @@ private static String createGCSBucketWithFile(String filePath) throws IOExceptio
return bucketName;
}

private static String createGCSBucketWithXmlFile(String filePath) throws IOException, URISyntaxException {
String bucketName = StorageClient.createBucket("e2e-test-xml").getName();
StorageClient.uploadObject(bucketName, filePath, filePath);
return bucketName;
}

private static void deleteGCSBucket(String bucketName) {
try {
for (Blob blob : StorageClient.listObjects(bucketName).iterateAll()) {
Expand Down Expand Up @@ -494,4 +507,20 @@ public static void createBucketWithExcelFile() throws IOException, URISyntaxExce
PluginPropertyUtils.pluginProp("excelFile"));
BeforeActions.scenario.write("excel test bucket name - " + fileSourceBucket1);
}

@Before(order = 1, value = "@XMLREADER_TEST")
public static void createBucketWithXmlFile() throws IOException, URISyntaxException {
fileSourceBucket = createGCSBucketWithXmlFile(PluginPropertyUtils.pluginProp("xmlFile"));
PluginPropertyUtils.addPluginProp("xmlTestFile", "gs://" + fileSourceBucket + "/" +
PluginPropertyUtils.pluginProp("xmlFile"));
BeforeActions.scenario.write("xml test bucket name - " + fileSourceBucket);
}

@Before(order = 1, value = "@XMLREADER_DELETE_TEST")
public static void createBucketWithXmlFileForTestPattern() throws IOException, URISyntaxException {
fileSourceBucket = createGCSBucketWithXmlFile(PluginPropertyUtils.pluginProp("xmlFile"));
PluginPropertyUtils.addPluginProp("xmlTestFile", "gs://" + fileSourceBucket + "/testdata/xmldata/"
+ "*");
BeforeActions.scenario.write("xml test bucket name - " + fileSourceBucket);
}
}
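For context, the wildcard path registered by the @XMLREADER_DELETE_TEST hook works because uploadObject(bucketName, filePath, filePath) stores the object under its local relative path. A small sketch of that relationship, assuming the xmlFile property points inside testdata/xmldata (the bucket and file names are illustrative):

public class XmlWildcardPathSketch {
  public static void main(String[] args) {
    String bucket = "e2e-test-xml-1a2b";                   // illustrative bucket name
    String objectKey = "testdata/xmldata/testxmlfile.xml"; // illustrative object key
    String wildcard = "gs://" + bucket + "/testdata/xmldata/*";
    String fullPath = "gs://" + bucket + "/" + objectKey;
    // The XMLReader "path" wildcard matches any object under the prefix.
    String prefix = wildcard.substring(0, wildcard.length() - 1);
    System.out.println(fullPath.startsWith(prefix)
        ? fullPath + " is matched by " + wildcard
        : fullPath + " is not matched by " + wildcard);
  }
}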
@@ -0,0 +1,37 @@
/*
* Copyright © 2024 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.xmlreader.runners;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute XmlReader Source plugin test cases.
*/
@RunWith(Cucumber.class)
@CucumberOptions(
features = {"src/e2e-test/features"},
glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"},
tags = {"@XmlReader_Source"},
monochrome = true,
plugin = {"pretty", "html:target/cucumber-html-report/XmlReader-source",
"json:target/cucumber-reports/cucumber-xmlreader-source.json",
"junit:target/cucumber-reports/cucumber-xmlreader-source.xml"}
)
public class TestRunner {
}
@@ -0,0 +1,36 @@
/*
* Copyright © 2024 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.xmlreader.runners;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Test Runner to execute only the required XmlReader Source plugin test cases.
*/
@RunWith(Cucumber.class)
@CucumberOptions(
features = {"src/e2e-test/features"},
glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"},
tags = {"@XmlReader_Source_Required"},
monochrome = true,
plugin = {"pretty", "html:target/cucumber-html-report/xmlreader-source",
"json:target/cucumber-reports/cucumber-xmlreader-source.json",
"junit:target/cucumber-reports/cucumber-xmlreader-source.xml"}
)
public class TestRunnerRequired {
}