From 0d89c562041088e80566fd9b5a4daf98841f67df Mon Sep 17 00:00:00 2001 From: AnkitCLI Date: Thu, 9 May 2024 16:05:26 +0530 Subject: [PATCH] e2e-XmlReader-ITN --- .../features/xmlReader/XmlReader.feature | 115 ++++++++++++++++++ .../xmlReader/XmlReaderErrorScenarios.feature | 76 ++++++++++++ .../plugin/common/stepsdesign/CorePlugin.java | 50 ++++++++ .../common/stepsdesign/TestSetupHooks.java | 29 +++++ .../plugin/xmlreader/runners/TestRunner.java | 37 ++++++ .../xmlreader/runners/TestRunnerRequired.java | 36 ++++++ .../xmlreader/runners/package-info.java | 19 +++ .../pluginDataCyAttributes.properties | 1 + .../resources/pluginParameters.properties | 9 ++ .../OUTPUT_FOR_XMLREADER_TEST.csv | 15 +++ .../testdata/xmldata/testxmlfile.xml | 18 +++ 11 files changed, 405 insertions(+) create mode 100644 core-plugins/src/e2e-test/features/xmlReader/XmlReader.feature create mode 100644 core-plugins/src/e2e-test/features/xmlReader/XmlReaderErrorScenarios.feature create mode 100644 core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunner.java create mode 100644 core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunnerRequired.java create mode 100644 core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/package-info.java create mode 100644 core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_XMLREADER_TEST.csv create mode 100644 core-plugins/src/e2e-test/resources/testdata/xmldata/testxmlfile.xml diff --git a/core-plugins/src/e2e-test/features/xmlReader/XmlReader.feature b/core-plugins/src/e2e-test/features/xmlReader/XmlReader.feature new file mode 100644 index 000000000..968d5afc6 --- /dev/null +++ b/core-plugins/src/e2e-test/features/xmlReader/XmlReader.feature @@ -0,0 +1,115 @@ +# Copyright © 2024 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+ +@XmlReader_Source +Feature:File Sink - Verification of XmlReader plugin to File successful data transfer + + @XmlReader_Source_Required @XMLREADER_TEST @FILE_SINK_TEST + Scenario: To verify data is getting transferred from XmlReader to File sink + Given Open Datafusion Project to configure pipeline + When Select plugin: "XML Reader" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "File" from the plugins list as: "Sink" + Then Connect plugins: "XMLReader" and "File" to establish connection + Then Navigate to the properties page of plugin: "XMLReader" + Then Enter input plugin property: "referenceName" with value: "ReferenceName" + Then Enter input plugin property: "path" with value: "xmlTestFile" + Then Enter input plugin property: "nodePath" with value: "node" + Then Select dropdown plugin property: "reprocessingRequired" with option value: "No" + Then Validate "XMLReader" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "File" + Then Enter input plugin property: "referenceName" with value: "FileReferenceName" + Then Enter input plugin property: "path" with value: "fileSinkTargetBucket" + Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm" + Then Select dropdown plugin property: "format" with option value: "csv" + Then Validate "File" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate OUT record count is equal to IN record count + Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "outputForXMLTest" + + @XmlReader_Source_Required @XMLREADER_DELETE_TEST @FILE_SINK_TEST + Scenario: To verify data is getting transferred from XmlReader to File sink using pattern and delete action + Given Open Datafusion Project to configure pipeline + When Select plugin: "XML Reader" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "File" from the plugins list as: "Sink" + Then Connect plugins: "XMLReader" and "File" to establish connection + Then Navigate to the properties page of plugin: "XMLReader" + Then Enter input plugin property: "referenceName" with value: "ReferenceName" + Then Enter input plugin property: "path" with value: "xmlTestFile" + Then Enter input plugin property: "nodePath" with value: "node" + Then Select dropdown plugin property: "reprocessingRequired" with option value: "No" + Then Enter input plugin property: "pattern" with value: "filePattern" + Then Select dropdown plugin property: "ActionAfterProcessingFile" with option value: "Delete" + Then Validate "XMLReader" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "File" + Then Enter input plugin property: "referenceName" with value: "FileReferenceName" + Then Enter input plugin property: "path" with value: "fileSinkTargetBucket" + Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm" + Then Select dropdown plugin property: "format" with option value: "csv" + Then Validate "File" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline 
in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate OUT record count is equal to IN record count + Then Validate that file gets successfully deleted from the gcs bucket + Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "outputForXMLTest" + + @XMLREADER_TEST @FILE_SINK_TEST + Scenario: To verify data is getting transferred from XmlReader to File sink using move action + Given Open Datafusion Project to configure pipeline + When Select plugin: "XML Reader" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "File" from the plugins list as: "Sink" + Then Connect plugins: "XMLReader" and "File" to establish connection + Then Navigate to the properties page of plugin: "XMLReader" + Then Enter input plugin property: "referenceName" with value: "ReferenceName" + Then Enter input plugin property: "path" with value: "xmlTestFile" + Then Enter input plugin property: "nodePath" with value: "node" + Then Select dropdown plugin property: "reprocessingRequired" with option value: "No" + Then Select dropdown plugin property: "ActionAfterProcessingFile" with option value: "Move" + Then Enter input plugin property: "targetFolder" with value: "folder" + Then Validate "XMLReader" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "File" + Then Enter input plugin property: "referenceName" with value: "FileReferenceName" + Then Enter input plugin property: "path" with value: "fileSinkTargetBucket" + Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm" + Then Select dropdown plugin property: "format" with option value: "csv" + Then Validate "File" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate OUT record count is equal to IN record count + Then Validate that file gets successfully moved to the target location + Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "outputForXMLTest" diff --git a/core-plugins/src/e2e-test/features/xmlReader/XmlReaderErrorScenarios.feature b/core-plugins/src/e2e-test/features/xmlReader/XmlReaderErrorScenarios.feature new file mode 100644 index 000000000..187fa2e77 --- /dev/null +++ b/core-plugins/src/e2e-test/features/xmlReader/XmlReaderErrorScenarios.feature @@ -0,0 +1,76 @@ +# Copyright © 2024 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+ +@XmlReader_Source +Feature:File Sink - Verify XML Reader Plugin Error scenarios + + @XMLREADER_DELETE_TEST @FILE_SINK_TEST + Scenario: Verify Pipeline fails when an invalid pattern is entered + Given Open Datafusion Project to configure pipeline + When Select plugin: "XML Reader" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "File" from the plugins list as: "Sink" + Then Connect plugins: "XMLReader" and "File" to establish connection + Then Navigate to the properties page of plugin: "XMLReader" + Then Enter input plugin property: "referenceName" with value: "ReferenceName" + Then Enter input plugin property: "path" with value: "xmlTestFile" + Then Enter input plugin property: "nodePath" with value: "node" + Then Select dropdown plugin property: "reprocessingRequired" with option value: "No" + Then Enter input plugin property: "pattern" with value: "invalidPattern" + Then Validate "XMLReader" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "File" + Then Enter input plugin property: "referenceName" with value: "FileReferenceName" + Then Enter input plugin property: "path" with value: "fileSinkTargetBucket" + Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm" + Then Select dropdown plugin property: "format" with option value: "csv" + Then Validate "File" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Failed" + Then Close the pipeline logs + + @XMLREADER_TEST @FILE_SINK_TEST + Scenario: Verify no data is transferred when an invalid node path is entered + Given Open Datafusion Project to configure pipeline + When Select plugin: "XML Reader" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "File" from the plugins list as: "Sink" + Then Connect plugins: "XMLReader" and "File" to establish connection + Then Navigate to the properties page of plugin: "XMLReader" + Then Enter input plugin property: "referenceName" with value: "ReferenceName" + Then Enter input plugin property: "path" with value: "xmlTestFile" + Then Enter input plugin property: "nodePath" with value: "invalidNode" + Then Select dropdown plugin property: "reprocessingRequired" with option value: "No" + Then Validate "XMLReader" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "File" + Then Enter input plugin property: "referenceName" with value: "FileReferenceName" + Then Enter input plugin property: "path" with value: "fileSinkTargetBucket" + Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm" + Then Select dropdown plugin property: "format" with option value: "csv" + Then Validate "File" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate OUT record count is equal to IN record count diff --git a/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/CorePlugin.java b/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/CorePlugin.java 
index 9817adb77..a39278ade 100644 --- a/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/CorePlugin.java +++ b/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/CorePlugin.java @@ -16,13 +16,17 @@ package io.cdap.plugin.common.stepsdesign; import com.google.cloud.storage.Blob; +import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageException; +import com.google.cloud.storage.StorageOptions; import io.cdap.e2e.utils.CdfHelper; import io.cdap.e2e.utils.PluginPropertyUtils; import io.cdap.e2e.utils.StorageClient; import io.cucumber.java.en.Then; import org.apache.directory.api.util.Strings; import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import stepsdesign.BeforeActions; import java.io.BufferedReader; @@ -41,6 +45,7 @@ * Core Plugin Common Step Design. */ public class CorePlugin implements CdfHelper { + private static final Logger LOG = LoggerFactory.getLogger(CorePlugin.class); @Then("Verify the CSV Output File matches the Expected Output File: {string} With Expected Partitions: {string}") public void verifyCSVOutput(String file, String expectedPartitions) { String gcsTargetBucket = PluginPropertyUtils.pluginProp("gcsTargetBucket"); @@ -126,4 +131,49 @@ public void validateOutputOrcFileGeneratedByFileSinkPluginIsEqualToExpectedOutpu PluginPropertyUtils.pluginProp(expectedOutputFilePath)); } } + + @Then("Validate that file gets successfully deleted from the gcs bucket") + public static boolean validateThatFileGetsDeletedFromTheGcsBucket() { + String bucketName = TestSetupHooks.fileSourceBucket; + String fileName = PluginPropertyUtils.pluginProp("xmlFileName"); + // Instantiate a client for Google Cloud Storage + Storage storage = StorageOptions.newBuilder().setProjectId(PluginPropertyUtils.pluginProp("projectId")) + .build().getService(); + // Check if the file exists in the bucket + Blob blob = storage.get(bucketName, fileName); + // If blob is null, the file does not exist + boolean isDeleted = (blob == null); + if (isDeleted) { + LOG.info("The file " + fileName + " has been successfully deleted from the bucket " + bucketName + "."); + } else { + LOG.info("The file " + fileName + " still exists in the bucket " + bucketName + "."); + } + return isDeleted; + } + + @Then("Validate that file gets successfully moved to the target location") + public static boolean verifyFileMovedWithinGCSBucket() { + // Instantiate a client for Google Cloud Storage with the specified project ID + Storage storage = StorageOptions.newBuilder().setProjectId(PluginPropertyUtils.pluginProp("projectId")) + .build().getService(); + String bucketName = TestSetupHooks.fileSourceBucket; + String fileName = PluginPropertyUtils.pluginProp("xmlFileName"); + String targetLocation = PluginPropertyUtils.pluginProp("bucketName"); + // Check if the source file exists + Blob sourceBlob = storage.get(bucketName, fileName); + + // Check if the target file exists + Blob targetBlob = storage.get(targetLocation, fileName); + + // Verify the file has been moved by checking if the source file does not exist and the target file exists + boolean isMoved = sourceBlob == null && targetBlob != null; + if (isMoved) { + LOG.info("The file " + fileName + " was successfully moved to target location in the bucket "); + } else if (sourceBlob != null) { + LOG.info("The source file " + fileName + " still exists in the bucket " + bucketName + "."); + } else { + LOG.info("The target file " + fileName + " does not exist in the bucket " + bucketName + "."); + } + 
return isMoved; + } } diff --git a/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java index 76269ab75..cc1271959 100644 --- a/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java +++ b/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java @@ -45,6 +45,7 @@ public class TestSetupHooks { public static String gcsSourceBucketName1 = StringUtils.EMPTY; public static String gcsSourceBucketName2 = StringUtils.EMPTY; public static String gcsTargetBucketName = StringUtils.EMPTY; + public static String fileSourceBucket = StringUtils.EMPTY; public static String fileSourceBucket1 = StringUtils.EMPTY; public static String fileSourceBucket2 = StringUtils.EMPTY; public static String fileSourceBucket3 = StringUtils.EMPTY; @@ -291,6 +292,12 @@ public static void deleteSourceBucketWithFileCSVDataTypeTest1() { fileSourceBucket1 = StringUtils.EMPTY; } + @After(order = 1, value = "@XMLREADER_TEST or @XMLREADER_DELETE_TEST") + public static void deleteSourceBucketWithXmlFile() { + deleteGCSBucket(fileSourceBucket); + fileSourceBucket = StringUtils.EMPTY; + } + @Before(order = 1, value = "@CSV_NO_HEADER_FILE") public static void createBucketWithCSVNoHeaderFile() throws IOException, URISyntaxException { fileSourceBucket2 = createGCSBucketWithFile(PluginPropertyUtils.pluginProp("csvNoHeaderFile")); @@ -455,6 +462,12 @@ private static String createGCSBucketWithFile(String filePath) throws IOExceptio return bucketName; } + private static String createGCSBucketWithXmlFile(String filePath) throws IOException, URISyntaxException { + String bucketName = StorageClient.createBucket("e2e-test-xml").getName(); + StorageClient.uploadObject(bucketName, filePath, filePath); + return bucketName; + } + private static void deleteGCSBucket(String bucketName) { try { for (Blob blob : StorageClient.listObjects(bucketName).iterateAll()) { @@ -494,4 +507,20 @@ public static void createBucketWithExcelFile() throws IOException, URISyntaxExce PluginPropertyUtils.pluginProp("excelFile")); BeforeActions.scenario.write("excel test bucket name - " + fileSourceBucket1); } + + @Before(order = 1, value = "@XMLREADER_TEST") + public static void createBucketWithXmlFile() throws IOException, URISyntaxException { + fileSourceBucket = createGCSBucketWithXmlFile(PluginPropertyUtils.pluginProp("xmlFile")); + PluginPropertyUtils.addPluginProp("xmlTestFile", "gs://" + fileSourceBucket + "/" + + PluginPropertyUtils.pluginProp("xmlFile")); + BeforeActions.scenario.write("xml test bucket name - " + fileSourceBucket); + } + + @Before(order = 1, value = "@XMLREADER_DELETE_TEST") + public static void createBucketWithXmlFileForTestPattern() throws IOException, URISyntaxException { + fileSourceBucket = createGCSBucketWithXmlFile(PluginPropertyUtils.pluginProp("xmlFile")); + PluginPropertyUtils.addPluginProp("xmlTestFile", "gs://" + fileSourceBucket + "/testdata/xmldata/" + + "*"); + BeforeActions.scenario.write("xml test bucket name - " + fileSourceBucket); + } } diff --git a/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunner.java b/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunner.java new file mode 100644 index 000000000..1bf8ebf18 --- /dev/null +++ b/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunner.java @@ -0,0 +1,37 @@ +/* + * Copyright © 2024 Cask Data, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +package io.cdap.plugin.xmlreader.runners; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute XmlReader Source plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@XmlReader_Source"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/XmlReader-source", + "json:target/cucumber-reports/cucumber-xmlreader-source.json", + "junit:target/cucumber-reports/cucumber-xmlreader-source.xml"} +) +public class TestRunner { +} diff --git a/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunnerRequired.java b/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunnerRequired.java new file mode 100644 index 000000000..c13e9050f --- /dev/null +++ b/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/TestRunnerRequired.java @@ -0,0 +1,36 @@ +/* + * Copyright © 2024 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin.xmlreader.runners; + +import io.cucumber.junit.Cucumber; +import io.cucumber.junit.CucumberOptions; +import org.junit.runner.RunWith; + +/** + * Test Runner to execute only required XmlReader Source plugin testcases. + */ +@RunWith(Cucumber.class) +@CucumberOptions( + features = {"src/e2e-test/features"}, + glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign"}, + tags = {"@XmlReader_Source_Required"}, + monochrome = true, + plugin = {"pretty", "html:target/cucumber-html-report/xmlreader-source", + "json:target/cucumber-reports/cucumber-xmlreader-source.json", + "junit:target/cucumber-reports/cucumber-xmlreader-source.xml"} +) +public class TestRunnerRequired { +} diff --git a/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/package-info.java b/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/package-info.java new file mode 100644 index 000000000..629fc7164 --- /dev/null +++ b/core-plugins/src/e2e-test/java/io/cdap/plugin/xmlreader/runners/package-info.java @@ -0,0 +1,19 @@ +/* + * Copyright © 2024 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. 
You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * Package contains the runner for the XmlReader source plugin.
+ */
+package io.cdap.plugin.xmlreader.runners;
diff --git a/core-plugins/src/e2e-test/resources/pluginDataCyAttributes.properties b/core-plugins/src/e2e-test/resources/pluginDataCyAttributes.properties
index 1339ef272..07bb1c323 100644
--- a/core-plugins/src/e2e-test/resources/pluginDataCyAttributes.properties
+++ b/core-plugins/src/e2e-test/resources/pluginDataCyAttributes.properties
@@ -49,3 +49,4 @@ errorCodeColumnName=codeField
 errorEmitterNodeName=stageField
 recipe=directives
 onError=on-error
+ActionAfterProcessingFile=actionAfterProcess
diff --git a/core-plugins/src/e2e-test/resources/pluginParameters.properties b/core-plugins/src/e2e-test/resources/pluginParameters.properties
index 0e4fbf0df..20a660036 100644
--- a/core-plugins/src/e2e-test/resources/pluginParameters.properties
+++ b/core-plugins/src/e2e-test/resources/pluginParameters.properties
@@ -291,6 +291,15 @@ quotedValueDelimitedTestFiles=dummy
 fileSinkTargetBucket=file-plugin-output
 recursiveTest=dummy
 testOnCdap=true
+xmlFile=testdata/xmldata/testxmlfile.xml
+node=/students/student
+outputForXMLTest=e2e-tests/file/expected_outputs/OUTPUT_FOR_XMLREADER_TEST.csv
+filePattern=^testxmlfile.xml
+folder=gs://e2e-test-xml/
+bucketName=e2e-test-xml
+invalidPattern=abcd
+invalidNode=dummy
+xmlFileName=testxmlfile.xml

 ## EXCEL-PLUGIN-PROPERTIES-START ##
 excelTestFile=dummy
diff --git a/core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_XMLREADER_TEST.csv b/core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_XMLREADER_TEST.csv
new file mode 100644
index 000000000..356d6ab10
--- /dev/null
+++ b/core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_XMLREADER_TEST.csv
@@ -0,0 +1,15 @@
+3,gs://e2e-test-xml/testdata/xmldata/testxmlfile.xml,<student>
+        <id>1</id>
+        <name>John Doe</name>
+        <address>123 Main Street, Cityville</address>
+    </student>
+8,gs://e2e-test-xml/testdata/xmldata/testxmlfile.xml,<student>
+        <id>2</id>
+        <name>Jane Smith</name>
+        <address>456 Elm Street, Townsville</address>
+    </student>
+13,gs://e2e-test-xml/testdata/xmldata/testxmlfile.xml,<student>
+        <id>3</id>
+        <name>Alice Johnson</name>
+        <address>789 Oak Street, Villageton</address>
+    </student>
diff --git a/core-plugins/src/e2e-test/resources/testdata/xmldata/testxmlfile.xml b/core-plugins/src/e2e-test/resources/testdata/xmldata/testxmlfile.xml
new file mode 100644
index 000000000..82b08b6fa
--- /dev/null
+++ b/core-plugins/src/e2e-test/resources/testdata/xmldata/testxmlfile.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<students>
+    <student>
+        <id>1</id>
+        <name>John Doe</name>
+        <address>123 Main Street, Cityville</address>
+    </student>
+    <student>
+        <id>2</id>
+        <name>Jane Smith</name>
+        <address>456 Elm Street, Townsville</address>
+    </student>
+    <student>
+        <id>3</id>
+        <name>Alice Johnson</name>
+        <address>789 Oak Street, Villageton</address>
+    </student>
+</students>
\ No newline at end of file