From 8d5512fa45829289595ba5d2168dcd816564b753 Mon Sep 17 00:00:00 2001
From: AnkitCLI
Date: Wed, 22 May 2024 13:15:32 +0530
Subject: [PATCH] e2e-RowDenormalizer-ITN

---
 .../rowdenormalizer/rowDenormalizer.feature   | 60 +++++++++++++++++++
 .../common/stepsdesign/TestSetupHooks.java    | 10 +++-
 .../resources/pluginParameters.properties     |  8 +++
 .../file/CSV_ROWDENORMALIZER_TEST.csv         | 10 ++++
 .../OUTPUT_FOR_ROWDENO_TEST.csv               |  4 ++
 5 files changed, 91 insertions(+), 1 deletion(-)
 create mode 100644 core-plugins/src/e2e-test/features/rowdenormalizer/rowDenormalizer.feature
 create mode 100644 core-plugins/src/e2e-test/resources/testdata/file/CSV_ROWDENORMALIZER_TEST.csv
 create mode 100644 core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_ROWDENO_TEST.csv

diff --git a/core-plugins/src/e2e-test/features/rowdenormalizer/rowDenormalizer.feature b/core-plugins/src/e2e-test/features/rowdenormalizer/rowDenormalizer.feature
new file mode 100644
index 000000000..c3bb94880
--- /dev/null
+++ b/core-plugins/src/e2e-test/features/rowdenormalizer/rowDenormalizer.feature
@@ -0,0 +1,60 @@
+# Copyright © 2024 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@File_Sink
+Feature:File Sink - Verify File source to File sink data transfer using RowDenormalizer analytics
+
+  @ROW_DENO_TEST @FILE_SINK_TEST @File_Sink_Required
+  Scenario: To verify data is getting transferred from File to File sink plugin successfully with RowDenormalizer plugin
+    Given Open Datafusion Project to configure pipeline
+    When Select plugin: "File" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Analytics"
+    When Select plugin: "RowDenormalizer" from the plugins list as: "Analytics"
+    Then Connect plugins: "File" and "RowDenormalizer" to establish connection
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "File" from the plugins list as: "Sink"
+    Then Connect plugins: "RowDenormalizer" and "File2" to establish connection
+    Then Navigate to the properties page of plugin: "File"
+    Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
+    Then Enter input plugin property: "path" with value: "rowDenoTest"
+    Then Select dropdown plugin property: "format" with option value: "csv"
+    Then Click plugin property: "skipHeader"
+    Then Click on the Get Schema button
+    Then Change datatype of fields in output schema with : "fileDatatypeChange"
+    Then Validate "File" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "RowDenormalizer"
+    Then Enter input plugin property: "keyField" with value: "keyValue"
+    Then Enter input plugin property: "nameField" with value: "nameValue"
+    Then Enter input plugin property: "valueField" with value: "value"
+    Then Enter Value for plugin property table key : "outputFields" with values: "fieldValues"
+    Then Enter key value pairs for plugin property: "fieldAliases" with values from json: "columnValues"
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "File2"
+    Then Enter input plugin property: "referenceName" with value: "FileReferenceName"
+    Then Enter input plugin property: "path" with value: "fileSinkTargetBucket"
+    Then Replace input plugin property: "pathSuffix" with value: "yyyy-MM-dd-HH-mm-ss"
+    Then Select dropdown plugin property: "format" with option value: "csv"
+    Then Click plugin property: "writeHeader"
+    Then Validate "File2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate output file generated by file sink plugin "fileSinkTargetBucket" is equal to expected output file "rowDenoTestOutputFile"
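The keyField/nameField/valueField/outputFields/fieldAliases values entered in the RowDenormalizer steps above are macro names that resolve to properties added further down in this patch (keyValue=id, nameValue=attribute, fieldValues=city, columnValues mapping city to newCity). A rough sketch of the schemas the scenario ends up working with; it assumes the Schema API from cdap-api is on the classpath, and the record names are illustrative only:

import io.cdap.cdap.api.data.schema.Schema;

public class RowDenoSchemaSketch {
  public static void main(String[] args) {
    // File source schema once the "fileDatatypeChange" step forces id to string
    // (presumably so the key column is treated as plain text end to end):
    Schema source = Schema.recordOf("input",
        Schema.Field.of("id", Schema.of(Schema.Type.STRING)),
        Schema.Field.of("attribute", Schema.of(Schema.Type.STRING)),
        Schema.Field.of("value", Schema.of(Schema.Type.STRING)));
    // RowDenormalizer output: the key field plus one column per outputFields entry,
    // renamed through the fieldAliases mapping ("city" -> "newCity"):
    Schema denormalized = Schema.recordOf("output",
        Schema.Field.of("id", Schema.of(Schema.Type.STRING)),
        Schema.Field.of("newCity", Schema.of(Schema.Type.STRING)));
    System.out.println(source);
    System.out.println(denormalized);
  }
}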
diff --git a/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
index cc1271959..ff161c70d 100644
--- a/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
+++ b/core-plugins/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -167,7 +167,7 @@ public static void createBucketWithGroupByTest1File() throws IOException, URISyn
     BeforeActions.scenario.write("Group by bucket name - " + gcsSourceBucketName1);
   }
 
-  @After(order = 1, value = "@GROUP_BY_TEST")
+  @After(order = 1, value = "@GROUP_BY_TEST or @ROW_DENO_TEST")
   public static void deleteSourceBucketWithGroupByTest1File() {
     deleteGCSBucket(gcsSourceBucketName1);
     gcsSourceBucketName1 = StringUtils.EMPTY;
@@ -523,4 +523,12 @@ public static void createBucketWithXmlFileForTestPattern() throws IOException, U
                                      + "*");
     BeforeActions.scenario.write("xml test bucket name - " + fileSourceBucket);
   }
+
+  @Before(order = 1, value = "@ROW_DENO_TEST")
+  public static void createBucketWithRowDenormalizerTestFile() throws IOException, URISyntaxException {
+    gcsSourceBucketName1 = createGCSBucketWithFile(PluginPropertyUtils.pluginProp("rowDenoGcsCsvFile"));
+    PluginPropertyUtils.addPluginProp("rowDenoTest", "gs://" + gcsSourceBucketName1 + "/"
+      + PluginPropertyUtils.pluginProp("rowDenoGcsCsvFile"));
+    BeforeActions.scenario.write("RowDenormalizer bucket name - " + gcsSourceBucketName1);
+  }
 }
diff --git a/core-plugins/src/e2e-test/resources/pluginParameters.properties b/core-plugins/src/e2e-test/resources/pluginParameters.properties
index 20a660036..667c45eec 100644
--- a/core-plugins/src/e2e-test/resources/pluginParameters.properties
+++ b/core-plugins/src/e2e-test/resources/pluginParameters.properties
@@ -300,6 +300,14 @@ bucketName=e2e-test-xml
 invalidPattern=abcd
 invalidNode=dummy
 xmlFileName=testxmlfile.xml
+rowDenoTest=dummy
+rowDenoGcsCsvFile=testdata/file/CSV_ROWDENORMALIZER_TEST.csv
+keyValue=id
+nameValue=attribute
+fieldValues=city
+columnValues=[{"key":"city","value":"newCity"}]
+rowDenoTestOutputFile=e2e-tests/file/expected_outputs/OUTPUT_FOR_ROWDENO_TEST.csv
+fileDatatypeChange=[{"key":"id","value":"string"}]
 
 ## EXCEL-PLUGIN-PROPERTIES-START ##
 excelTestFile=dummy
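Note that rowDenoTest starts out as "dummy" here only because the @ROW_DENO_TEST @Before hook above overwrites it with the generated gs:// path at runtime, so the feature's "path" step picks up the real bucket. To make the fixture concrete: with keyValue=id, fieldValues=city and the city->newCity alias, the plugin is expected to collapse the three attribute rows per id into one row carrying only the city value. A minimal sketch of that pivot in plain Java (not the plugin's actual implementation), using the rows from CSV_ROWDENORMALIZER_TEST.csv added just below:

import java.util.LinkedHashMap;
import java.util.Map;

public class RowDenormalizeSketch {
  public static void main(String[] args) {
    // id, attribute, value triples from CSV_ROWDENORMALIZER_TEST.csv.
    String[][] rows = {
        {"1", "name", "John"}, {"1", "age", "25"}, {"1", "city", "New York"},
        {"2", "name", "Jane"}, {"2", "age", "30"}, {"2", "city", "San Francisco"},
        {"3", "name", "Alice"}, {"3", "age", "28"}, {"3", "city", "Los Angeles"}
    };
    Map<String, String> cityById = new LinkedHashMap<>();
    for (String[] row : rows) {
      if ("city".equals(row[1])) {          // only "city" is listed in outputFields
        cityById.put(row[0], row[2]);       // one row per key field value ("id")
      }
    }
    System.out.println("id,newCity");        // alias taken from columnValues
    cityById.forEach((id, city) -> System.out.println(id + "," + city));
    // Prints the same rows as OUTPUT_FOR_ROWDENO_TEST.csv, though in input order (1,2,3).
  }
}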
diff --git a/core-plugins/src/e2e-test/resources/testdata/file/CSV_ROWDENORMALIZER_TEST.csv b/core-plugins/src/e2e-test/resources/testdata/file/CSV_ROWDENORMALIZER_TEST.csv
new file mode 100644
index 000000000..dffc5be07
--- /dev/null
+++ b/core-plugins/src/e2e-test/resources/testdata/file/CSV_ROWDENORMALIZER_TEST.csv
@@ -0,0 +1,10 @@
+id,attribute,value
+1,name,John
+1,age,25
+1,city,New York
+2,name,Jane
+2,age,30
+2,city,San Francisco
+3,name,Alice
+3,age,28
+3,city,Los Angeles
\ No newline at end of file
diff --git a/core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_ROWDENO_TEST.csv b/core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_ROWDENO_TEST.csv
new file mode 100644
index 000000000..e2d422ebb
--- /dev/null
+++ b/core-plugins/src/e2e-test/resources/testdata/file/expected_outputs/OUTPUT_FOR_ROWDENO_TEST.csv
@@ -0,0 +1,4 @@
+id,newCity
+2,San Francisco
+3,Los Angeles
+1,New York
\ No newline at end of file
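The expected output lists the ids as 2,3,1 rather than in source order. For eyeballing the fixture pair against a local pipeline run without depending on row order, a hypothetical throwaway check could compare the data rows as sets; this is not the e2e framework's validation step, and the file-name filtering below is an assumption about how the File sink lays out part files:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class RowDenoOutputCheck {

  // Keep non-empty, non-header lines so row order and part-file splits do not matter.
  private static Set<String> dataRows(List<String> lines) {
    return lines.stream()
        .filter(line -> !line.isEmpty() && !line.startsWith("id,"))
        .collect(Collectors.toSet());
  }

  public static void main(String[] args) throws IOException {
    Path sinkDir = Paths.get(args[0]);   // directory written by the File sink plugin
    Path expected = Paths.get(args[1]);  // OUTPUT_FOR_ROWDENO_TEST.csv
    List<String> actual = new ArrayList<>();
    try (Stream<Path> files = Files.walk(sinkDir)) {
      for (Path file : files.filter(Files::isRegularFile)
                            .filter(p -> !p.getFileName().toString().startsWith(".")
                                      && !p.getFileName().toString().startsWith("_"))
                            .collect(Collectors.toList())) {
        actual.addAll(Files.readAllLines(file));
      }
    }
    boolean matches = dataRows(actual).equals(dataRows(Files.readAllLines(expected)));
    System.out.println(matches ? "MATCH" : "MISMATCH");
  }
}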