From 1bb71a8f82261bb95207cc7da4e9d4f7d01bdf1b Mon Sep 17 00:00:00 2001
From: AnkitCLI
Date: Thu, 4 Apr 2024 11:24:17 +0530
Subject: [PATCH] ITN coverage for SQL engine

---
 .../bigquery/source/BigQuerySqlEngine.feature | 209 ++++++++++++++++++
 .../runners/sinkrunner/TestRunner.java        |   3 +-
 .../sinkrunner/TestRunnerRequired.java        |   3 +-
 .../plugin/bigquery/stepsdesign/BigQuery.java |  10 +
 .../ValidationHelperSqlEngine.java            | 112 ++++++++++
 .../common/stepsdesign/TestSetupHooks.java    |  44 +++-
 .../groupby/actions/GroupByActions.java       |  78 +++++++
 .../plugin/groupby/actions/package-info.java  |   4 +
 .../groupby/locators/GroupByLocators.java     |  51 +++++
 .../plugin/groupby/locators/package-info.java |   4 +
 .../plugin/groupby/stepsdesign/GroupBy.java   |  31 +++
 .../groupby/stepsdesign/package-info.java     |   4 +
 .../resources/pluginParameters.properties     |  16 +-
 .../BQValidationExpectedFiles/bqexpected_Test |   2 +
 .../deduplicate_Test                          |   2 +
 .../BQValidationExpectedFiles/groupby_Test    |   2 +
 .../BigQuery/BigQueryCreateTableJoin.txt      |   1 +
 .../BigQuery/BigQueryCreateTableSql.txt       |   1 +
 .../BigQuery/BigQueryInsertDataJoin.txt       |   4 +
 .../BigQuery/BigQueryInsertTableSql.txt       |   5 +
 20 files changed, 582 insertions(+), 4 deletions(-)
 create mode 100644 src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
 create mode 100644 src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelperSqlEngine.java
 create mode 100644 src/e2e-test/java/io/cdap/plugin/groupby/actions/GroupByActions.java
 create mode 100644 src/e2e-test/java/io/cdap/plugin/groupby/actions/package-info.java
 create mode 100644 src/e2e-test/java/io/cdap/plugin/groupby/locators/GroupByLocators.java
 create mode 100644 src/e2e-test/java/io/cdap/plugin/groupby/locators/package-info.java
 create mode 100644 src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/GroupBy.java
 create mode 100644 src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/package-info.java
 create mode 100644 src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test
 create mode 100644 src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test
 create mode 100644 src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test
 create mode 100644 src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableJoin.txt
 create mode 100644 src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt
 create mode 100644 src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataJoin.txt
 create mode 100644 src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt

diff --git a/src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature b/src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
new file mode 100644
index 0000000000..71a68d4dce
--- /dev/null
+++ b/src/e2e-test/features/bigquery/source/BigQuerySqlEngine.feature
@@ -0,0 +1,209 @@
+# Copyright © 2024 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BigQuery_Sink
+Feature: BigQuery SQL Engine - Verification of BigQuery to BigQuery successful data transfer with Transformation Pushdown
+
+  @BQ_SOURCE_JOINER_TEST @BQ_SOURCE_JOINER2_TEST @BQ_DELETE_JOIN @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
+  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using Join
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Analytics"
+    When Select plugin: "Joiner" from the plugins list as: "Analytics"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "BigQuery" and "Joiner" to establish connection
+    Then Connect plugins: "BigQuery2" and "Joiner" to establish connection
+    Then Connect plugins: "Joiner" and "BigQuery3" to establish connection
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQRefName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqSourceTable2"
+    Then Validate "BigQuery2" plugin properties
+    And Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "Joiner"
+    Then Select radio button plugin property: "conditionType" with value: "basic"
+    Then Click on the Get Schema button
+    Then Validate "Joiner" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery3"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery3" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Click on "Configure" button
+    Then Click on "Transformation Pushdown" button
+    Then Click on "Enable Transformation Pushdown" button
+    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
+    Then Click on "Advanced" button
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on "Save" button
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate The Data From BQ To BQ With Actual And Expected File for: "bqExpectedFileJoin"
+
+  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
+  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using group by
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    When Expand Plugin group in the LHS plugins list: "Analytics"
+    When Select plugin: "Group By" from the plugins list as: "Analytics"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Connect plugins: "BigQuery" and "Group By" to establish connection
+    Then Connect plugins: "Group By" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "Group By"
+    Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidFirstField"
+    Then Press Escape Key
+    Then Select dropdown plugin property: "groupByFields" with option value: "groupByValidSecondField"
+    Then Press Escape Key
+    Then Enter GroupBy plugin Fields to be Aggregate "groupByGcsAggregateFields"
+    Then Click on the Get Schema button
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery2" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Click on "Configure" button
+    Then Click on "Transformation Pushdown" button
+    Then Click on "Enable Transformation Pushdown" button
+    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
+    Then Click on "Advanced" button
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on "Save" button
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate The Data From BQ To BQ With Actual And Expected File for: "groupByTestOutputFile"
+
+  @BQ_SOURCE_SQLENGINE_TEST @BQ_SINK_TEST @EXISTING_BQ_CONNECTION
+  Scenario: Validate successful records transfer from BigQuery source to BigQuery sink using deduplicate
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "BigQuery" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    When Expand Plugin group in the LHS plugins list: "Analytics"
+    When Select plugin: "Deduplicate" from the plugins list as: "Analytics"
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Click plugin property: "switch-useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    And Replace input plugin property: "dataset" with value: "dataset"
+    And Replace input plugin property: "table" with value: "bqSourceTable"
+    Then Click on the Get Schema button
+    Then Validate "BigQuery" plugin properties
+    And Close the Plugin Properties page
+    Then Connect plugins: "BigQuery" and "Deduplicate" to establish connection
+    Then Connect plugins: "Deduplicate" and "BigQuery2" to establish connection
+    Then Navigate to the properties page of plugin: "Deduplicate"
+    Then Select dropdown plugin property: "uniqueFields" with option value: "DeduplicateValidFirstField"
+    Then Press Escape Key
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery2"
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery2" plugin properties
+    And Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Click on "Configure" button
+    Then Click on "Transformation Pushdown" button
+    Then Click on "Enable Transformation Pushdown" button
+    Then Enter input plugin property: "dataset" with value: "test_sqlengine"
+    Then Click on "Advanced" button
+    Then Click plugin property: "useConnection"
+    Then Click on the Browse Connections button
+    Then Select connection: "bqConnectionName"
+    Then Click on "Save" button
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Close the pipeline logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate The Data From BQ To BQ With Actual And Expected File for: "deduplicateTestOutputFile"
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunner.java
index b6085ccb1e..afe530bd03 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunner.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunner.java
@@ -26,7 +26,8 @@
 @CucumberOptions(
   features = {"src/e2e-test/features"},
   glue = {"io.cdap.plugin.bigquery.stepsdesign", "io.cdap.plugin.gcs.stepsdesign",
-    "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+    "stepsdesign", "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.groupby.actions",
+    "io.cdap.plugin.groupby.locators", "io.cdap.plugin.groupby.stepsdesign"},
   tags = {"@BigQuery_Sink and not @CDAP-20830"},
   //TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/CDAP-20830
   monochrome = true,
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunnerRequired.java b/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunnerRequired.java
index 1347921079..cd2f656884 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunnerRequired.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/runners/sinkrunner/TestRunnerRequired.java
@@ -26,7 +26,8 @@
 @CucumberOptions(
   features = {"src/e2e-test/features"},
   glue = {"io.cdap.plugin.bigquery.stepsdesign", "io.cdap.plugin.gcs.stepsdesign",
-    "stepsdesign", "io.cdap.plugin.common.stepsdesign"},
+    "stepsdesign", "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.groupby.actions",
+    "io.cdap.plugin.groupby.locators", "io.cdap.plugin.groupby.stepsdesign"},
   tags = {"@BigQuery_Sink_Required"},
   monochrome = true,
   //TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/CDAP-20830
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java
index 215886662d..7a1f1f6cbd 100644
--- a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/BigQuery.java
@@ -25,6 +25,7 @@
 import stepsdesign.BeforeActions;
 
 import java.io.IOException;
+import java.net.URISyntaxException;
 
 /**
  * BigQuery Plugin validation common step design.
@@ -44,4 +45,13 @@ public void validateTheValuesOfRecordsTransferredToBQsinkIsEqualToTheValuesFromS
     Assert.assertTrue("Value of records transferred to the BQ sink should be equal to the value " +
       "of the records in the source table", recordsMatched);
   }
+
+  @Then("Validate The Data From BQ To BQ With Actual And Expected File for: {string}")
+  public void validateTheDataFromBQToBQWithActualAndExpectedFileFor(String expectedFile) throws IOException,
+    InterruptedException, URISyntaxException {
+    boolean recordsMatched = ValidationHelperSqlEngine.validateActualDataToExpectedData(
+      PluginPropertyUtils.pluginProp("bqTargetTable"),
+      PluginPropertyUtils.pluginProp(expectedFile));
+    Assert.assertTrue("Value of records in actual and expected file should be equal", recordsMatched);
+  }
 }
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelperSqlEngine.java b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelperSqlEngine.java
new file mode 100644
index 0000000000..515d8e8011
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquery/stepsdesign/ValidationHelperSqlEngine.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.bigquery.stepsdesign;
+
+import com.esotericsoftware.minlog.Log;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.TableResult;
+import com.google.gson.Gson;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cucumber.core.logging.Logger;
+import io.cucumber.core.logging.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Validation helper for the BigQuery SQL engine (Transformation Pushdown) tests.
+ */
+public class ValidationHelperSqlEngine {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ValidationHelperSqlEngine.class);
+  static Gson gson = new Gson();
+
+  /**
+   * Validates the actual data from a BigQuery table against the expected data from a file.
+   *
+   * @param table The name of the BigQuery table to fetch data from
+   * @param fileName The name of the file containing the expected data
+   * @return True if the actual data matches the expected data, otherwise false
+   */
+  public static boolean validateActualDataToExpectedData(String table, String fileName) throws IOException,
+    InterruptedException, URISyntaxException {
+    // Initialize maps to store data from BigQuery and file, keyed by the "id" field
+    Map<String, JsonObject> bigQueryMap = new HashMap<>();
+    Map<String, JsonObject> fileMap = new HashMap<>();
+    // Get the path of the expected file
+    Path importExpectedFile = Paths.get(ValidationHelperSqlEngine.class.getResource("/" + fileName).toURI());
+
+    getBigQueryTableData(table, bigQueryMap);
+    getFileData(importExpectedFile.toString(), fileMap);
+
+    // Compare the data from BigQuery with the data from the file
+    boolean isMatched = bigQueryMap.equals(fileMap);
+
+    return isMatched;
+  }
+
+  public static void getFileData(String fileName, Map<String, JsonObject> fileMap) {
+    try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+      String line;
+      while ((line = br.readLine()) != null) {
+        JsonObject json = gson.fromJson(line, JsonObject.class);
+        if (json.has("id")) { // Check if the JSON object has the "id" key
+          JsonElement idElement = json.get("id");
+          if (idElement.isJsonPrimitive()) {
+            String idKey = idElement.getAsString();
+            fileMap.put(idKey, json);
+          } else {
+            Log.error("ID value is not a JSON primitive");
+          }
+        }
+      }
+    } catch (IOException e) {
+      LOG.error("Error reading the file: " + e.getMessage());
+    }
+  }
+
+  private static void getBigQueryTableData(String targetTable, Map<String, JsonObject> bigQueryMap)
+    throws IOException, InterruptedException {
+    String dataset = PluginPropertyUtils.pluginProp("dataset");
+    String projectId = PluginPropertyUtils.pluginProp("projectId");
+    String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + targetTable + "` AS t";
+    TableResult result = BigQueryClient.getQueryResult(selectQuery);
+
+    for (FieldValueList row : result.iterateAll()) {
+      JsonObject json = gson.fromJson(row.get(0).getStringValue(), JsonObject.class);
+      if (json.has("id")) { // Check if the JSON object has the "id" key
+        JsonElement idElement = json.get("id");
+        if (idElement.isJsonPrimitive()) {
+          String idKey = idElement.getAsString();
+          bigQueryMap.put(idKey, json);
+        } else {
+          LOG.error("ID value is not a JSON primitive");
+        }
+      } else {
+        LOG.error("ID Key not found in JSON object");
+      }
+    }
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
index 71ead8dd11..68196a8ad9 100644
--- a/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
+++ b/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -294,7 +294,7 @@ public static void createTempSourceBQTable() throws IOException, InterruptedExce
 
   @After(order = 1, value = "@BQ_SOURCE_TEST or @BQ_PARTITIONED_SOURCE_TEST or @BQ_SOURCE_DATATYPE_TEST or " +
     "@BQ_INSERT_SOURCE_TEST or @BQ_UPDATE_SINK_TEST or @BQ_EXISTING_SOURCE_TEST or @BQ_EXISTING_SINK_TEST or " +
-    "@BQ_EXISTING_SOURCE_DATATYPE_TEST or @BQ_EXISTING_SINK_DATATYPE_TEST")
+    "@BQ_EXISTING_SOURCE_DATATYPE_TEST or @BQ_EXISTING_SINK_DATATYPE_TEST or @BQ_SOURCE_SQLENGINE_TEST")
   public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
     BigQueryClient.dropBqQuery(bqSourceTable);
     PluginPropertyUtils.removePluginProp("bqSourceTable");
@@ -1636,4 +1636,46 @@ public static void deleteTargetBqmtTable() throws IOException, InterruptedExcept
       }
     }
   }
+  @Before(order = 1, value = "@BQ_SOURCE_SQLENGINE_TEST")
+  public static void createSourceBQTableForSqlEngine() throws IOException, InterruptedException {
+    createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("bqCreateTableQueryFileSQL"),
+                                   PluginPropertyUtils.pluginProp("bqInsertDataQueryFileSQL"));
+  }
+
+  @Before(order = 1, value = "@BQ_SOURCE_JOINER_TEST")
+  public static void createSourceBQTableForJoiner() throws IOException, InterruptedException {
+    createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("bqCreateTableQueryFileJoin"),
+                                   PluginPropertyUtils.pluginProp("bqInsertDataQueryFileJoin"));
+  }
+
+  @Before(order = 1, value = "@BQ_SOURCE_JOINER2_TEST")
+  public static void createSecondBQTableJoin() throws IOException, InterruptedException {
+    bqSourceTable2 = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
+    BigQueryClient.getSoleQueryResult("create table `" + datasetName + "." + bqSourceTable2 + "` " +
+                                        "(dept_id INT64, dept_name STRING)");
+    try {
+      BigQueryClient.getSoleQueryResult("INSERT INTO `" + datasetName + "." + bqSourceTable2 + "` " +
+                                          "(dept_id, dept_name) " +
+                                          "VALUES (123, 'HR'), " +
+                                          "(125, 'IT')");
+    } catch (NoSuchElementException e) {
+      // Insert query does not return any record.
+      // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException
+      BeforeActions.scenario.write("Error inserting the record in the table: " + e.getMessage());
+    }
+    PluginPropertyUtils.addPluginProp("bqSourceTable2", bqSourceTable2);
+    BeforeActions.scenario.write("BQ source Table2 " + bqSourceTable2 + " created successfully");
+  }
+
+  @After(order = 1, value = "@BQ_DELETE_JOIN")
+  public static void deleteBQTablesJoin() throws IOException, InterruptedException {
+    BigQueryClient.dropBqQuery(bqSourceTable);
+    BigQueryClient.dropBqQuery(bqSourceTable2);
+    PluginPropertyUtils.removePluginProp("bqSourceTable");
+    PluginPropertyUtils.removePluginProp("bqSourceTable2");
+    BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
+    BeforeActions.scenario.write("BQ source Table2 " + bqSourceTable2 + " deleted successfully");
+    bqSourceTable = StringUtils.EMPTY;
+    bqSourceTable2 = StringUtils.EMPTY;
+  }
 }
diff --git a/src/e2e-test/java/io/cdap/plugin/groupby/actions/GroupByActions.java b/src/e2e-test/java/io/cdap/plugin/groupby/actions/GroupByActions.java
new file mode 100644
index 0000000000..1872a263fe
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/groupby/actions/GroupByActions.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.groupby.actions;
+
+import io.cdap.e2e.pages.locators.CdfPluginPropertiesLocators;
+import io.cdap.e2e.utils.ElementHelper;
+import io.cdap.e2e.utils.JsonUtils;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.e2e.utils.SeleniumDriver;
+import io.cdap.e2e.utils.SeleniumHelper;
+import io.cdap.plugin.groupby.locators.GroupByLocators;
+import io.cucumber.core.logging.Logger;
+import io.cucumber.core.logging.LoggerFactory;
+import org.openqa.selenium.ElementClickInterceptedException;
+
+import java.util.Map;
+
+/**
+ * GroupBy Related Actions.
+ */
+public class GroupByActions {
+  private static final Logger logger = LoggerFactory.getLogger(GroupByActions.class);
+
+  static {
+    SeleniumHelper.getPropertiesLocators(GroupByLocators.class);
+  }
+
+  /**
+   * Enters aggregate fields and their corresponding functions and aliases.
+   *
+   * @param jsonAggregatesFields plugin property key whose value is a JSON string of aggregate
+   *                             fields with their functions and aliases as key-value pairs
+   */
+  public static void enterAggregates(String jsonAggregatesFields) {
+    // Convert the JSON string to a map of "field#function[#condition]" keys to alias values
+    Map<String, String> fieldsMapping =
+      JsonUtils.convertKeyValueJsonArrayToMap(PluginPropertyUtils.pluginProp(jsonAggregatesFields));
+    int index = 0;
+    for (Map.Entry<String, String> entry : fieldsMapping.entrySet()) {
+      // Enter field name
+      ElementHelper.sendKeys(GroupByLocators.field(index), entry.getKey().split("#")[0]);
+      ElementHelper.clickOnElement(GroupByLocators.fieldFunction(index));
+      int attempts = 0;
+      while (attempts < 5) {
+        try {
+          ElementHelper.clickOnElement(SeleniumDriver.getDriver()
+            .findElement(CdfPluginPropertiesLocators.locateDropdownListItem(
+              entry.getKey().split("#")[1])));
+          break;
+        } catch (ElementClickInterceptedException e) {
+          if (attempts == 4) {
+            throw e;
+          }
+        }
+        attempts++;
+      }
+      if (entry.getKey().split("#")[1].contains("If")) {
+        ElementHelper.sendKeys(GroupByLocators.fieldFunctionCondition(index), entry.getKey().split("#")[2]);
+      }
+      ElementHelper.sendKeys(GroupByLocators.fieldFunctionAlias(index), entry.getValue());
+      ElementHelper.clickOnElement(GroupByLocators.fieldAddRowButton(index));
+      index++;
+    }
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/groupby/actions/package-info.java b/src/e2e-test/java/io/cdap/plugin/groupby/actions/package-info.java
new file mode 100644
index 0000000000..ee3c8e2204
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/groupby/actions/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the step actions for the groupby features.
+ */
+package io.cdap.plugin.groupby.actions;
diff --git a/src/e2e-test/java/io/cdap/plugin/groupby/locators/GroupByLocators.java b/src/e2e-test/java/io/cdap/plugin/groupby/locators/GroupByLocators.java
new file mode 100644
index 0000000000..8afea1abce
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/groupby/locators/GroupByLocators.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.groupby.locators;
+
+import io.cdap.e2e.utils.SeleniumDriver;
+import org.openqa.selenium.By;
+import org.openqa.selenium.WebElement;
+
+/**
+ * GroupBy Related Locators.
+ */
+public class GroupByLocators {
+
+  public static WebElement field(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='field']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldFunction(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldFunctionAlias(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='alias']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldAddRowButton(int row) {
+    String xpath = "//*[@data-cy='aggregates']//*[@data-cy='" + row + "']//button[@data-cy='add-row']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+
+  public static WebElement fieldFunctionCondition(int row) {
+    String xpath = "//div[@data-cy='aggregates']//div[@data-cy='" + row + "']//input[@placeholder='condition']";
+    return SeleniumDriver.getDriver().findElement(By.xpath(xpath));
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/groupby/locators/package-info.java b/src/e2e-test/java/io/cdap/plugin/groupby/locators/package-info.java
new file mode 100644
index 0000000000..809ea77947
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/groupby/locators/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the locators for the groupby features.
+ */
+package io.cdap.plugin.groupby.locators;
diff --git a/src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/GroupBy.java b/src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/GroupBy.java
new file mode 100644
index 0000000000..0f47477ac0
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/GroupBy.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright © 2024 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.groupby.stepsdesign;
+
+import io.cdap.plugin.groupby.actions.GroupByActions;
+import io.cucumber.java.en.Then;
+
+/**
+ * GroupBy related common stepDesigns.
+ */
+public class GroupBy {
+
+  @Then("Enter GroupBy plugin Fields to be Aggregate {string}")
+  public void enterGroupByPluginFieldsToBeAggregate(String jsonAggregateField) {
+    GroupByActions.enterAggregates(jsonAggregateField);
+  }
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/package-info.java b/src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/package-info.java
new file mode 100644
index 0000000000..56cc4eb37e
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/groupby/stepsdesign/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * This package contains stepDesigns for GroupBy Plugin.
+ */
+package io.cdap.plugin.groupby.stepsdesign;
diff --git a/src/e2e-test/resources/pluginParameters.properties b/src/e2e-test/resources/pluginParameters.properties
index e17200842b..371510fad8 100644
--- a/src/e2e-test/resources/pluginParameters.properties
+++ b/src/e2e-test/resources/pluginParameters.properties
@@ -1,6 +1,6 @@
 projectId=cdf-athena
 datasetprojectId=cdf-athena
-dataset=testing_bqmt
+dataset=test_sqlengine
 wrongSourcePath=gs://00000000-e2e-0014a44f-81be-4501-8360-0ddca192492
 serviceAccountType=filePath
 serviceAccount=auto-detect
@@ -354,6 +354,20 @@
 bqTargetTable=tabA
 bqTargetTable2=tabB
 ## BQMT-PLUGIN-PROPERTIES-END
+## SQLENGINE-PLUGIN-PROPERTIES-START
+bqCreateTableQueryFileSQL=testdata/BigQuery/BigQueryCreateTableSql.txt
+bqInsertDataQueryFileSQL=testdata/BigQuery/BigQueryInsertTableSql.txt
+groupByValidFirstField=name
+groupByValidSecondField=id
+DeduplicateValidFirstField=name
+groupByGcsAggregateFields=[{"key":"name#Count","value":"namecount"}]
+deduplicateTestOutputFile=testdata/BQValidationExpectedFiles/deduplicate_Test
+groupByTestOutputFile=testdata/BQValidationExpectedFiles/groupby_Test
+bqExpectedFileJoin=testdata/BQValidationExpectedFiles/bqexpected_Test
+bqCreateTableQueryFileJoin=testdata/BigQuery/BigQueryCreateTableJoin.txt
+bqInsertDataQueryFileJoin=testdata/BigQuery/BigQueryInsertDataJoin.txt
+## SQLENGINE-PLUGIN-PROPERTIES-END
+
 ##CLOUDBIGTABLE-PLUGIN-PROPERTIES-START
 cbtsourceMappings=[{"key":"cf1:boolean_column","value":"boolean_column"},{"key":"cf2:bytes_column","value":"bytes_column"},\
 {"key":"cf1:double_column","value":"double_column"},\
diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test
new file mode 100644
index 0000000000..46285555ff
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/bqexpected_Test
@@ -0,0 +1,2 @@
+{"dept_id":null,"dept_name":null,"firstname":"joe","id":1,"lastname":"root"}
+{"dept_id":null,"dept_name":null,"firstname":"douglas","id":2,"lastname":"john"}
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test
new file mode 100644
index 0000000000..2e48460aab
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/deduplicate_Test
@@ -0,0 +1,2 @@
+{"address":"ppu","id":13,"name":"root"}
+{"address":"ggn","id":12,"name":"joe"}
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test
new file mode 100644
index 0000000000..580ed7cec0
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BQValidationExpectedFiles/groupby_Test
@@ -0,0 +1,2 @@
+{"id":12,"name":"joe","namecount":2}
+{"id":13,"name":"root","namecount":1}
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableJoin.txt b/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableJoin.txt
new file mode 100644
index 0000000000..0655b5f4e0
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableJoin.txt
@@ -0,0 +1 @@
+create table `DATASET.TABLE_NAME` (id INT64, firstname STRING, lastname STRING )
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt b/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt
new file mode 100644
index 0000000000..1ca9c2e140
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/BigQueryCreateTableSql.txt
@@ -0,0 +1 @@
+create table `DATASET.TABLE_NAME` (name STRING, address STRING, id INTEGER )
\ No newline at end of file
diff --git a/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataJoin.txt b/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataJoin.txt
new file mode 100644
index 0000000000..d19cadf028
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertDataJoin.txt
@@ -0,0 +1,4 @@
+INSERT INTO DATASET.TABLE_NAME (id, firstname, lastname)
+VALUES
+(1,'joe','root'),
+(2,'douglas','john');
diff --git a/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt b/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt
new file mode 100644
index 0000000000..ea5e5b0a20
--- /dev/null
+++ b/src/e2e-test/resources/testdata/BigQuery/BigQueryInsertTableSql.txt
@@ -0,0 +1,5 @@
+INSERT INTO DATASET.TABLE_NAME (name, address, id)
+VALUES
+('joe', 'ggn', 12),
+('root', 'ppu', 13),
+('joe', 'ggn', 12);
\ No newline at end of file
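
Reviewer note: the new @Before hooks (createSourceBQTableForSqlEngine, createSourceBQTableForJoiner) delegate to the pre-existing createSourceBQTableWithQueries helper, which is not part of this diff. For context, a minimal sketch of the assumed flow — how the DATASET/TABLE_NAME placeholders in the new BigQueryCreateTableSql.txt / BigQueryInsertTableSql.txt files presumably get substituted and executed; the method body, its name, and the substitution details are assumptions for illustration, not the helper's actual source:

  // Sketch only: assumed behaviour of the existing createSourceBQTableWithQueries hook.
  private static void createSourceBQTableWithQueriesSketch(String createFile, String insertFile)
    throws IOException, InterruptedException, URISyntaxException {
    String table = "E2E_SOURCE_" + UUID.randomUUID().toString().replaceAll("-", "_");
    // Read the query files from the test classpath and substitute the placeholders
    String createQuery = new String(Files.readAllBytes(Paths.get(
      TestSetupHooks.class.getResource("/" + createFile).toURI())), StandardCharsets.UTF_8)
      .replace("DATASET", datasetName).replace("TABLE_NAME", table);
    String insertQuery = new String(Files.readAllBytes(Paths.get(
      TestSetupHooks.class.getResource("/" + insertFile).toURI())), StandardCharsets.UTF_8)
      .replace("DATASET", datasetName).replace("TABLE_NAME", table);
    BigQueryClient.getSoleQueryResult(createQuery);
    try {
      BigQueryClient.getSoleQueryResult(insertQuery);
    } catch (NoSuchElementException e) {
      // Expected: the INSERT is DML and produces no result rows to iterate,
      // matching the pattern used in createSecondBQTableJoin above.
    }
    PluginPropertyUtils.addPluginProp("bqSourceTable", table);
  }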