NameSpaceAdmin feature design and Runtime scenarios addition. #11

Open · wants to merge 1 commit into base: develop
105 changes: 105 additions & 0 deletions .github/workflows/e2e.yml
@@ -0,0 +1,105 @@
# Copyright © 2023 Cask Data, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
# Note: Any changes to this workflow take effect only after they are merged into develop
name: Build e2e tests

on:
push:
branches: [ develop ]
pull_request:
branches: [ develop ]
types: [ opened, synchronize, reopened, labeled ]
workflow_dispatch:

jobs:
build:
runs-on: k8s-runner-e2e
# We allow builds:
# 1) When triggered manually
# 2) When it's a merge into a branch
# 3) For PRs that are labeled as build and
# - It's a code change
# - A build label was just added
# A bit complex, but prevents builds when other labels are manipulated
if: >
github.event_name == 'workflow_dispatch'
|| github.event_name == 'push'
|| (contains(github.event.pull_request.labels.*.name, 'build')
&& (github.event.action != 'labeled' || github.event.label.name == 'build')
)
strategy:
matrix:
module: [cdap-e2e-tests]
fail-fast: false

steps:
# Check out the plugin repository, including submodules
- uses: actions/checkout@v3
with:
path: plugin
submodules: 'recursive'
ref: ${{ github.event.workflow_run.head_sha }}

- uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
if: github.event_name != 'workflow_dispatch' && github.event_name != 'push'
id: filter
with:
working-directory: plugin
filters: |
e2e-test:
- '${{ matrix.module }}/**/e2e-test/**'

- name: Checkout e2e test repo
uses: actions/checkout@v3
with:
repository: cdapio/cdap-e2e-tests
path: e2e

- name: Cache
uses: actions/cache@v3
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-${{ github.workflow }}

- name: Run required e2e tests
if: github.event_name != 'workflow_dispatch' && github.event_name != 'push' && steps.filter.outputs.e2e-test == 'false'
run: python3 e2e/src/main/scripts/run_e2e_test.py --module ${{ matrix.module }} --testRunner TestRunnerRequired.java

- name: Run all e2e tests
if: github.event_name == 'workflow_dispatch' || github.event_name == 'push' || steps.filter.outputs.e2e-test == 'true'
run: python3 e2e/src/main/scripts/run_e2e_test.py --module ${{ matrix.module }}

- name: Upload report
uses: actions/upload-artifact@v3
if: always()
with:
name: Cucumber report - ${{ matrix.module }}
path: ./**/target/cucumber-reports

- name: Upload debug files
uses: actions/upload-artifact@v3
if: always()
with:
name: Debug files - ${{ matrix.module }}
path: ./**/target/e2e-debug

- name: Upload files to GCS
uses: google-github-actions/upload-cloud-storage@v0
if: always()
with:
path: ./plugin
destination: e2e-tests-cucumber-reports/${{ github.event.repository.name }}/${{ github.ref }}
glob: '**/target/cucumber-reports/**'
@@ -0,0 +1,123 @@
#
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
@Namespaceadmin
Feature: NameSpaceAdmin - Validate namespace admin design-time scenarios

@Namespaceadmin
Scenario: Verify user is able to click on the namespace admin tab and successfully navigate to the page
Given Open Datafusion Project to configure pipeline
When Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Verify if user successfully navigated to namespace admin page

@Namespaceadmin
Scenario: Validate user is able to open the compute profile page and create a profile for a selected provisioner
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click on create profile button for "default" Namespace
Then Select a provisioner: "remoteHadoopProvisioner" for the compute profile
Then Verify the Create a Profile page is loaded for selected provisioner
Then Enter input plugin property: "profileLabel" with value: "validProfile"
Then Enter textarea plugin property: "profileDescription" with value: "validDescription"
Then Enter input plugin property: "host" with value: "testHost"
Then Enter input plugin property: "user" with value: "testUser"
Then Enter textarea plugin property: "sshKey" with value: "testSSHKey"
Then Click on: "Create" button in the properties
Then Verify the created compute profile: "validProfile" is displayed in system compute profile list


@Namespaceadmin
Scenario: Validate user is able to create new namespace preferences and delete the added namespace preferences successfully
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click "preferences" tab from Configuration page for "default" Namespace
Then Click on edit namespace preferences to set namespace preferences
Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences1"
Then Click on the Save & Close preferences button
Then Click on edit namespace preferences to set namespace preferences
Then Delete the preferences
Then Click on the Save & Close preferences button

Scenario: Validate user is able to add multiple namespace preferences inside namespace admin successfully
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click "preferences" tab from Configuration page for "default" Namespace
Then Click on edit namespace preferences to set namespace preferences
Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences2"
Then Click on the Save & Close preferences button
Then Click on edit namespace preferences to set namespace preferences
Then Delete the preferences
Collaborator:
Instead of calling this method two times to delete a preference, is it possible to create a step that accepts the names of the preferences as an argument and deletes them by iterating over them?

Collaborator (Author):
Yes Vipin, we will try to implement this logic in the Phase 3 development.
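For illustration, a parameterized step definition along these lines could replace the repeated delete calls. This is only a sketch: the package, class, and the NamespaceAdminActions.deletePreferenceRow helper are assumed names for illustration, not the repo's actual API.

package io.cdap.cdap.namespaceadmin.stepsdesign;

import io.cucumber.java.en.Then;

/**
 * Sketch: a single step that deletes several namespace preferences by key.
 */
public class NamespacePreferenceSteps {

  @Then("Delete the namespace preferences with keys: {string}")
  public void deleteNamespacePreferences(String commaSeparatedKeys) {
    for (String key : commaSeparatedKeys.split(",")) {
      // Hypothetical helper that removes the preference row matching this key in the UI.
      NamespaceAdminActions.deletePreferenceRow(key.trim());
    }
  }
}

A feature file could then collapse the two delete lines into one step, for example: Then Delete the namespace preferences with keys: "key1,key2".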

Then Delete the preferences
Then Click on the Save & Close preferences button

Scenario: Validate user is able to reset the namespace preferences added inside namespace admin successfully
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click "preferences" tab from Configuration page for "default" Namespace
Then Click on edit namespace preferences to set namespace preferences
Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences1"
Then Reset the preferences
Then Verify the reset is successful for added preferences

Scenario: To verify the validation error message with invalid cluster name
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click on create profile button for "default" Namespace
Then Select a provisioner: "existingDataProc" for the compute profile
Then Enter input plugin property: "profileLabel" with value: "validProfile"
Then Enter textarea plugin property: "profileDescription" with value: "validDescription"
Then Enter input plugin property: "clusterName" with value: "invalidClusterName"
Then Click on: "Create" button in the properties
Then Verify that the compute profile is displaying an error message: "errorInvalidClusterName" on the footer

Scenario: To verify the validation error message with invalid profile name
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click on create profile button for "default" Namespace
Then Select a provisioner: "existingDataProc" for the compute profile
Then Enter input plugin property: "profileLabel" with value: "invalidProfile"
Then Enter textarea plugin property: "profileDescription" with value: "validDescription"
Then Enter input plugin property: "clusterName" with value: "validClusterName"
Then Click on: "Create" button in the properties
Then Verify that the compute profile is displaying an error message: "errorInvalidProfileName" on the footer

Scenario: To verify the validation error message with invalid namespace name
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on Namespace dropdown button
Then Click on the Add Namespace tab
Then Enter the New Namespace Name with value: "invalidNamespaceName"
Then Enter the Namespace Description with value: "validNamespaceDescription"
Then Click on: "Finish" button in the properties
Then Verify the failed error message: "errorInvalidNamespace" displayed on dialog box

Scenario: Validate user is able to create a new namespace from the hamburger menu and switch to the newly created namespace
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on Namespace dropdown button
Then Click on the Add Namespace tab
Then Enter the New Namespace Name with value: "validNamespaceName"
Then Enter the Namespace Description with value: "validNamespaceDescription"
Then Click on: "Finish" button in the properties
Then Switch to the newly created Namespace
Then Click on the Hamburger bar on the left panel
Then Verify the namespace is switched to "validNamespaceName" successfully
@@ -0,0 +1,103 @@
#
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
@Namespaceadmin
Feature: NameSpaceAdmin - Validate namespace admin runtime scenarios

@BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify if user is able to run a pipeline successfully using the namespace preferences
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click "preferences" tab from Configuration page for "default" Namespace
Then Click on edit namespace preferences to set namespace preferences
Then Set namespace preferences with key: "keyValue" and value: "nameSpacePreferences2"
Then Click on the Save & Close preferences button
Then Click on the Hamburger bar on the left panel
Then Select navigation item: "studio" from the Hamburger menu list
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "BigQuery" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Click on the Macro button of Property: "projectId" and set the value to: "projectId"
Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "datasetprojectId"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Click on the Macro button of Property: "projectId" and set the value to: "projectId"
Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "datasetprojectId"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Collaborator:
We should also add Preview steps before deploying.

Collaborator (Author) @rahuldash171, Oct 31, 2023:
We have not added the preview steps because we just try to execute the pipeline by creating the preferences inside either the system- or namespace-level admin module. The verification is to use the created preferences/connections inside the properties of the source/sink and see a pipeline run successfully.

Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"

@BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify if user is able to create a connection from namespace admin and configure it for required plugins
Given Open Datafusion Project to configure pipeline
Then Click on the Hamburger bar on the left panel
Then Click on NameSpace Admin link from the menu
Then Click "connections" tab from Configuration page for "default" Namespace
Then Click on the Add Connection button
Then Add connection type as "bqConnection" and provide a "ConnectionName"
Then Click on the Test Connection button
Then Click on the Create button
Then Click on the Hamburger bar on the left panel
Then Select navigation item: "studio" from the Hamburger menu list
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "BigQuery" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "BigQuery2" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Click plugin property: "switch-useConnection"
Then Click on the Browse Connections button
Then Select connection: "ConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Click on the Browse button inside plugin properties
Then Click SELECT button inside connection data row with name: "dataset"
Then Wait till connection data loading completes with a timeout of 60 seconds
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Click plugin property: "useConnection"
Then Click on the Browse Connections button
Then Select connection: "ConnectionName"
Then Enter input plugin property: "referenceName" with value: "BQSinkReferenceName"
Then Click on the Browse button inside plugin properties
Then Click SELECT button inside connection data row with name: "dataset"
Then Wait till connection data loading completes with a timeout of 60 seconds
Then Verify input plugin property: "dataset" contains value: "dataset"
Then Enter input plugin property: "table" with value: "bqTargetTable"
Then Click plugin property: "truncateTable"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Collaborator:
Add the preview steps here as well

Collaborator (Author) @rahuldash171, Oct 31, 2023:
We have not added the preview steps because we just try to execute the pipeline by creating the preferences inside either the system- or namespace-level admin module. The verification is to use the created preferences/connections inside the properties of the source/sink and see a pipeline run successfully.

Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
@@ -0,0 +1,36 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.cdap.namespaceadmin;

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
* Test Runner to execute namespace admin related test cases.
*/
@RunWith(Cucumber.class)
@CucumberOptions(
features = {"src/e2e-test/features"},
glue = {"io.cdap.cdap.stepsdesign", "stepsdesign"},
tags = {"@NameSpaceadmin"},
plugin = {"pretty", "html:target/cucumber-html-report/namespaceadmin",
"json:target/cucumber-reports/cucumber-namespaceadmin.json",
"junit:target/cucumber-reports/cucumber-namespaceadmin.xml"}
)
public class TestRunner {
}
@@ -0,0 +1,20 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

/**
* Package contains the runners for namespace admin features.
*/
package io.cdap.cdap.namespaceadmin;