diff --git a/PyTest-KMS-HDFS/pytest.ini b/PyTest-KMS-HDFS/pytest.ini
new file mode 100644
index 0000000000..c32b2c06df
--- /dev/null
+++ b/PyTest-KMS-HDFS/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+markers =
+    cleanEZ: clean up the encryption zone
+    createEZ: create encryption zone
\ No newline at end of file
diff --git a/PyTest-KMS-HDFS/readme.md b/PyTest-KMS-HDFS/readme.md
new file mode 100644
index 0000000000..09692d5bcd
--- /dev/null
+++ b/PyTest-KMS-HDFS/readme.md
@@ -0,0 +1,66 @@
+# KMS API & HDFS Encryption Pytest Suite
+
+This test suite validates REST API endpoints for KMS (Key Management Service) and tests HDFS encryption functionality, including key management and file operations within encryption zones.
+
+**test_kms:** contains test cases for checking KMS API functionality
+
+**test_hdfs:** contains test cases for checking KMS functionality through the HDFS encryption lifecycle
+
+## 📂 Directory Structure
+
+```
+test_directory/
+├── test_kms/                 # Tests on KMS API
+    ├── test_keys.py          # Key creation and key name validation
+    ├── test_keys_02.py       # Extra test cases on key operations
+    ├── test_keyDetails.py    # getKeyName, getKeyMetadata, getKeyVersion checks
+    ├── test_keyOps.py        # Key operations: roll-over, generate DEK, decrypt EDEK
+    ├── test_keyOps_policy.py # Validate key operations based on policy enforcement
+    ├── conftest.py           # Reusable fixtures and setup
+    ├── utils.py              # Utility methods
+    ├── readme.md
+├── test_hdfs/                # Tests on HDFS encryption cycle
+    ├── test_encryption.py    # Test file 1
+    ├── test_encryption02.py  # Test file 2
+    ├── test_encryption03.py  # Test file 3
+    ├── test_config.py        # Stores all constants and HDFS commands
+    ├── conftest.py           # Sets up the environment
+    ├── readme.md
+    ├── utils.py              # Utility methods
+
+├── pytest.ini                # Registers custom pytest markers
+├── requirements.txt
+├── README.md                 # This file
+```
+
+## ⚙️ Setup Instructions
+Bring up the KMS container and any dependent containers using Docker.
+
+Create a virtual environment and install the necessary packages from requirements.txt.
+
+## Run test cases
+
+**Navigate to the PyTest-KMS-HDFS directory**
+
+**To run tests in the test_kms folder:**
+> pytest -vs test_kms/
+
+To run with a report included:
+> pytest -vs test_kms/ --html=kms-report.html
+
+**To run tests in the test_hdfs folder:**
+> pytest -vs -k "test_encryption"
+
+or
+
+> pytest -vs test_hdfs/
+
+To run with a report included:
+> pytest -vs test_hdfs/ --html=hdfs-report.html
+
+## 📌 Notes
+
+Ensure Docker containers for KMS and HDFS are running before executing tests.
+
+Reports generated using --html can be viewed in any browser for detailed test results.
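The custom markers registered in `pytest.ini` above (`createEZ`, `cleanEZ`) are applied in `test_hdfs/test_encryption.py` and can also be used to select subsets of tests. A minimal sketch of how a marked test looks and how it can be selected — the file name and test names here are illustrative only, not part of the suite:

```python
# marker_usage_example.py -- hypothetical file, shown only to illustrate the markers
import pytest

@pytest.mark.createEZ            # registered in pytest.ini
def test_create_zone_example():
    # test_encryption.py marks its encryption-zone creation test the same way
    assert True

@pytest.mark.cleanEZ             # registered in pytest.ini
def test_cleanup_example():
    assert True

# Select only the marked tests, e.g.:
#   pytest -vs -m createEZ test_hdfs/
#   pytest -vs -m cleanEZ test_hdfs/
```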
diff --git a/PyTest-KMS-HDFS/requirements.txt b/PyTest-KMS-HDFS/requirements.txt new file mode 100644 index 0000000000..deec233bd6 --- /dev/null +++ b/PyTest-KMS-HDFS/requirements.txt @@ -0,0 +1,20 @@ +annotated-types==0.7.0 +certifi==2025.1.31 +charset-normalizer==3.4.1 +docker==7.1.0 +idna==3.10 +iniconfig==2.0.0 +Jinja2==3.1.6 +MarkupSafe==3.0.2 +packaging==24.2 +pluggy==1.5.0 +pydantic==2.11.0 +pydantic_core==2.33.0 +pytest==8.3.5 +pytest-html==4.1.1 +pytest-metadata==3.1.1 +python-on-whales==0.76.1 +requests==2.32.3 +typing-inspection==0.4.0 +typing_extensions==4.13.0 +urllib3==2.3.0 diff --git a/PyTest-KMS-HDFS/test_hdfs/conftest.py b/PyTest-KMS-HDFS/test_hdfs/conftest.py new file mode 100644 index 0000000000..2c1df19317 --- /dev/null +++ b/PyTest-KMS-HDFS/test_hdfs/conftest.py @@ -0,0 +1,103 @@ +import docker +import pytest +import time +from test_config import (HADOOP_CONTAINER, HDFS_USER,KMS_PROPERTY,CORE_SITE_XML_PATH,SET_PATH_CMD) + +# Setup Docker Client +client = docker.from_env() + +@pytest.fixture(scope="module") +def hadoop_container(): + container = client.containers.get(HADOOP_CONTAINER) #to get hadoop container instance + return container + +# polling method to wait until container gets restarted +def wait_for_hdfs(container, user='hdfs', timeout=30, interval=2): + + print("Waiting for HDFS to become available...") + start_time = time.time() + + while time.time() - start_time < timeout: + exit_code, _ = container.exec_run("hdfs dfs -ls /", user=user) + if exit_code == 0: + print("HDFS is ready.") + return True + else: + print("โณ HDFS not ready yet, retrying...") + time.sleep(interval) + + raise TimeoutError("HDFS did not become ready within the timeout period.") + + +def configure_kms_property(hadoop_container): + # Check if KMS property already exists + check_cmd = f"grep 'hadoop.security.key.provider.path' {CORE_SITE_XML_PATH}" + exit_code, _ = hadoop_container.exec_run(check_cmd, user='root') + + if exit_code != 0: + # Insert KMS property + insert_cmd = f"sed -i '/<\\/configuration>/i {KMS_PROPERTY}' {CORE_SITE_XML_PATH}" + exit_code, output = hadoop_container.exec_run(insert_cmd, user='root') + print(f"KMS property inserted. Exit code: {exit_code}") + + # Debug: Show updated file + cat_cmd = f"cat {CORE_SITE_XML_PATH}" + _, file_content = hadoop_container.exec_run(cat_cmd, user='root') + print("Updated core-site.xml:\n", file_content.decode()) + + # Restart the container to apply the config changes + print("Restarting Hadoop container to apply changes...") + hadoop_container.restart() + wait_for_hdfs(hadoop_container, user=HDFS_USER) # Wait for container to fully restart + # time.sleep(10) + print("Hadoop container restarted and ready.") + + else: + print("KMS provider already present. No need to update config.") + + # # Leave safe mode if active + # print("Exiting safe mode (if active)...") + # leave_safe_mode_cmd = "hdfs dfsadmin -safemode leave" + # exit_code, output = hadoop_container.exec_run(leave_safe_mode_cmd, user=HDFS_USER) + # print(output.decode()) # For debugging + + +def ensure_user_exists(hadoop_container, username): + # Ensure keyadmin user exists + print("Ensuring keyadmin user exists...") + user_check_cmd = f"id -u {username}" + exit_code, _ = hadoop_container.exec_run(user_check_cmd, user='root') + + if exit_code != 0: + # Create the keyadmin user if not already present + create_user_cmd = f"useradd {username}" + exit_code, output = hadoop_container.exec_run(create_user_cmd, user='root') + print(f"keyadmin user created. 
Exit code: {exit_code}") + + # Assign necessary permissions to the user + assign_permissions_cmd = f"usermod -aG hadoop {username}" + exit_code, output = hadoop_container.exec_run(assign_permissions_cmd, user='root') + print(f"Permissions assigned to keyadmin. Exit code: {exit_code}") + else: + print("keyadmin user already exists. No need to create.") + + + +# Automatically setup environment before tests run +@pytest.fixture(scope="module", autouse=True) +def setup_environment(hadoop_container): + + set_path_cmd = SET_PATH_CMD + hadoop_container.exec_run(set_path_cmd, user='root') + + configure_kms_property(hadoop_container) + ensure_user_exists(hadoop_container,"keyadmin") + + # Exit Safe Mode + print("Exiting HDFS Safe Mode...") + hadoop_container.exec_run("hdfs dfsadmin -safemode leave", user=HDFS_USER) + + yield # Run tests + + # Post-test cleanup + print("Tests completed.") diff --git a/PyTest-KMS-HDFS/test_hdfs/readme.md b/PyTest-KMS-HDFS/test_hdfs/readme.md new file mode 100644 index 0000000000..4c40140b64 --- /dev/null +++ b/PyTest-KMS-HDFS/test_hdfs/readme.md @@ -0,0 +1,107 @@ +# This is the main directory for testing HDFS encryption cycle + +## Structure +``` +test_hdfs/ +โ”œโ”€โ”€ test_encryption.py +โ”œโ”€โ”€ test_encryption02.py +โ”œโ”€โ”€ test_encryption03.py +โ”œโ”€โ”€ test_config.py #stores all constants and HDFS commands +โ”œโ”€โ”€ conftest.py #sets up the environment +โ”œโ”€โ”€ utils.py #utility methods + +``` + +--- + +## Extra Features + +- **Markers:** + Markers have been used to selectively run specific test cases, improving test efficiency and organization. + +--- + +### `setup_environment` + +Handled in `Conftest.py` file +Before running the test cases, some environment configurations are needed: +- HDFS must communicate with KMS to fetch key details. +- Specific KMS properties are added to the `core-site.xml` file. +- Containers are restarted to apply the changes effectively. + +--- + +### Utility Methods + +- **get_error_logs:** + Fetches logs from both KMS and HDFS containers. Helps in identifying issues when errors or exceptions occur during testing. + +- **run_command:** + Executes all necessary HDFS commands inside the containers. + +--- + +## `test_encryption.py` + +Handles the **full HDFS encryption cycle**, including setup, positive and negative test scenarios, and cleanup. + +### Main Highlights: +- Encryption Zone (EZ) creation in HDFS. +- Granting permissions to specific users for read/write operations within the EZ. +- Validating read/write attempts by unauthorized users inside the EZ. + + +## Test Cases + +### โœ… Positive Test Cases + +1. **test_create_key:** + Creates an Encryption Zone (EZ) Key which is required to create an EZ. + +2. **test_create_encryption_zone:** + Creates an Encryption Zone (EZ) using an existing EZ key. + +3. **test_grant_permissions:** + Grants read-write permissions to a specific user (e.g., HIVE) within the EZ. + +4. **test_hive_user_write_read:** + Performs write and read operations inside the EZ using the authorized HIVE user. + +--- + +### โŒ Negative Test Cases + +1. **test_unauthorized_write:** + Attempts to write inside the EZ using an unauthorized user (e.g., HBASE). Validates expected denial of access. + +2. **test_unauthorized_read:** + Attempts to read inside the EZ using an unauthorized user. Validates expected denial of access. + +--- + +### ๐Ÿงน Cleanup + +- **test_cleanup:** + Cleans up the Encryption Zone and all files created during testing. + Deletes the EZ key created earlier. 
+  Ensures the test environment is reset for clean re-runs.
+
+---
+
+## `test_encryption02.py`
+
+Covers the following scenarios:
+- Check whether old files can still be read after a key roll-over
+- Check whether new files can be written and read after a key roll-over
+- Check the read operation on a file after key deletion
+
+---
+
+## `test_encryption03.py`
+
+Covers **cross Encryption Zone operations**.
+
+
+## Summary
+
+This test suite ensures that **HDFS encryption and access control mechanisms** function as expected, validating both authorized and unauthorized access scenarios while maintaining a clean and reusable test environment.
diff --git a/PyTest-KMS-HDFS/test_hdfs/test_config.py b/PyTest-KMS-HDFS/test_hdfs/test_config.py
new file mode 100644
index 0000000000..d088ae91cb
--- /dev/null
+++ b/PyTest-KMS-HDFS/test_hdfs/test_config.py
@@ -0,0 +1,79 @@
+
+## Contains all constant values regarding USER, PATH, HDFS commands ----------------------
+
+
+HDFS_USER = "hdfs"
+HIVE_USER = "hive"
+HBASE_USER = "hbase"
+KEY_ADMIN = "keyadmin"
+HEADERS = {"Content-Type": "application/json", "Accept": "application/json"}
+PARAMS = {"user.name": "keyadmin"}
+BASE_URL = "http://localhost:9292/kms/v1"
+HADOOP_CONTAINER = "ranger-hadoop"
+KMS_CONTAINER = "ranger-kms"
+
+# KMS config property that needs to be added to core-site.xml ------------ add more if needed
+KMS_PROPERTY = """<property><name>hadoop.security.key.provider.path</name><value>kms://http@host.docker.internal:9292/kms</value></property>"""
+
+CORE_SITE_XML_PATH = "/opt/hadoop/etc/hadoop/core-site.xml"
+
+# Ensure PATH is set for /opt/hadoop/bin
+SET_PATH_CMD = "echo 'export PATH=/opt/hadoop/bin:$PATH' >> /etc/profile && export PATH=/opt/hadoop/bin:$PATH"
+
+HADOOP_NAMENODE_LOG_PATH = "/opt/hadoop/logs/hadoop-hdfs-namenode-ranger-hadoop.example.com.log"
+
+KMS_LOG_PATH = "/var/log/ranger/kms/ranger-kms-ranger-kms.example.com-root.log"
+
+
+# HDFS commands ----------------------------------------------------
+CREATE_KEY_COMMAND = "hadoop key create {key_name} -size 128 -provider kms://http@host.docker.internal:9292/kms"
+
+VALIDATE_KEY_COMMAND = "hadoop key list -provider kms://http@host.docker.internal:9292/kms"
+
+CREATE_EZ_COMMANDS = [
+    "hdfs dfs -mkdir /{ez_name}",
+    "hdfs crypto -createZone -keyName {key_name} -path /{ez_name}",
+    "hdfs crypto -listZones"
+]
+
+GRANT_PERMISSIONS_COMMANDS = [
+    "hdfs dfs -chmod -R 700 /{ez_name}",
+    "hdfs dfs -chown -R {user}:{user} /{ez_name}"
+]
+
+CREATE_FILE_COMMAND = ['echo "{filecontent}" > /home/{user}/{filename}.txt && ls -l /home/{user}/{filename}.txt']
+
+ACTIONS_COMMANDS = [
+    "hdfs dfs -put /home/{user}/{filename}.txt /{ez_name}/",
+    "hdfs dfs -ls /{ez_name}/",
+    "hdfs dfs -cat /{ez_name}/{filename}.txt"
+]
+
+CROSS_EZ_ACTION_COMMANDS = [
+    "hdfs dfs -put /home/{user}/{filename}.txt /{ez_name}/{dirname}/",
+    "hdfs dfs -ls /{ez_name}/",
+    "hdfs dfs -cat /{ez_name}/{dirname}/{filename}.txt"
+]
+
+READ_EZ_FILE = [
+    "hdfs dfs -cat /{ez_name}/{filename}.txt"
+]
+
+UNAUTHORIZED_WRITE_COMMAND = 'hdfs dfs -put /home/{user}/{filename}.txt /{ez_name}/'
+
+UNAUTHORIZED_READ_COMMAND = "hdfs dfs -cat /{ez_name}/{filename}.txt"
+
+CLEANUP_COMMANDS = [
+    "hdfs dfs -rm /{ez_name}/{filename}.txt",
+    "hdfs dfs -rm -R /{ez_name}"
+]
+CLEANUP_EZ = [
+    "hdfs dfs -rm -R /{ez_name}"
+]
+CLEANUP_EZ_FILE = [
+    "hdfs dfs -rm /{ez_name}/{filename}.txt"
+]
+KEY_DELETION_CMD = "bash -c \"echo 'Y' | hadoop key delete {key_name} -provider kms://http@host.docker.internal:9292/kms\""
+
+
diff --git a/PyTest-KMS-HDFS/test_hdfs/test_encryption.py
b/PyTest-KMS-HDFS/test_hdfs/test_encryption.py new file mode 100644 index 0000000000..f139902ad0 --- /dev/null +++ b/PyTest-KMS-HDFS/test_hdfs/test_encryption.py @@ -0,0 +1,129 @@ +import pytest +from utils import run_command,get_error_logs +from test_config import (HDFS_USER,HIVE_USER,HBASE_USER,KEY_ADMIN, + CREATE_KEY_COMMAND, VALIDATE_KEY_COMMAND, CREATE_EZ_COMMANDS,GRANT_PERMISSIONS_COMMANDS, + UNAUTHORIZED_WRITE_COMMAND, ACTIONS_COMMANDS, + UNAUTHORIZED_READ_COMMAND,KEY_DELETION_CMD, + CLEANUP_COMMANDS,CREATE_FILE_COMMAND) + +key_name="hdfs-key" +ez_name="secure_zone" +filename="hdfs-test-file" +filecontent="Welcome to hdfs encryption" + +#EZ key creation before creating an EZ--------------------- +def test_create_key(hadoop_container): + + create_key_cmd= CREATE_KEY_COMMAND.format(key_name=key_name) + # Run the command as keyadmin user + output = run_command(hadoop_container,create_key_cmd, KEY_ADMIN) + print("Key Creation Output:", output) + + # Validate if the key was created successfully + validation_output = run_command(hadoop_container, VALIDATE_KEY_COMMAND, KEY_ADMIN) + + print("Key List Output:", validation_output) + + # Check if key is present + if key_name not in validation_output: + error_logs = get_error_logs() # Fetch logs on failure + pytest.fail(f"Key creation failed. Logs:\n{error_logs}") + + + +# Create Encryption Zone ---------------------------------------------------- +@pytest.mark.createEZ +def test_create_encryption_zone(hadoop_container): + + create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS] + + for cmd in create_ez_commands: + output = run_command(hadoop_container, cmd, HDFS_USER) + print(output) + + +#Grant Permissions to 'Hive' User -------------------------------------------- +def test_grant_permissions(hadoop_container): + grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS] + + for cmd in grant_permission_commands: + output = run_command(hadoop_container,cmd,HDFS_USER) + print(output) + +#testing read write permission for hive user----------------------------------------- +def test_hive_user_write_read(hadoop_container): + + #create file as 'hive' user + create_file_cmd = [cmd.format( + filename=filename, + filecontent=filecontent, + user=HIVE_USER + ) for cmd in CREATE_FILE_COMMAND] + + run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER) + + #read-write using 'hive' user + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS] + for cmd in read_write_cmd: + run_command(hadoop_container,cmd,HIVE_USER) + + +#Negative Test - Unauthorized User Cannot Write 'HBASE'------------------------------ +def test_unauthorized_write(hadoop_container): + + filename2="hdfs-test-file2" #writing new file into EZ + failure_detected = False + + unauth_write_cmd= UNAUTHORIZED_WRITE_COMMAND.format(filename=filename2,user=HBASE_USER,ez_name=ez_name) + output,exit_code= run_command(hadoop_container,unauth_write_cmd,HBASE_USER,fail_on_error=False,return_exit_code=True) + + print(f"Command Output:\n{output}") + + # Check for known failure indicators in output + if exit_code != 0: + failure_detected = True + + #assert that failure was detected as expected + assert failure_detected, "Expected failure due to no permission on EZ, but command succeeded." 
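+    # Note: run_command() is invoked with fail_on_error=False and return_exit_code=True,
+    # so the expected permission denial is returned as a non-zero exit code instead of
+    # failing the test inside the helper; the assert above then verifies the denial.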
+ + #if want to fail for report purpose run_command(hadoop_container,unauth_write_cmd,HBASE_USER) + +# Negative Test - Unauthorized User 'HBASE' Cannot Read ---------------------------------- +def test_unauthorized_read(hadoop_container): + + unauth_read= UNAUTHORIZED_READ_COMMAND.format(filename=filename, ez_name=ez_name, user=HBASE_USER) + output,exit_code = run_command(hadoop_container,unauth_read,HBASE_USER,fail_on_error=False,return_exit_code=True) + + print(f"Command Output:\n{output}") + + # Check for known failure indicators in output + if exit_code != 0: + failure_detected = True + + #assert that failure was detected as expected + assert failure_detected, "Expected failure due to no permission on EZ, but command succeeded." + + #run_command(hadoop_container,unauth_read,HBASE_USER) + + +# Clean Up - Remove Test file and EZ +@pytest.mark.cleanEZ +def test_cleanup(hadoop_container): + + cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS] + for cmd in cleanup_cmd: + output=run_command(hadoop_container,cmd,HDFS_USER) + + print(output) + + #clean EZ key + key_deletion_cmd=KEY_DELETION_CMD.format(key_name=key_name) + output=run_command(hadoop_container,key_deletion_cmd,KEY_ADMIN) + print(output) + + + + + + + diff --git a/PyTest-KMS-HDFS/test_hdfs/test_encryption02.py b/PyTest-KMS-HDFS/test_hdfs/test_encryption02.py new file mode 100644 index 0000000000..6f54a4da72 --- /dev/null +++ b/PyTest-KMS-HDFS/test_hdfs/test_encryption02.py @@ -0,0 +1,242 @@ +import pytest +import requests +from utils import run_command,get_error_logs +from test_config import (HDFS_USER,HIVE_USER,HEADERS,PARAMS,BASE_URL, + CREATE_EZ_COMMANDS ,GRANT_PERMISSIONS_COMMANDS, + CREATE_FILE_COMMAND, ACTIONS_COMMANDS,READ_EZ_FILE, + CLEANUP_COMMANDS) + +# ****** ********************Test Case 01 ******************************************** +# ***** Check if after key roll over old files can be read or not +# *********************************************************************************** +def test_read_old_file_after_rollover(hadoop_container): + + key_name="test-key1" + ez_name = "secure_zone1" + filename="testfile1" + filecontent="Hello Human" + + + #create EZ key------- + key_data={ + "name":key_name + } + response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=HEADERS) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # create EZ ------------ + create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS] + + for cmd in create_ez_commands: + output = run_command(hadoop_container, cmd, HDFS_USER) + print(output) + + #grant permissions for 'hive' user------------ + grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS] + + for cmd in grant_permission_commands: + output = run_command(hadoop_container,cmd,HDFS_USER) + print(output) + + #create file as 'hive' user------- + create_file_cmd = [cmd.format( + filename=filename, + filecontent=filecontent, + user=HIVE_USER + ) for cmd in CREATE_FILE_COMMAND] + + run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER) + + #read-write using 'hive' user------- + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS] + for cmd in read_write_cmd: + run_command(hadoop_container,cmd,HIVE_USER) + + + #roll-over of key--------- + response=requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=HEADERS, params=PARAMS) + assert 
response.status_code == 200, f"Key roll over failed: {response.text}" + + + #read same file after roll over--------- + read_ez_file=[cmd.format(filename=filename, ez_name=ez_name) for cmd in READ_EZ_FILE] + for cmd in read_ez_file: + run_command(hadoop_container,cmd,HIVE_USER) + + #cleanup EZ and EZ file-------- + cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS] + for cmd in cleanup_cmd: + run_command(hadoop_container,cmd,HDFS_USER) + + #delete EZ key ---------- + delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS) + print(delete_output2) + + + + # #key creation through hadoop command + # output = run_command(hadoop_container,CREATE_KEY_COMMAND, KEY_ADMIN) + # print("Key Creation Output:", output) + + # delete_output=run_command(hadoop_container,KEY_DELETION_CMD,KEY_ADMIN) + # print(delete_output) + + +# ****** ********************Test Case 02 ******************************************** +# ***** Check if after key roll over new files can be written and read too +# *********************************************************************************** +def test_writeAndRead_Newfile_after_rollover(hadoop_container): + + key_name="test-key2" + ez_name = "secure_zone1" + filename="testfile2" + filename2="testfile3" + filecontent="Hello Robot" + filecontent2="Hello Second Robo" + + + #create EZ key------- + key_data={ + "name":key_name + } + response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=HEADERS) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # create EZ ------------ + create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS] + + for cmd in create_ez_commands: + output = run_command(hadoop_container, cmd, HDFS_USER) + print(output) + + #grant permissions for 'hive' user------------ + grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS] + + for cmd in grant_permission_commands: + output = run_command(hadoop_container,cmd,HDFS_USER) + print(output) + + #create file in EZ as 'hive' user------- + create_file_cmd = [cmd.format( + filename=filename, + filecontent=filecontent, + user=HIVE_USER + ) for cmd in CREATE_FILE_COMMAND] + + run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER) + + #read-write using 'hive' user------- + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS] + for cmd in read_write_cmd: + output=run_command(hadoop_container,cmd,HIVE_USER) + print(output) + + + #roll-over of key--------- + response=requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=HEADERS, params=PARAMS) + assert response.status_code == 200, f"Key roll over failed: {response.text}" + + + #write new file after rollover + create_file_cmd = [cmd.format( + filename=filename2, + filecontent=filecontent2, + user=HIVE_USER + ) for cmd in CREATE_FILE_COMMAND] + + run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER) + + + #read-write new file now + read_write_cmd= [cmd.format(filename=filename2, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS] + for cmd in read_write_cmd: + output=run_command(hadoop_container,cmd,HIVE_USER) + print(output) + + + #cleanup EZ and EZ file-------- + cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS] + for cmd in cleanup_cmd: + run_command(hadoop_container,cmd,HDFS_USER) + + #delete EZ key ---------- + 
delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS) + print(delete_output2) + + +# ****** ********************Test Case 03 ******************************************** +# ***** Check read operation on file after key deletion +# *********************************************************************************** +def test_Readfile_after_keyDeletion(hadoop_container): + + key_name="test-key3" + ez_name = "secure_zone1" + filename="testfile4" + filename2="testfile5" + filecontent="You are reading it before key deletion" + filecontent2="You can't read me" + + + #create EZ key------- + key_data={ + "name":key_name + } + response=requests.post(f"{BASE_URL}/keys",json=key_data,params=PARAMS,headers=HEADERS) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # create EZ ------------ + create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS] + + for cmd in create_ez_commands: + output = run_command(hadoop_container, cmd, HDFS_USER) + print(output) + + #grant permissions for 'hive' user------------ + grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS] + + for cmd in grant_permission_commands: + output = run_command(hadoop_container,cmd,HDFS_USER) + print(output) + + #create file in EZ as 'hive' user------- + create_file_cmd = [cmd.format( + filename=filename, + filecontent=filecontent, + user=HIVE_USER + ) for cmd in CREATE_FILE_COMMAND] + + run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER) + + #read-write using 'hive' user------- + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in ACTIONS_COMMANDS] + for cmd in read_write_cmd: + output=run_command(hadoop_container,cmd,HIVE_USER) + print(output) + + + #delete EZ key ---------- + delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS) + print(delete_output2) + + + #read-write file after key deletion -------------- + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name, user=HIVE_USER) for cmd in READ_EZ_FILE] + failure_detected = False + + for cmd in read_write_cmd: + output = run_command(hadoop_container, cmd, HIVE_USER, fail_on_error=False) + print(f"Command Output:\n{output}") + + # Check for known failure indicators in output + if any(err in output.lower() for err in ["error", "exception", "failed", "not found"]): + failure_detected = True + + #assert that failure was detected as expected + assert failure_detected, "Expected failure due to deleted EZ key, but command succeeded." 
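+    # The read above is expected to fail: once the EZ key has been deleted from KMS,
+    # HDFS can no longer decrypt the file's EDEK. run_command() is called with
+    # fail_on_error=False so the error output can be scanned for failure keywords.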
+ + + #cleanup EZ and EZ file-------- + cleanup_cmd=[cmd.format(filename=filename, ez_name=ez_name) for cmd in CLEANUP_COMMANDS] + for cmd in cleanup_cmd: + run_command(hadoop_container,cmd,HDFS_USER) + diff --git a/PyTest-KMS-HDFS/test_hdfs/test_encryption03.py b/PyTest-KMS-HDFS/test_hdfs/test_encryption03.py new file mode 100644 index 0000000000..87e6e88a21 --- /dev/null +++ b/PyTest-KMS-HDFS/test_hdfs/test_encryption03.py @@ -0,0 +1,125 @@ +import pytest +import requests +from utils import run_command,get_error_logs +from test_config import (HDFS_USER,HIVE_USER,HEADERS,PARAMS,BASE_URL, + CREATE_EZ_COMMANDS ,GRANT_PERMISSIONS_COMMANDS, + CREATE_FILE_COMMAND, ACTIONS_COMMANDS,READ_EZ_FILE, + CLEANUP_COMMANDS,CROSS_EZ_ACTION_COMMANDS,CLEANUP_EZ) + + + + +# ****** ********************Test Case 01 ******************************************** +# ***** Cross EZ operation where one user has given access to one EZ and does operation on that zone and another second zone where he has no permission +# *********************************************************************************** +def test_cross_EZ_operations(hadoop_container): + + key_name="cross-key" + key_name2="cross-key2" + + ez_name = "secure_zone1" + ez_name2 = "secure_zone2" + + filename="testfile1" + filecontent="Cross operation on Encryption zone" + + dirname="dir1" + dirname2="dir2" + + + #create 2 EZ key------- + key_data1={ + "name":key_name + } + key_data2={ + "name":key_name2 + } + response=requests.post(f"{BASE_URL}/keys",json=key_data1,params=PARAMS,headers=HEADERS) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + response2=requests.post(f"{BASE_URL}/keys",json=key_data2,params=PARAMS,headers=HEADERS) + assert response2.status_code == 201, f"Key creation failed: {response2.text}" + + # create 2 EZ ------------ + create_ez_commands = [cmd.format(ez_name=ez_name, key_name=key_name) for cmd in CREATE_EZ_COMMANDS] + + for cmd in create_ez_commands: + output = run_command(hadoop_container, cmd, HDFS_USER) + print(output) + + create_ez_commands = [cmd.format(ez_name=ez_name2, key_name=key_name2) for cmd in CREATE_EZ_COMMANDS] + + for cmd in create_ez_commands: + output = run_command(hadoop_container, cmd, HDFS_USER) + print(output) + + # Create the subdirectories inside the encryption zone as HDFS user + create_dirs_cmds = [ + f"hdfs dfs -mkdir -p /{ez_name}/{dirname}", + f"hdfs dfs -mkdir -p /{ez_name}/{dirname2}" + ] + for cmd in create_dirs_cmds: + run_command(hadoop_container, cmd, HDFS_USER) + + + #grant permissions for 'hive' user on 1st EZ------------ + grant_permission_commands= [cmd.format(ez_name=ez_name, user=HIVE_USER) for cmd in GRANT_PERMISSIONS_COMMANDS] + + for cmd in grant_permission_commands: + output = run_command(hadoop_container,cmd,HDFS_USER) + print(output) + + #create file as 'hive' user------- + create_file_cmd = [cmd.format( + filename=filename, + filecontent=filecontent, + user=HIVE_USER + ) for cmd in CREATE_FILE_COMMAND] + + run_command(hadoop_container, ["bash", "-c", create_file_cmd[0]], HIVE_USER) + + + #write it to dir1 in EZ1 using 'hive' user and read it ------- + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name,dirname=dirname, user=HIVE_USER) for cmd in CROSS_EZ_ACTION_COMMANDS] + for cmd in read_write_cmd: + run_command(hadoop_container,cmd,HIVE_USER) + + #write it to dir2 in EZ1 using 'hive' user and read it ------- + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name,dirname=dirname2, user=HIVE_USER) for cmd in CROSS_EZ_ACTION_COMMANDS] 
+ for cmd in read_write_cmd: + run_command(hadoop_container,cmd,HIVE_USER) + + #try to write in EZ2 now as HIVE user- should fail as has no permission on EZ2----------------------- + failure_detected = False + read_write_cmd= [cmd.format(filename=filename, ez_name=ez_name2, user=HIVE_USER) for cmd in ACTIONS_COMMANDS] + + for cmd in read_write_cmd: + output,exit_code=run_command(hadoop_container,cmd,HIVE_USER, fail_on_error=False,return_exit_code=True) + print(f"Command Output:\n{output}") + + # Check for known failure indicators in output + if exit_code != 0: + failure_detected = True + break + + #assert that failure was detected as expected + assert failure_detected, "Expected failure due to no permission on EZ, but command succeeded." + + + + #cleanup EZ and EZ file------------------------------------------------------------------------------ + cleanup_cmd=[cmd.format(ez_name=ez_name) for cmd in CLEANUP_EZ] + for cmd in cleanup_cmd: + run_command(hadoop_container,cmd,HDFS_USER) + + cleanup_cmd=[cmd.format(ez_name=ez_name2) for cmd in CLEANUP_EZ] + for cmd in cleanup_cmd: + run_command(hadoop_container,cmd,HDFS_USER) + + #delete EZ key ---------- + delete_output2=requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS) + print(delete_output2) + + delete_output2=requests.delete(f"{BASE_URL}/key/{key_name2}", params=PARAMS) + print(delete_output2) + diff --git a/PyTest-KMS-HDFS/test_hdfs/utils.py b/PyTest-KMS-HDFS/test_hdfs/utils.py new file mode 100644 index 0000000000..b54b9ba397 --- /dev/null +++ b/PyTest-KMS-HDFS/test_hdfs/utils.py @@ -0,0 +1,53 @@ +import pytest +import docker +from test_config import (KMS_CONTAINER,HADOOP_NAMENODE_LOG_PATH,KMS_LOG_PATH) + +# Setup Docker Client +client = docker.from_env() + +#to run all HDFS commands +def run_command(container, cmd, user, fail_on_error=True,return_exit_code=False): + exit_code, output = container.exec_run(cmd, user=user) + output_response = output.decode() + + if exit_code != 0 and fail_on_error: + kms_container = client.containers.get(KMS_CONTAINER) + hadoop_logs, kms_logs = get_error_logs(container, kms_container) + + pytest.fail(f""" + Command failed: {cmd} + Exit Code: {exit_code} + + Output: + {output_response} + + Hadoop Container Logs: + {hadoop_logs} + + KMS Container Logs: + {kms_logs} + """) + if return_exit_code: + return output_response, exit_code + + return output_response + + +#fetch logs from hadoop and KMS file +def get_error_logs(hadoop_container, kms_container): + + # Get Hadoop NameNode logs + hadoop_log_cmd = f"tail -n 50 {HADOOP_NAMENODE_LOG_PATH}" + _, hadoop_logs = hadoop_container.exec_run(hadoop_log_cmd, user='hdfs') + hadoop_logs_decoded = hadoop_logs.decode() + hadoop_error_lines = [line for line in hadoop_logs_decoded.split("\n") if "ERROR" in line or "Exception" in line or "WARN" in line] + hadoop_error_text = "\n".join(hadoop_error_lines) if hadoop_error_lines else "No recent errors in Hadoop Namenode logs." + + # Get KMS logs + kms_log_cmd = f"tail -n 50 {KMS_LOG_PATH}" + _, kms_logs = kms_container.exec_run(kms_log_cmd, user='root') + kms_logs_decoded = kms_logs.decode() + kms_error_lines = [line for line in kms_logs_decoded.split("\n") if "ERROR" in line or "Exception" in line or "WARN" in line] + kms_error_text = "\n".join(kms_error_lines) if kms_error_lines else "No recent errors in KMS logs." 
+ + return hadoop_error_text, kms_error_text diff --git a/PyTest-KMS-HDFS/test_kms/conftest.py b/PyTest-KMS-HDFS/test_kms/conftest.py new file mode 100644 index 0000000000..753b7dee63 --- /dev/null +++ b/PyTest-KMS-HDFS/test_kms/conftest.py @@ -0,0 +1,36 @@ + +import pytest +import requests + +from utils import fetch_logs + +BASE_URL="http://localhost:9292/kms/v1" +PARAMS={"user.name":"keyadmin"} +HEADERS={"Content-Type": "application/json","Accept":"application/json"} + + +@pytest.fixture(scope="session") +def headers(): + return HEADERS + + +@pytest.fixture(scope="class") +def create_test_key(headers): + data={ + "name":"key1", + "cipher": "AES/CTR/NoPadding", #material can be provided (optional) + "length": 128, + "description": "Test key" + } + + key_creation_response=requests.post(f"{BASE_URL}/keys",headers=headers,json=data,params=PARAMS) + + if key_creation_response.status_code != 201: + error_logs = fetch_logs() # Fetch logs on failure + pytest.fail(f"Key creation failed. API Response: {key_creation_response.text}\nLogs:\n{error_logs}") + + yield data + requests.delete(f"{BASE_URL}/key/key1",params=PARAMS) + + + diff --git a/PyTest-KMS-HDFS/test_kms/readme.md b/PyTest-KMS-HDFS/test_kms/readme.md new file mode 100644 index 0000000000..abb1cb6214 --- /dev/null +++ b/PyTest-KMS-HDFS/test_kms/readme.md @@ -0,0 +1,105 @@ +# This is the main directory for running KMS API functionality tests + +## Structure +``` +test_kms/ +โ”œโ”€โ”€ test_keys.py +โ”œโ”€โ”€ test_keys_02.py +โ”œโ”€โ”€ test_keyDetails.py +โ”œโ”€โ”€ test_keyOps.py +โ”œโ”€โ”€ test_keyOps_policy.py +โ”œโ”€โ”€ conftest.py +โ”œโ”€โ”€ utils.py +``` + + +## Extra Features and Functionalities Used: + +- **Parametrization:** For running multiple test cases handling the same functionality in a single method. + +- **fetch_logs:** Fetches errors or exceptions from logs when something goes wrong. + +- **cleanup:** Cleans up all resources used while testing, ensuring re-runs of test cases. + +--- + +## `conftest.py` + +Special file used to define fixtures and shared configurations that pytest can automatically discover and use across tests. +Pytest automatically loads this file, aiding code reusability. + +--- + +## `utils.py` + +Consists of helper functions or classes used in tests. +You need to import it wherever required. + +--- + +## `test_keys.py` + +Handles **key creation operations**. +Contains a class `TestKeyManagement` with two methods: + +1. **test_create_key:** + Used to create a key with the necessary payload, checks for errors, and cleans up the created key. + +2. **test_key_name_validation:** + Validates creation of a key with different valid and invalid name formats. + +3. **test_duplicate_key_creation:** + Checks for creation of duplicate EZ key and checks if it's failing or not. + +> Similarly, other validations can be implemented on keys. + +--- + +## `test_keys_02.py` + +Handles **Bulk key opeartions and other extra cases**. + +--- + +## `test_keyDetails.py` + +Handles **retrieval of key-related data**. +Contains a class `TestKeyDetails` with three methods: + +1. **test_get_key_names:** + Fetches all created keys and checks the presence of a specific key. + +2. **test_get_key_metadata:** + Checks metadata of existing and non-existing keys and validates the response. + +3. **test_get_key_versions:** + Checks key versions for existing and non-existing keys. + +--- + +## `test_keyOps.py` + +Handles **operations on keys**. +Contains a class `TestKeyOperations` with four methods: + +1. 
**test_temp_key:** + Creates a temporary key used for further roll-over functionality. + +2. **test_roll_over_key:** + Handles proper roll-over of the key. + +3. **test_roll_over_new_material:** + Checks whether the rolled-over key has new material. + +4. **test_generate_data_key_and_decrypt:** + - Generation of data key from EZ key and checks for presence of EDEK and DEK. + - Decryption of EDEK to get back DEK. + +--- + +## `test_keyOps_policy.py` + +Handles **operations on keys based on policy enforcement**. +Checks Key operation by giving incremental access to each opeartion one by one +i.e `create, rollover, getKeyVersion, getMetadata, generateeek, decrypteek, delete` + diff --git a/PyTest-KMS-HDFS/test_kms/test_keyDetails.py b/PyTest-KMS-HDFS/test_kms/test_keyDetails.py new file mode 100644 index 0000000000..23f51d1c43 --- /dev/null +++ b/PyTest-KMS-HDFS/test_kms/test_keyDetails.py @@ -0,0 +1,99 @@ +import requests +import pytest +from utils import fetch_logs + +BASE_URL = "http://localhost:9292/kms/v1" +PARAMS = {"user.name": "keyadmin"} + +class TestKeyDetails: + + @pytest.fixture(autouse=True) + def setup_class(self, create_test_key): + self.test_key = create_test_key + + # *********************************************************************************** + # Get key names + # *********************************************************************************** + def test_get_key_names(self): + response = requests.get(f"{BASE_URL}/keys/names",params=PARAMS) + + if response.status_code!=200: #log check + logs=fetch_logs() + pytest.fail(f"Get key operation failed. API Response: {response.text}\nLogs:\n{logs}") + + print(response.json()) + assert self.test_key["name"] in response.json() + + + # *********************************************************************************** + # Parametrized Get key metadata check for existent and non existent key + # *********************************************************************************** + @pytest.mark.parametrize("key_name, expected_status, expected_response", [ + ("my_key", 200, "valid"), # Key exists, should return valid metadata + ("non-existent-key", 200, "invalid"), # Key does not exist but returns 200 with [] should give 404 + ]) + def test_get_key_metadata(self, headers, key_name, expected_status, expected_response): + + response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", headers=headers, params=PARAMS) + + if response.status_code!=expected_status: #log check + logs=fetch_logs() + pytest.fail(f"Get key metadata operation failed. 
API Response: {response.text}\nLogs:\n{logs}") + + if expected_response == "invalid": + assert response.text.strip() in ["", "[ ]", "{ }"], f"Expected blank response for non-existent key, got: {response.text}" + + print(response.json()) + + # *********************************************************************************** + # Parametrized Get Key version for existent and non existent key + # *********************************************************************************** + @pytest.mark.parametrize("key_name, expected_status, expected_response", [ + ("my_key", 200, "valid"), # Key exists + ("non-existent-key", 200,"invalid"), # Misleading response for non-existent key gives 200 should've given 404 + ]) + def test_get_key_versions(self, headers, key_name, expected_status,expected_response): + + response = requests.get(f"{BASE_URL}/key/{key_name}/_versions", headers=headers, params=PARAMS) + + if response.status_code != expected_status: #log check + logs=fetch_logs() + pytest.fail(f"Get key version operation failed. API Response: {response.text}\nLogs:\n{logs}") + + if expected_response == "invalid": + assert response.text.strip() in ["", "[ ]", "{ }"], f"Expected blank response for non-existent key, got: {response.text}" + + + + + + + + + + + + + + + + + + # try: + # json_response = response.json() + # except requests.exceptions.JSONDecodeError: + + # json_response = response.text # Store raw text response instead + + # if expected_response == "invalid": + # assert json_response =="key not found", f"Unexpected response: {json_response}" + + + + #this below was in testKeyMetadata------------- + # response_data = response.text #The error occurs because the API returns plain text ("key not found") instead of a JSON response when querying a non-existent key. + #works fine if text is used instead json bcoz of my changes + # if expected_response == "valid": + # assert response_data != "key not found" + # elif expected_response == "invalid": + # assert response_data == "key not found" diff --git a/PyTest-KMS-HDFS/test_kms/test_keyOps.py b/PyTest-KMS-HDFS/test_kms/test_keyOps.py new file mode 100644 index 0000000000..e4f8788e66 --- /dev/null +++ b/PyTest-KMS-HDFS/test_kms/test_keyOps.py @@ -0,0 +1,136 @@ +import requests +import pytest +from utils import fetch_logs + +BASE_URL = "http://localhost:9292/kms/v1" +PARAMS = {"user.name": "keyadmin"} + +@pytest.mark.usefixtures("create_test_key") +class TestKeyOperations: + + # Temporary key for testing roll over + def test_temp_key(self, headers): + data = { + "name": "rollover-key", + "cipher": "AES/CTR/NoPadding", + "length": 128, + "description": "Key to check roll over functionality" + } + key_creation_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS) + + if key_creation_response.status_code != 201: #log check + logs=fetch_logs() + pytest.fail(f"Create key operation failed. 
API Response: {key_creation_response.text}\nLogs:\n{logs}") + + + # *********************************************************************************** + # Parametrized Roll over of key + # *********************************************************************************** + @pytest.mark.parametrize("key_name, expected_status", [ + ("rollover-key", 200), # Valid key rollover + ("non-existent-key", 500) # Rollover on a non-existent key + ]) + + def test_roll_over_key(self, headers, key_name, expected_status): + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS) + + if response.status_code != expected_status: #log check + logs=fetch_logs() + pytest.fail(f"Rollover key operation failed. API Response: {response.text}\nLogs:\n{logs}") + + # Cleanup after test + requests.delete(f"{BASE_URL}/key/rollover-key", params=PARAMS) + + + # *********************************************************************************** + # Test for checking roll overed key has new material + # *********************************************************************************** + def test_roll_over_new_material(self, headers): + old_metadata = requests.get(f"{BASE_URL}/key/key1/_metadata", headers=headers, params=PARAMS) + print("Old Metadata:", old_metadata.json()) + + requests.post(f"{BASE_URL}/key/key1", json={}, headers=headers, params=PARAMS) #roll-over here + + new_metadata = requests.get(f"{BASE_URL}/key/key1/_metadata", headers=headers, params=PARAMS) + print("New Metadata:", new_metadata.json()) + + assert old_metadata.json() != new_metadata.json(), "Key rollover should create new key material." + + + # *********************************************************************************** + # Data key generation and decrypting EDEK to get DEK + # *********************************************************************************** + def test_generate_data_key_and_decrypt(self, headers, create_test_key): + # Generate Data Key + key_name=create_test_key["name"] + response = requests.get(f"{BASE_URL}/key/{key_name}/_dek", headers=headers, params=PARAMS) + + if response.status_code != 200: #log check + logs=fetch_logs() + pytest.fail(f"generation of data key operation failed. API Response: {response.text}\nLogs:\n{logs}") + + + data_key_response = response.json() + dek = data_key_response.get("dek") + edek = data_key_response.get("edek") + + print(dek) + print(edek) + + assert dek is not None, "Generated DEK should not be None" + assert edek is not None, "Generated EDEK should not be None" + + # Extracting details for decryption from EDEK + encrypted_key_version = edek.get("encryptedKeyVersion") + encrypted_material = encrypted_key_version.get("material") + name = encrypted_key_version.get("name") + version_name = edek.get("versionName") + iv = edek.get("iv") + + decrypt_payload = { + + "name":name, + "iv": iv, + "material": encrypted_material, + } + + DECRYPT_PARAMS = {"user.name": "keyadmin","eek_op":"decrypt"} + decrypt_response = requests.post(f"{BASE_URL}/keyversion/{version_name}/_eek", json=decrypt_payload, headers=headers, params=DECRYPT_PARAMS) + + if decrypt_response.status_code != 200: #log check + logs=fetch_logs() + pytest.fail(f"Decryption of key operation failed. 
API Response: {response.text}\nLogs:\n{logs}") + + decrypted_data = decrypt_response.json() + print("Decrypted Data:", decrypted_data) # check decrypted data + + # checking the decrypted key matches the original DEK + assert decrypted_data == dek, "Decrypted DEK should match the original DEK" + + + #invalidate cache use -----optional maybe + # def test_generate_data_key_after_invalidate_cache(self, headers, create_test_key): + + # key_name=create_test_key["name"] + # requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS) + + + # data = { + # "name": "key1", + # "cipher": "AES/CTR/NoPadding", + # "length": 128, + # "description": "Test key" + # } + # key_creation_response = requests.post(f"{BASE_URL}/keys", headers=headers, json=data, params=PARAMS) + # assert key_creation_response.status_code == 201, "Key already exists" + + + # response = requests.get(f"{BASE_URL}/key/key1/_dek", headers=headers, params=PARAMS) #hit dek but there is no @1 version + # assert response.status_code == 500, "version @1 found" #gives 500 error bcz of above reason + + # invalidate_cache_param={"user.name": "keyadmin","action":"invalidateCache"} #invalidateCache + # invalidate_response = requests.post(f"{BASE_URL}/key/key1", json={}, headers=headers, params=invalidate_cache_param) + + + # response_after_invalidateCache = requests.get(f"{BASE_URL}/key/key1/_dek", headers=headers, params=PARAMS) #now it works + # assert response_after_invalidateCache.status_code == 200, "version @1 not found" diff --git a/PyTest-KMS-HDFS/test_kms/test_keyOps_policy.py b/PyTest-KMS-HDFS/test_kms/test_keyOps_policy.py new file mode 100644 index 0000000000..47f809bbd9 --- /dev/null +++ b/PyTest-KMS-HDFS/test_kms/test_keyOps_policy.py @@ -0,0 +1,407 @@ +import requests +import pytest +import time +from utils import fetch_logs + +BASE_URL = "http://localhost:9292/kms/v1" + +BASE_URL_RANGER = "http://localhost:6080/service/public/v2/api/policy" +PARAMS={"user.name":"keyadmin"} + +RANGER_AUTH = ('keyadmin', 'rangerR0cks!') # Ranger key admin user +KMS_SERVICE_NAME = "dev_kms" + +# create base policy +@pytest.fixture(scope="function", autouse=True) +def create_initial_kms_policy(): + policy_data = { + "policyName": "pytest-policy", + "service": KMS_SERVICE_NAME, + "resources": { + "keyname": { + "values": ["pytest-*"], # All keys starting with 'pytest-' + "isExcludes": False, + "isRecursive": False + } + }, + "policyItems": [] + } + + # Create policy + response = requests.post(BASE_URL_RANGER, auth=RANGER_AUTH, json=policy_data) + if response.status_code != 200 and response.status_code != 201: + raise Exception(f"Failed to create initial policy: {response.text}") + + created_policy = response.json() + policy_id = created_policy["id"] + yield policy_id + + # Optionally delete policy after tests + requests.delete(f"{BASE_URL_RANGER}/{policy_id}", auth=RANGER_AUTH) + +# method to update policy +def update_kms_policy(policy_id, username, accesses): + update_url = f"{BASE_URL_RANGER}/{policy_id}" + + # Fetch existing policy + response = requests.get(update_url, auth=RANGER_AUTH) + if response.status_code != 200: + raise Exception(f"Failed to fetch policy: {response.text}") + + policy_data = response.json() + + # Ensure policyItems key exists + if "policyItems" not in policy_data: + policy_data["policyItems"] = [] + + # Only add policy item if accesses are provided + if accesses: + policy_data["policyItems"].append({ + "accesses": [{"type": access, "isAllowed": True} for access in accesses], + "users": [username], + "delegateAdmin": False 
+ }) + + # Update the policy + response = requests.put(update_url, auth=RANGER_AUTH, json=policy_data) + time.sleep(30) # Reduced wait time; increase only if propagation is slow + if response.status_code != 200: + raise Exception(f"Failed to update policy: {response.text}") + + + + +# ****** ********************Test Case 01 ******************************************** +# ***** user has "create" access only +# *********************************************************************************** +def test_policy_01(create_initial_kms_policy, headers): + policy_id = create_initial_kms_policy + username = "nobody" + + + # Update policy for this test + update_kms_policy(policy_id, username, accesses=["create"]) + + key_name = "pytest-key-01" + + # create key + response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + #get current version + response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion",params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Get current version failed: {response.text}" + + # Try getting key metadata + response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}" + + # Try rollover + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}" + + #generate DEK + response = requests.get(f"{BASE_URL}/key/{key_name}/_dek",params={"user.name": username}) + assert response.status_code == 500, f"Expected 403 but got {response.status_code}: {response.text}" + + #delete key + response= requests.delete(f"{BASE_URL}/key/{key_name}",params={"user.name": username}) + assert response.status_code == 403, f"Expected 403 but got :{response.text}" + + #cleanup + requests.delete(f"{BASE_URL}/key/{key_name}",params=PARAMS) + + + +# ****** ********************Test Case 02 ******************************************** +# ***** user has "create, delete" access only +# *********************************************************************************** +def test_policy_02(create_initial_kms_policy, headers): + policy_id = create_initial_kms_policy + username = "nobody" + + + #Update policy for this test + update_kms_policy(policy_id, username, accesses=["create","delete"]) + + key_name = "pytest-key-02" + + # create key + response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + #get current version + response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion",params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Get current version failed: {response.text}" + + # Try getting key metadata + response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}" + + # Try rollover + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Expected 403 but got {response.status_code}: 
{response.text}" + + #generate DEK + response = requests.get(f"{BASE_URL}/key/{key_name}/_dek",params={"user.name": username}) + assert response.status_code == 500, f"Expected 500 but got {response.status_code}: {response.text}" + + #delete key + response= requests.delete(f"{BASE_URL}/key/{key_name}",params={"user.name": username}) + assert response.status_code == 200, f"Key deletion failed :{response.text}" + + +# ****** ********************Test Case 03 ******************************************** +# ***** user has "create, rollover, delete" access only +# *********************************************************************************** +def test_policy_03(create_initial_kms_policy, headers): + policy_id = create_initial_kms_policy + username = "nobody" + + + #Update policy for this test + update_kms_policy(policy_id, username, accesses=["create","delete","rollover"]) + + key_name = "pytest-key-03" + + # create key + response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # Try rollover + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}" + + #get current version + response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion",params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Get current version failed: {response.text}" + + # Try getting key metadata + response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}" + + #generate DEK + response = requests.get(f"{BASE_URL}/key/{key_name}/_dek",params={"user.name": username}) + assert response.status_code == 500, f"Expected 403 but got {response.status_code}: {response.text}" + + #delete key + response= requests.delete(f"{BASE_URL}/key/{key_name}",params={"user.name": username}) + assert response.status_code == 200, f"Key deletion failed :{response.text}" + + +# ****** ********************Test Case 04 ******************************************** +# ***** user has "create, rollover, getKeyVersion, delete" access only +# *********************************************************************************** +def test_policy_04(create_initial_kms_policy, headers): + policy_id = create_initial_kms_policy + username = "nobody" + + + #Update policy for this test + update_kms_policy(policy_id, username, accesses=["create","delete","rollover","get"]) + + key_name = "pytest-key-04" + + # create key + response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # Try rollover + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}" + + #get current version + response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion",params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Get current version failed: {response.text}" + + # Try getting key metadata + response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", 
params={"user.name": username}, headers=headers) + assert response.status_code == 403, f"Expected 403 but got {response.status_code}: {response.text}" + + #generate DEK + response = requests.get(f"{BASE_URL}/key/{key_name}/_dek",params={"user.name": username}) + assert response.status_code == 500, f"Expected 403 but got {response.status_code}: {response.text}" + + #delete key + response= requests.delete(f"{BASE_URL}/key/{key_name}",params={"user.name": username}) + assert response.status_code == 200, f"Key deletion failed :{response.text}" + + + +# ****** ********************Test Case 05 ******************************************** +# ***** user has "create, rollover, getKeyVersion, getMetadata, delete" access only +# *********************************************************************************** +def test_policy_05(create_initial_kms_policy, headers): + policy_id = create_initial_kms_policy + username = "nobody" + + + #Update policy for this test + update_kms_policy(policy_id, username, accesses=["create","delete","rollover","get","getmetadata"]) + + key_name = "pytest-key-05" + + # create key + response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # Try rollover + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}" + + #get current version + response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion",params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Get current version failed: {response.text}" + + # Try getting key metadata + response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Expected 403 but got {response.status_code}: {response.text}" + + #generate DEK + response = requests.get(f"{BASE_URL}/key/{key_name}/_dek",params={"user.name": username}) + assert response.status_code == 500, f"Expected 403 but got {response.status_code}: {response.text}" + + #delete key + response= requests.delete(f"{BASE_URL}/key/{key_name}",params={"user.name": username}) + assert response.status_code == 200, f"Key deletion failed :{response.text}" + + + +# ****** ********************Test Case 06 ******************************************** +# ***** user has "create, rollover, getKeyVersion, getMetadata, generateeek, delete" access only +# *********************************************************************************** +def test_policy_06(create_initial_kms_policy, headers): + policy_id = create_initial_kms_policy + username = "nobody" + + + #Update policy for this test + update_kms_policy(policy_id, username, accesses=["create","delete","rollover","get","getmetadata","generateeek"]) + + key_name = "pytest-key-06" + + # create key + response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers) + assert response.status_code == 201, f"Key creation failed: {response.text}" + + # Try rollover + response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers) + assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}" + + #get current version + response = 
+    assert response.status_code == 200, f"Get current version failed: {response.text}"
+
+    # Try getting key metadata
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers)
+    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"
+
+    # generate DEK
+    DEK_PARAMS = {"eek_op": "generate", "num_keys": 1, "user.name": username}
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_eek", params=DEK_PARAMS)
+    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"
+
+    # delete key
+    response = requests.delete(f"{BASE_URL}/key/{key_name}", params={"user.name": username})
+    assert response.status_code == 200, f"Key deletion failed: {response.text}"
+
+
+# ****** ********************Test Case 07 ********************************************
+# ***** user has all accesses: "create, rollover, getKeyVersion, getMetadata, generateeek, decrypteek, delete"
+# ***********************************************************************************
+def test_policy_07(create_initial_kms_policy, headers):
+    policy_id = create_initial_kms_policy
+    username = "nobody"
+
+    # Update policy for this test
+    update_kms_policy(policy_id, username, accesses=["create","delete","rollover","get","getmetadata","generateeek","decrypteek"])
+
+    key_name = "pytest-key-07"
+
+    # create key
+    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers)
+    assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+    # Try rollover
+    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers)
+    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"
+
+    # get current version
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params={"user.name": username}, headers=headers)
+    assert response.status_code == 200, f"Get current version failed: {response.text}"
+
+    # Try getting key metadata
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers)
+    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"
+
+    # generate DEK
+    DEK_PARAMS = {"eek_op": "generate", "num_keys": 1, "user.name": username}
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_eek", params=DEK_PARAMS)
+    assert response.status_code == 200, f"Expected 200 but got {response.status_code}: {response.text}"
+
+    # decrypt generated EDEK
+    eek_response = response.json()[0]
+
+    material = eek_response["encryptedKeyVersion"]["material"]
+    name = eek_response["encryptedKeyVersion"]["name"]
+    iv = eek_response["iv"]
+    version_name = eek_response["versionName"]
+
+    decrypt_payload = {
+        "name": name,
+        "iv": iv,
+        "material": material,
+    }
+
+    DECRYPT_PARAMS = {"eek_op": "decrypt", "user.name": username}
+    decrypt_response = requests.post(f"{BASE_URL}/keyversion/{version_name}/_eek", params=DECRYPT_PARAMS, headers=headers, json=decrypt_payload)
+    assert decrypt_response.status_code == 200, f"Decryption of EDEK failed. Got {decrypt_response.status_code}: {decrypt_response.text}"
+
+    # delete key
+    response = requests.delete(f"{BASE_URL}/key/{key_name}", params={"user.name": username})
+    assert response.status_code == 200, f"Key deletion failed: {response.text}"
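
Every policy test in this file drives `update_kms_policy(...)`, a helper that is imported from elsewhere in the suite rather than defined in this hunk. For orientation only, a minimal sketch of what such a helper could look like against Ranger Admin's public v2 policy API is shown below; the admin URL, the credentials, and the single-item `policyItems` layout are assumptions for illustration, not the suite's actual implementation.

```python
# Illustrative sketch only -- the suite's real update_kms_policy helper lives elsewhere.
# Assumptions: Ranger Admin on localhost:6080 and the dev-docker admin credentials below.
import requests

RANGER_ADMIN_URL = "http://localhost:6080"   # assumed Ranger Admin endpoint
RANGER_AUTH = ("admin", "rangerR0cks!")      # assumed dev credentials

def update_kms_policy(policy_id, username, accesses):
    """Rewrite the policy so `username` holds exactly `accesses` (None or empty = no access)."""
    policy_url = f"{RANGER_ADMIN_URL}/service/public/v2/api/policy/{policy_id}"

    # Fetch the current policy, replace its policy items, and push it back.
    policy = requests.get(policy_url, auth=RANGER_AUTH).json()

    if accesses:
        policy["policyItems"] = [{
            "users": [username],
            "accesses": [{"type": access, "isAllowed": True} for access in accesses],
            "delegateAdmin": False,
        }]
    else:
        policy["policyItems"] = []  # strip every grant for the user

    response = requests.put(policy_url, auth=RANGER_AUTH, json=policy)
    response.raise_for_status()
```

With a helper shaped like this, passing `accesses=None` (as Test Case 08 below does) clears every grant, so the KMS is expected to reject all key operations for the user.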
+
+
+# ****** ********************Test Case 08 ********************************************
+# ***** user has no access
+# ***********************************************************************************
+def test_policy_08(create_initial_kms_policy, headers):
+    policy_id = create_initial_kms_policy
+    username = "nobody"
+
+    # Update policy for this test
+    update_kms_policy(policy_id, username, accesses=None)
+
+    key_name = "pytest-key-08"
+
+    # create key
+    response = requests.post(f"{BASE_URL}/keys", json={"name": key_name}, params={"user.name": username}, headers=headers)
+    assert response.status_code == 403, f"Creation of key, expected 403 but got {response.status_code}: {response.text}"
+
+    # Try rollover
+    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, params={"user.name": username}, headers=headers)
+    assert response.status_code == 403, f"Rollover of key, expected 403 but got {response.status_code}: {response.text}"
+
+    # get current version
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", params={"user.name": username}, headers=headers)
+    assert response.status_code == 403, f"Get current version, expected 403 but got {response.status_code}: {response.text}"
+
+    # Try getting key metadata
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_metadata", params={"user.name": username}, headers=headers)
+    assert response.status_code == 403, f"Get key metadata, expected 403 but got {response.status_code}: {response.text}"
+
+    # generate DEK
+    DEK_PARAMS = {"eek_op": "generate", "num_keys": 1, "user.name": username}
+    response = requests.get(f"{BASE_URL}/key/{key_name}/_eek", params=DEK_PARAMS)
+    assert response.status_code == 500, f"Generate DEK, expected 500 but got {response.status_code}: {response.text}"
+
+    # delete key
+    response = requests.delete(f"{BASE_URL}/key/{key_name}", params={"user.name": username})
+    assert response.status_code == 403, f"Delete key, expected 403 but got {response.status_code}: {response.text}"
\ No newline at end of file
diff --git a/PyTest-KMS-HDFS/test_kms/test_keys.py b/PyTest-KMS-HDFS/test_kms/test_keys.py
new file mode 100644
index 0000000000..d632bf5c07
--- /dev/null
+++ b/PyTest-KMS-HDFS/test_kms/test_keys.py
@@ -0,0 +1,75 @@
+import requests
+import pytest
+from utils import fetch_logs
+
+BASE_URL = "http://localhost:9292/kms/v1"
+PARAMS = {"user.name": "keyadmin"}
+
+class TestKeyManagement:
+
+    @pytest.fixture(autouse=True)
+    def setup_class(self, create_test_key):
+        self.test_key = create_test_key
+
+    def test_create_key(self, headers):
+        key_data = {
+            "name": "key2",
+            "cipher": "AES/CTR/NoPadding",
+            "length": 128,
+            "description": "New key for checking key creation functionality"
+        }
+        response = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+
+        if response.status_code != 201:
+            error_logs = fetch_logs()  # Fetch logs on failure
+            pytest.fail(f"Key creation failed. API Response: {response.text}\nLogs:\n{error_logs}")
+
+        requests.delete(f"{BASE_URL}/key/key2", params=PARAMS)  # cleanup key2
+
+    # --------------------------------- key name validation on creation ------------------------------
+    @pytest.mark.parametrize("name, expected_status", [
+        ("valid-key", 201),
+        ("", 400),              # Invalid case: Empty name
+        ("@invalid!", 400),     # Invalid case: Special characters
+        ("invalid--key", 400)   # Invalid case: consecutive separators (--, __, _-, -_) not allowed
+    ])
+    def test_key_name_validation(self, headers, name, expected_status):
+        key_data = {
+            "name": name,
+            "cipher": "AES/CTR/NoPadding",
+            "length": 128,
+            "description": "Validation test"
+        }
+        response = requests.post(f"{BASE_URL}/keys", json=key_data, headers=headers, params=PARAMS)
+
+        if response.status_code != expected_status:
+            error_logs = fetch_logs()  # Fetch logs on failure
+            pytest.fail(f"Key validation failed. API Response: {response.text}\nLogs:\n{error_logs}")
+
+        if expected_status == 201:
+            requests.delete(f"{BASE_URL}/key/{name}", params=PARAMS)
+
+    # Negative test ---- duplicate key creation test ----------------------------------------------
+    def test_duplicate_key_creation(self, headers):
+        key_name = "duplicate-key"
+        key_data = {
+            "name": key_name,
+            "cipher": "AES/CTR/NoPadding",
+            "length": 128,
+            "description": "Testing duplicate key creation"
+        }
+
+        response1 = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+        assert response1.status_code == 201, f"Initial key creation failed: {response1.text}"
+
+        # creating the same key again
+        response2 = requests.post(f"{BASE_URL}/keys", headers=headers, json=key_data, params=PARAMS)
+
+        assert response2.status_code == 500, f"Duplicate key creation was expected to fail but got {response2.status_code}: {response2.text}"
+
+        # Cleanup
+        requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+
diff --git a/PyTest-KMS-HDFS/test_kms/test_keys_02.py b/PyTest-KMS-HDFS/test_kms/test_keys_02.py
new file mode 100644
index 0000000000..23bf024e71
--- /dev/null
+++ b/PyTest-KMS-HDFS/test_kms/test_keys_02.py
@@ -0,0 +1,151 @@
+import requests
+import pytest
+from utils import fetch_logs
+
+BASE_URL = "http://localhost:9292/kms/v1"
+PARAMS = {"user.name": "keyadmin"}
+
+# ***********************************************************************************
+# Test that after key rollover: new version = old version + 1
+# ***********************************************************************************
+def test_versionIncrement_after_rollover(headers):
+
+    key_name = "key_roll"
+    key_data = {
+        "name": key_name
+    }
+    # create key
+    response = requests.post(f"{BASE_URL}/keys", json=key_data, params=PARAMS, headers=headers)
+    assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+    # check version before rollover
+    response_before = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", headers=headers, params=PARAMS)
+    assert response_before.status_code == 200, f"Failed to get current version. Response: {response_before.text}"
+
+    # extract version number
+    version_before = response_before.json().get("versionName")  # e.g. "test-key@0"
+    version_num_before = int(version_before.split("@")[1])
+    print(f"version before: {version_num_before}")
+
+    # roll over
+    response = requests.post(f"{BASE_URL}/key/{key_name}", json={}, headers=headers, params=PARAMS)
+    assert response.status_code == 200, f"Failed to perform rollover. Response: {response.text}"
+
+    # check version after rollover
+    response_after = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", headers=headers, params=PARAMS)
+    assert response_after.status_code == 200, f"Failed to get current version. Response: {response_after.text}"
+
+    # extract new version number
+    version_after = response_after.json().get("versionName")
+    version_num_after = int(version_after.split("@")[1])
+    print(f"version after: {version_num_after}")
+
+    assert version_num_after == version_num_before + 1, (
+        f"Expected version to increment. Before: {version_before}, After: {version_after}"
+    )
+
+    # Cleanup key after test
+    requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+# ***********************************************************************************
+# Test that the material used to create a key matches the material returned for the current key version
+# ***********************************************************************************
+def test_key_material(headers):
+
+    key_name = "test-key"
+    key_material = "G90ZtTKOWIICXG_wpqx0tA"
+
+    key_data = {
+        "name": key_name,
+        "material": key_material
+    }
+
+    # create key
+    response = requests.post(f"{BASE_URL}/keys", json=key_data, params=PARAMS, headers=headers)
+    assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+    # check material from current version
+    version_response = requests.get(f"{BASE_URL}/key/{key_name}/_currentversion", headers=headers, params=PARAMS)
+    assert version_response.status_code == 200, f"Failed to get current version. Response: {version_response.text}"
+
+    response_keyMaterial = version_response.json()
+    response_keyMaterial = response_keyMaterial["material"]
+
+    assert key_material == response_keyMaterial, f"Key material does not match. Passed key material: {key_material}, got key material: {response_keyMaterial}"
+
+    # Cleanup key after test
+    requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+
+# ***********************************************************************************
+# Test that a key is no longer listed after deletion
+# ***********************************************************************************
+def test_deleted_key_not_in_list(headers):
+
+    key_name = "Delete-key"
+
+    key_data = {
+        "name": key_name,
+    }
+
+    # create key
+    response = requests.post(f"{BASE_URL}/keys", json=key_data, params=PARAMS, headers=headers)
+    assert response.status_code == 201, f"Key creation failed: {response.text}"
+
+    # Delete key
+    requests.delete(f"{BASE_URL}/key/{key_name}", params=PARAMS)
+
+    list_response = requests.get(f"{BASE_URL}/keys/names", params=PARAMS)
+
+    key_list = list_response.json()
+
+    assert key_name not in key_list, "Deleted key still exists; deletion might have failed"
+
+
+# ***********************************************************************************
+# Test key operations in bulk
+# ***********************************************************************************
+def test_bulk_key_operation(headers):
+
+    key_names = [f"key{i}" for i in range(5)]
+    created_keys = []
+
+    # Create 5 keys
+    for name in key_names:
+        key_data = {
+            "name": name,
+        }
+
+        response = requests.post(f"{BASE_URL}/keys", json=key_data, params=PARAMS, headers=headers)
+        assert response.status_code == 201, f"Failed to create key {name}: {response.text}"
+        created_keys.append(name)
+
+    # Get all keys and verify they exist
+    list_response = requests.get(f"{BASE_URL}/keys/names", headers=headers, params=PARAMS)
+    assert list_response.status_code == 200, f"Fetching key list failed: {list_response.text}"
+
+    all_keys = list_response.json()
+
+    for name in created_keys:
+        assert name in all_keys, f"Key '{name}' not found in key list."
+
+    # Get metadata for each key
+    for name in created_keys:
+        meta_response = requests.get(f"{BASE_URL}/key/{name}", headers=headers, params=PARAMS)
+        assert meta_response.status_code == 200, f"Failed to get metadata for key {name}"
+
+    # Delete all 5 keys
+    for name in created_keys:
+        del_response = requests.delete(f"{BASE_URL}/key/{name}", params=PARAMS)
+        assert del_response.status_code == 200, f"Failed to delete key {name}: {del_response.text}"
+
+    # Verify keys are deleted
+    final_list_response = requests.get(f"{BASE_URL}/keys/names", headers=headers, params=PARAMS)
+    assert final_list_response.status_code == 200, "Fetching key list after deletion failed"
+    final_keys = final_list_response.json()
+
+    for name in created_keys:
+        assert name not in final_keys, f"Deleted key '{name}' still found in key list"
diff --git a/PyTest-KMS-HDFS/test_kms/utils.py b/PyTest-KMS-HDFS/test_kms/utils.py
new file mode 100644
index 0000000000..f16a9a01fe
--- /dev/null
+++ b/PyTest-KMS-HDFS/test_kms/utils.py
@@ -0,0 +1,14 @@
+# Helpers kept in utils (not conftest fixtures) to avoid fixture scope mismatches
+import subprocess
+
+KMS_CONTAINER_NAME = "ranger-kms"  # Replace with your actual KMS container name
+KMS_LOG_FILE = "/var/log/ranger/kms/ranger-kms-ranger-kms.example.com-root.log"
+
+def fetch_logs():
+    try:
+        cmd = f"docker exec {KMS_CONTAINER_NAME} tail -n 100 {KMS_LOG_FILE}"
+        logs = subprocess.check_output(cmd, shell=True, text=True)
+        error_logs = [line for line in logs.split("\n") if "ERROR" in line or "Exception" in line]
+        return "\n".join(error_logs) if error_logs else "No recent errors in logs."
+    except Exception as e:
+        return f"Failed to fetch logs from container: {str(e)}"
diff --git a/test-pytest.sh b/test-pytest.sh
new file mode 100755
index 0000000000..e595be4f85
--- /dev/null
+++ b/test-pytest.sh
@@ -0,0 +1,110 @@
+#!/bin/bash
+
+# handle input
+if [ $# -lt 1 ]; then
+    echo "No arguments passed, using default DB: postgres"
+    DB_TYPE=postgres
+    EXTRA_SERVICES=()
+else
+    DB_TYPE=$1
+    shift
+    EXTRA_SERVICES=("$@")
+fi
+
+# Remove all containers and clean up docker space
+docker rm -f $(docker ps -aq --filter "name=ranger")
+docker system prune --all --force --volumes
+
+# path setup
+RANGER_DOCKER_PATH="dev-support/ranger-docker"
+TESTS_PATH="$HOME/Desktop/PyTest-KMS-EP"
+cd "$RANGER_DOCKER_PATH" || exit 1
+
+# Download archives
+if [ ${#EXTRA_SERVICES[@]} -gt 0 ]; then
+    ./download-archives.sh "${EXTRA_SERVICES[@]}"
+fi
+
+export DOCKER_BUILDKIT=1
+export COMPOSE_DOCKER_CLI_BUILD=1
+export RANGER_DB_TYPE="$DB_TYPE"
+
+# Build base image
+docker-compose -f docker-compose.ranger-base.yml build
+
+# Build Apache Ranger
+docker-compose -f docker-compose.ranger-base.yml -f docker-compose.ranger-build.yml up --build
+
+# Bring up basic services
+DOCKER_FILES=(
+    "-f" "docker-compose.ranger-${RANGER_DB_TYPE}.yml"
+    "-f" "docker-compose.ranger.yml"
+    "-f" "docker-compose.ranger-usersync.yml"
+    "-f" "docker-compose.ranger-tagsync.yml"
+    "-f" "docker-compose.ranger-kms.yml"
+)
+
+# add extra services from input
+for service in "${EXTRA_SERVICES[@]}"; do
+    DOCKER_FILES+=("-f" "docker-compose.ranger-${service}.yml")
+done
+
+# start containers
+docker compose "${DOCKER_FILES[@]}" up -d --build
+
+echo "🕐 Waiting for containers to start..."
+sleep 60
+
+echo "✅ Checking container status..."
+BASE_SERVICES=(ranger ranger-${RANGER_DB_TYPE} ranger-zk ranger-solr ranger-usersync ranger-tagsync ranger-kms)
+ALL_SERVICES=("${BASE_SERVICES[@]}")
+
+for service in "${EXTRA_SERVICES[@]}"; do
+    ALL_SERVICES+=("ranger-${service}")
+done
+
+flag=true
+for container in "${ALL_SERVICES[@]}"; do
+    if [[ $(docker inspect -f '{{.State.Running}}' "$container" 2>/dev/null) == "true" ]]; then
+        echo "✔️ Container $container is running!"
+    else
+        echo "❌ Container $container is NOT running!"
+        flag=false
+    fi
+done
+
+# RUN TESTS --------
+if [[ $flag == true ]]; then
+    echo "🚀 All required containers are up. Running test cases..."
+    cd "$TESTS_PATH" || exit 1          # Switch to the tests directory
+
+    python -m venv myenv                # Create a new environment
+    source myenv/bin/activate           # Activate it
+    pip install -r requirements.txt     # Install dependencies
+
+    pytest -vs test_kms/ --html=report_kms.html    # Run the KMS API tests
+    pytest -vs test_hdfs/ --html=report_hdfs.html  # Run the HDFS encryption tests
+else
+    echo "⚠️ Some containers failed to start. Exiting..."
+    docker stop $(docker ps -q --filter "name=ranger") && docker rm $(docker ps -aq --filter "name=ranger")
+    exit 1
+fi
+
+echo "🧹 Cleaning up containers..."
+docker stop $(docker ps -q --filter "name=ranger") && docker rm $(docker ps -aq --filter "name=ranger")
+
+# Open the generated reports
+echo "Opening test reports..."
+if command -v xdg-open &> /dev/null; then
+    xdg-open report_kms.html   # Opens the test_kms report
+    xdg-open report_hdfs.html  # Opens the test_hdfs report
+elif command -v open &> /dev/null; then
+    open report_kms.html       # macOS command
+    open report_hdfs.html      # macOS command
fi
+
+echo "✅ Test execution complete and environment cleaned up!"
+exit 0
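
The status loop in the script shells out to `docker inspect` once per expected container. Since the Python suite already depends on the docker SDK (see requirements.txt and the `docker.from_env()` usage in test_hdfs/conftest.py), the same check could also be expressed in Python. A minimal sketch follows; the container names are assumed to follow the `ranger-<service>` convention of the compose files and this helper is not part of the suite itself.

```python
# Hypothetical Python counterpart of the shell status loop, using the docker SDK
# that the suite already installs. Container names are assumptions matching the
# "ranger-<service>" naming used by the compose files.
import docker
from docker.errors import NotFound

def all_containers_running(names):
    client = docker.from_env()
    healthy = True
    for name in names:
        try:
            status = client.containers.get(name).status
        except NotFound:
            print(f"Container {name} not found")
            healthy = False
            continue
        if status == "running":
            print(f"Container {name} is running")
        else:
            print(f"Container {name} is NOT running (status: {status})")
            healthy = False
    return healthy

if __name__ == "__main__":
    base = ["ranger", "ranger-postgres", "ranger-zk", "ranger-solr",
            "ranger-usersync", "ranger-tagsync", "ranger-kms"]
    raise SystemExit(0 if all_containers_running(base) else 1)
```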