Parallelize tests #651

Draft
dan-mm wants to merge 29 commits into main from parallelize-tests
Changes from 1 commit

Commits (29)
aeb8987
initial commit for parallelization; introduced parallel_id to Runner argu…
dan-mm Jan 11, 2024
f5d6cc0
changed conftest.py test cleanup to be per actual test session, not w…
dan-mm Jan 12, 2024
0057a4e
CI jobs to use parallel tests
dan-mm Jan 12, 2024
6d2d1b1
fix for test_uri_local_dir
dan-mm Jan 12, 2024
5c60e38
add parallel_id to networks; revert uri test fix; try different metho…
dan-mm Jan 12, 2024
30137e8
fix network check test
dan-mm Jan 15, 2024
66b2987
Merge branch 'main' into parallelize-tests
dan-mm Jan 16, 2024
ec5356f
- conftest cleanup now applies only after all workers finish
dan-mm Jan 16, 2024
e41731b
update pytest command for workflow
dan-mm Jan 18, 2024
d93d5d3
update workflow test command; split test action into regular gmt test…
dan-mm Jan 18, 2024
148517c
changed to | tee -a so we can see the output
dan-mm Jan 18, 2024
d5a2fb8
WIP parallelize rewrite to not edit runner.py, but use parallel_id on…
dan-mm Jan 29, 2024
0cb02f4
remove parallel_id from runner; move custom loader and supporting fun…
dan-mm Feb 8, 2024
fa3e708
- updated pytest runstring; setup tests to use new test_functions wit…
dan-mm Feb 8, 2024
c160fac
smoke tests now use temp directory instead of stress-application dire…
dan-mm Feb 9, 2024
d5564dc
parallelize the tmp docker image
dan-mm Feb 9, 2024
3f4d7df
serialize failing test
dan-mm Feb 9, 2024
c9f28d5
removed leftover unneeded fstring
dan-mm Feb 9, 2024
a67cdf3
fix typo in test_volume_loading_subdirectories_root failing test (san…
dan-mm Feb 9, 2024
365bc6e
don't run examples directory tests
dan-mm Feb 9, 2024
693fe74
debug statement
dan-mm Feb 9, 2024
309a330
- found tests that were being run without no_build flag when they sho…
dan-mm Feb 13, 2024
eb537e7
corrected workflow input check syntax
dan-mm Feb 13, 2024
d860a33
capitalization
dan-mm Feb 13, 2024
5c8f3f0
github workflow inputs aren't real bools
dan-mm Feb 13, 2024
0d8f26a
fix test_jobs (improper cleanup after insert job test); cleanup test …
dan-mm Feb 16, 2024
5cbc0b0
updated tests Readme
dan-mm Feb 16, 2024
d5087ed
removed unneeded dummy cpu util provider; renamed RUN_NAME -> name fo…
dan-mm Feb 16, 2024
c71ca90
Merge branch 'main' into parallelize-tests
dan-mm Feb 16, 2024
smoke tests now use temp directory instead of stress-application directory; '-' instead of '_' for parallel id; depends_on now properly writes yaml in test parallelization; setup_runner doesn't override uri if it's passed in; yml_parsing tests serialized
dan-mm committed Feb 9, 2024

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature. The key has expired.
commit c160fac05916aa40fef7c87fb62dbc13661069f2
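
The gist of this commit, per the message above: suffix every Compose-level identifier (service, network, container_name, depends_on entry) with the worker's parallel id so concurrent runs don't collide on Docker resources, using '-' rather than '_' as the separator. A minimal sketch of that renaming, with service and network names that are illustrative rather than taken from this diff:

    import copy

    def suffix_compose(compose, parallel_id):
        # Sketch of the renaming edit_yml_with_id performs in the diff below;
        # only the list forms are handled here, the real code also covers dicts.
        out = copy.deepcopy(compose)
        out['services'] = {f"{name}-{parallel_id}": info
                           for name, info in out['services'].items()}
        for info in out['services'].values():
            if 'container_name' in info:
                info['container_name'] = f"{info['container_name']}-{parallel_id}"
            if 'networks' in info:
                info['networks'] = [f"{n}-{parallel_id}" for n in info['networks']]
            if 'depends_on' in info:
                info['depends_on'] = [f"{d}-{parallel_id}" for d in info['depends_on']]
        if 'networks' in out:
            out['networks'] = [f"{n}-{parallel_id}" for n in out['networks']]
        return out

    compose = {'services': {'web': {'container_name': 'web',
                                    'networks': ['backend'],
                                    'depends_on': ['db']},
                            'db': {'networks': ['backend']}},
               'networks': ['backend']}
    print(suffix_compose(compose, 'a1b2c3'))
    # services become web-a1b2c3 / db-a1b2c3, both on network backend-a1b2c3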
runner.py (2 changes: 0 additions & 2 deletions)
@@ -72,7 +72,6 @@ def __init__(self,
         self._uri_type = uri_type
         self._original_filename = filename
         self._branch = branch
-        #DMM:MARK
         self._tmp_folder = "/tmp/green-metrics-tool"
         self._usage_scenario = {}
         self._architecture = utils.get_architecture()
@@ -280,7 +279,6 @@ def check_running_containers(self):
                                 check=True, encoding='UTF-8')
         for line in result.stdout.splitlines():
             for running_container in line.split(','): # if docker container has multiple tags, they will be split by comma, so we only want to
-                #DMM:MARK
                 for service_name in self._usage_scenario.get('services', {}):
                     if 'container_name' in self._usage_scenario['services'][service_name]:
                         container_name = self._usage_scenario['services'][service_name]['container_name']
tests/smoke_test.py (10 changes: 8 additions & 2 deletions)
@@ -3,6 +3,7 @@
 import subprocess
 import re
 import pytest
+import shutil

 CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

@@ -21,15 +22,20 @@
 # Runs once per file before any test(
 #pylint: disable=expression-not-assigned
 def setup_module():
+    parallel_id = utils.randomword(12)
+    test_case_path=os.path.join(CURRENT_DIR, 'stress-application/')
+    tmp_dir_path=os.path.join(CURRENT_DIR, 'tmp', parallel_id)
+    shutil.copytree(test_case_path, tmp_dir_path)
+
     out = io.StringIO()
     err = io.StringIO()
     GlobalConfig(config_name='test-config.yml').config
     with redirect_stdout(out), redirect_stderr(err):
-        uri = os.path.abspath(os.path.join(CURRENT_DIR, 'stress-application/'))
+        uri = os.path.abspath(tmp_dir_path)
         subprocess.run(['docker', 'compose', '-f', uri+'/compose.yml', 'build'], check=True)

     # Run the application
-    runner = Tests.setup_runner(usage_scenario="stress_application.yml", name=RUN_NAME, uri=uri, uri_type='folder', dev_no_build=True, dev_no_sleeps=True, dev_no_metrics=False, skip_system_checks=False)
+    runner = Tests.setup_runner(name=RUN_NAME, uri=uri, uri_type='folder', dev_no_build=True, dev_no_sleeps=True, dev_no_metrics=False, skip_system_checks=False, create_tmp_directory=False, parallel_id=parallel_id)
     runner.run()

 #pylint: disable=global-statement
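
With this, every worker builds the stress application from its own copy under tests/tmp/<parallel_id>, so concurrent 'docker compose build' calls no longer share a directory. The diff adds no matching teardown for the copy; a possible one, sketched purely as an assumption (this module-level variable does not exist in the diff):

    import shutil

    TMP_DIR_PATH = None  # setup_module would need to store its tmp_dir_path here

    def teardown_module():
        # Hypothetical cleanup, not part of this commit: drop the worker's
        # private copy so tests/tmp does not grow by one directory per run.
        if TMP_DIR_PATH:
            shutil.rmtree(TMP_DIR_PATH, ignore_errors=True)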
tests/test_functions.py (32 changes: 20 additions & 12 deletions)
@@ -36,7 +36,7 @@ def replace_include_in_usage_scenario(usage_scenario_path, docker_compose_filena
         file.write(data)

 def parallelize_runner_folders(runner, parallel_id):
-    runner._tmp_folder = f"/tmp/gmt_tests_{parallel_id}/green-metrics-tool/"
+    runner._tmp_folder = f"/tmp/gmt_tests-{parallel_id}/green-metrics-tool/"
     runner._folder = f"{runner._tmp_folder}/repo"

 def edit_yml_with_id(yml_path, parallel_id):
@@ -46,36 +46,42 @@ def edit_yml_with_id(yml_path, parallel_id):
     # Update services
     services_copy = dict(yml_data.get('services', {}))
     for service_name, service_info in services_copy.items():
-        new_service_name = f"{service_name}_{parallel_id}"
+        new_service_name = f"{service_name}-{parallel_id}"
         yml_data['services'][new_service_name] = service_info
         del yml_data['services'][service_name]

         # Update networks within service
         service_networks = service_info.get('networks')
         if service_networks:
             if isinstance(service_networks, list):
-                service_info['networks'] = [f"{network}_{parallel_id}" for network in service_networks]
+                service_info['networks'] = [f"{network}-{parallel_id}" for network in service_networks]
             elif isinstance(service_networks, dict):
-                service_info['networks'] = {f"{key}_{parallel_id}": value for key, value in service_networks.items()}
+                service_info['networks'] = {f"{key}-{parallel_id}": value for key, value in service_networks.items()}

         if 'container_name' in service_info:
-            service_info['container_name'] = f"{service_info['container_name']}_{parallel_id}"
+            service_info['container_name'] = f"{service_info['container_name']}-{parallel_id}"

         if 'depends_on' in service_info:
-            service_info['depends_on'] = [f"{dep}_{parallel_id}" for dep in service_info['depends_on']]
+            if isinstance(service_info['depends_on'], list):
+                service_info['depends_on'] = [f"{dep}-{parallel_id}" for dep in service_info['depends_on']]
+            elif isinstance(service_info['depends_on'], dict):
+                service_info['depends_on'] = {f"{key}-{parallel_id}": value for key, value in service_info['depends_on'].items()}
+            else:
+                service_info['depends_on'] = f"{service_info['depends_on']}-{parallel_id}"
+
+
     # top level networks
     networks = yml_data.get('networks')
     if networks:
         if isinstance(networks, list):
-            yml_data['networks'] = [f"{network}_{parallel_id}" for network in networks]
+            yml_data['networks'] = [f"{network}-{parallel_id}" for network in networks]
         elif isinstance(networks, dict):
-            yml_data['networks'] = {f"{key}_{parallel_id}": value for key, value in networks.items()}
+            yml_data['networks'] = {f"{key}-{parallel_id}": value for key, value in networks.items()}

     # Update container names in the flow section
     for item in yml_data.get('flow', []):
         if 'container' in item:
-            item['container'] = f"{item['container']}_{parallel_id}"
+            item['container'] = f"{item['container']}-{parallel_id}"

     # Save the updated YAML file
     with open(yml_path, 'w', encoding='utf-8') as fp:
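
The rewritten depends_on handling mirrors the shapes Compose accepts: a list of service names or a mapping with per-dependency options, with a bare string kept as a defensive fallback. The same branching, pulled out into a standalone helper for illustration (the function name is ours, not the diff's):

    def suffix_depends_on(value, parallel_id):
        # List form: ['db', 'cache']
        if isinstance(value, list):
            return [f"{dep}-{parallel_id}" for dep in value]
        # Mapping form: {'db': {'condition': 'service_started'}}
        if isinstance(value, dict):
            return {f"{dep}-{parallel_id}": opts for dep, opts in value.items()}
        # Bare string fallback
        return f"{value}-{parallel_id}"

    assert suffix_depends_on(['db'], 'x1') == ['db-x1']
    assert suffix_depends_on({'db': {'condition': 'service_started'}}, 'x1') == {'db-x1': {'condition': 'service_started'}}
    assert suffix_depends_on('db', 'x1') == 'db-x1'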
@@ -116,17 +122,19 @@ def setup_runner(name=None, usage_scenario="usage_scenario.yml", docker_compose=
         usage_scenario_path = os.path.join(CURRENT_DIR, 'data/usage_scenarios/', usage_scenario)
         make_proj_dir(dir_name=dir_name, usage_scenario_path=usage_scenario_path, docker_compose_path=docker_compose_path)

-        uri = os.path.join(CURRENT_DIR, 'tmp/', dir_name)
+        tmp_dir_path = os.path.join(CURRENT_DIR, 'tmp/', dir_name)
+        if uri == 'default':
+            uri = tmp_dir_path
         if do_parallelize_files:
-            parallelize_files(uri, usage_scenario, docker_compose, parallel_id)
+            parallelize_files(tmp_dir_path, usage_scenario, docker_compose, parallel_id)
     elif uri_type == 'URL':
         if uri[0:8] != 'https://' and uri[0:7] != 'http://':
             raise ValueError("Invalid uri for URL")
     else:
         raise ValueError("Invalid uri_type")

     if name is None:
-        name = f'test_{parallel_id}'
+        name = f'test-{parallel_id}'

     runner = Runner(name=name, uri=uri, uri_type=uri_type, filename=usage_scenario, branch=branch,
                     debug_mode=debug_mode, allow_unsafe=allow_unsafe, no_file_cleanup=no_file_cleanup,
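
Splitting tmp_dir_path out of uri is what lets the smoke test's explicit uri survive: only the 'default' sentinel now falls back to the per-test tmp directory. Hypothetical call shapes under that assumption (argument values are illustrative):

    # uri left at its 'default' sentinel: the runner works out of tests/tmp/<dir_name>
    runner = Tests.setup_runner(usage_scenario='usage_scenario.yml', parallel_id=parallel_id)

    # explicit uri, as smoke_test.py passes: setup_runner no longer overrides it
    runner = Tests.setup_runner(name=RUN_NAME, uri=tmp_dir_path, uri_type='folder',
                                parallel_id=parallel_id, create_tmp_directory=False)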