From c92c3138dc0ee7cc2fc843b47099c0bd4f684cda Mon Sep 17 00:00:00 2001 From: rsuplina Date: Thu, 18 Jan 2024 12:08:14 +0000 Subject: [PATCH 01/18] Add Import_cluster_template module Signed-off-by: rsuplina --- plugins/modules/cm_import_cluster_template.py | 171 ++++++++++++++++++ .../test_cm_import_cluster_template.py | 46 +++++ 2 files changed, 217 insertions(+) create mode 100644 plugins/modules/cm_import_cluster_template.py create mode 100644 tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py diff --git a/plugins/modules/cm_import_cluster_template.py b/plugins/modules/cm_import_cluster_template.py new file mode 100644 index 00000000..b0465811 --- /dev/null +++ b/plugins/modules/cm_import_cluster_template.py @@ -0,0 +1,171 @@ +# Copyright 2024 Cloudera, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ( + ClouderaManagerModule, +) +from cm_client.rest import ApiException +from cm_client import ClouderaManagerResourceApi +from cm_client import ClustersResourceApi +import json + +ANSIBLE_METADATA = { + "metadata_version": "1.1", + "status": ["preview"], + "supported_by": "community", +} + +DOCUMENTATION = r""" +--- +module: cm_import_cluster_template +short_description: Create a cluster based on the provided cluster template +description: + - Searches for a template file. + - The search for the file starts at the root directory where the Ansible playbook is executed. By default, the template is expected to be placed inside the './files' directory. + - Imports the template file and uses it to create the cluster. + - This module ensures that the cluster is created according to the specified template. +author: + - "Ronald Suplina (@rsuplina)" +requirements: + - cm_client +""" + +EXAMPLES = r""" +--- +- name: Create a cluster on Cloudera Manager host + cloudera.cluster.cm_import_cluster_template: + host: example.cloudera.com + username: "jane_smith" + password: "S&peR4Ec*re" + port: "7180" + template: "./files/cluster-template.json" +""" + +RETURN = r""" +--- +cloudera_manager: + description: Details about Cloudera Manager Cluster + type: dict + contains: + cluster_type: + description: The type of cluster created from template. + type: str + returned: optional + cluster_url: + description: Url of Cloudera Manager cluster. + type: str + returned: optional + display_name: + description: The name of the cluster displayed on the site. + type: str + returned: optional + entity_status: + description: Health status of the cluster. + type: str + returned: optional + full_version: + description: Version of the cluster installed. + type: str + returned: optional + hosts_url: + description: Url of all the hosts on which cluster is installed. + type: str + returned: optional + maintenance_mode: + description: Maintance mode of Cloudera Manager Cluster. 
+ type: bool + returned: optional + maintenance_owners: + description: List of Maintance owners for Cloudera Manager Cluster. + type: list + returned: optional + name: + description: The name of the cluster created. + type: str + returned: optional + tags: + description: List of tags for Cloudera Manager Cluster. + type: list + returned: optional + uuid: + description: Unique ID of created cluster. + type: bool + returned: optional +""" + + +class ClusterTemplate(ClouderaManagerModule): + def __init__(self, module): + super(ClusterTemplate, self).__init__(module) + self.template = self.get_param("template") + self.process() + + @ClouderaManagerModule.handle_process + def process(self): + + try: + api_instance = ClouderaManagerResourceApi(self.api_client) + cluster_api_instance = ClustersResourceApi(self.api_client) + + with open(self.template, 'r') as file: + template_json = json.load(file) + + import_template_request = api_instance.import_cluster_template(body=template_json).to_dict() + command_id = import_template_request['id'] + + self.wait_for_command_state(command_id=command_id,polling_interval=60) + + self.cm_cluster_template_output = cluster_api_instance.read_clusters().to_dict() + self.changed = True + self.file_not_found = False + + except ApiException as e: + if e.status == 400: + self.cm_cluster_template_output = json.loads(e.body) + self.changed = False + self.file_not_found = False + + except FileNotFoundError: + self.cm_cluster_template_output = (f"Error: File '{self.template}' not found.") + self.file_not_found = True +def main(): + module = ClouderaManagerModule.ansible_module( + + argument_spec=dict( + template=dict(required=True, type="path"), + ), + supports_check_mode=False + ) + + result = ClusterTemplate(module) + + if result.file_not_found: + module.fail_json(msg=str(result.cm_cluster_template_output)) + + changed = result.changed + + output = dict( + changed=changed, + cloudera_manager=result.cm_cluster_template_output, + ) + + if result.debug: + log = result.log_capture.getvalue() + output.update(debug=log, debug_lines=log.split("\n")) + + module.exit_json(**output) + + +if __name__ == "__main__": + main() diff --git a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py b/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py new file mode 100644 index 00000000..27e22609 --- /dev/null +++ b/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright 2023 Cloudera, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type +import os +import logging +import pytest + +from ansible_collections.cloudera.cluster.plugins.modules import cm_import_cluster_template +from ansible_collections.cloudera.cluster.tests.unit import AnsibleExitJson, AnsibleFailJson + +LOG = logging.getLogger(__name__) + +def test_pytest_cm_import_cluster_template(module_args): + module_args( + { + "username": os.getenv('CM_USERNAME'), + "password": os.getenv('CM_PASSWORD'), + "host": os.getenv('CM_HOST'), + "port": "7180", + "verify_tls": "no", + "debug": "no", + "template": "./files/cluster-template.json", + } + ) + + with pytest.raises(AnsibleExitJson) as e: + cm_import_cluster_template.main() + + # LOG.info(str(e.value)) + LOG.info(str(e.value.cloudera_manager)) From 5ad8f7482da880f3f2c51d73a2440952436f2273 Mon Sep 17 00:00:00 2001 From: rsuplina Date: Tue, 23 Jan 2024 16:55:21 +0000 Subject: [PATCH 02/18] Add add_repositories Parametar Signed-off-by: rsuplina --- plugins/modules/cm_import_cluster_template.py | 31 +++++++++++++++++-- .../test_cm_import_cluster_template.py | 3 +- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/plugins/modules/cm_import_cluster_template.py b/plugins/modules/cm_import_cluster_template.py index b0465811..db9eba7b 100644 --- a/plugins/modules/cm_import_cluster_template.py +++ b/plugins/modules/cm_import_cluster_template.py @@ -37,6 +37,19 @@ - This module ensures that the cluster is created according to the specified template. author: - "Ronald Suplina (@rsuplina)" +options: + template: + description: + - Path to template file which defines the cluster + type: path + elements: str + required: True + add_repositories: + description: + - Install parcel repositories in parcel directory + type: bool + required: False + default: False requirements: - cm_client """ @@ -50,6 +63,15 @@ password: "S&peR4Ec*re" port: "7180" template: "./files/cluster-template.json" + +- name: Create a cluster and install the repositories defined in template + cloudera.cluster.cm_import_cluster_template: + host: example.cloudera.com + username: "jane_smith" + password: "S&peR4Ec*re" + port: "7180" + template: "./files/cluster-template.json" + add_repositories: "True" """ RETURN = r""" @@ -99,7 +121,7 @@ type: list returned: optional uuid: - description: Unique ID of created cluster. 
+ description: Unique ID of created cluster type: bool returned: optional """ @@ -109,6 +131,7 @@ class ClusterTemplate(ClouderaManagerModule): def __init__(self, module): super(ClusterTemplate, self).__init__(module) self.template = self.get_param("template") + self.add_repositories = self.get_param("add_repositories") self.process() @ClouderaManagerModule.handle_process @@ -120,8 +143,11 @@ def process(self): with open(self.template, 'r') as file: template_json = json.load(file) + if self.add_repositories: + import_template_request = api_instance.import_cluster_template(add_repositories=True,body=template_json).to_dict() + else: + import_template_request = api_instance.import_cluster_template(body=template_json).to_dict() - import_template_request = api_instance.import_cluster_template(body=template_json).to_dict() command_id = import_template_request['id'] self.wait_for_command_state(command_id=command_id,polling_interval=60) @@ -144,6 +170,7 @@ def main(): argument_spec=dict( template=dict(required=True, type="path"), + add_repositories=dict(required=False, type="bool", default=False), ), supports_check_mode=False ) diff --git a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py b/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py index 27e22609..9fd62a1c 100644 --- a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py +++ b/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Cloudera, Inc. All Rights Reserved. +# Copyright 2024 Cloudera, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,6 +36,7 @@ def test_pytest_cm_import_cluster_template(module_args): "verify_tls": "no", "debug": "no", "template": "./files/cluster-template.json", + "add_repositories": "True" } ) From 8f386196d7564972f6af0c84d8b8b61df0fd6fbf Mon Sep 17 00:00:00 2001 From: rsuplina Date: Wed, 24 Jan 2024 16:32:42 +0000 Subject: [PATCH 03/18] Add Requested changes Signed-off-by: rsuplina --- ...port_cluster_template.py => cm_cluster.py} | 56 +++++++++++-------- ...cluster_template.py => test_cm_cluster.py} | 9 +-- 2 files changed, 39 insertions(+), 26 deletions(-) rename plugins/modules/{cm_import_cluster_template.py => cm_cluster.py} (78%) rename tests/unit/plugins/modules/cm_import_cluster_template/{test_cm_import_cluster_template.py => test_cm_cluster.py} (88%) diff --git a/plugins/modules/cm_import_cluster_template.py b/plugins/modules/cm_cluster.py similarity index 78% rename from plugins/modules/cm_import_cluster_template.py rename to plugins/modules/cm_cluster.py index db9eba7b..3ac8c74f 100644 --- a/plugins/modules/cm_import_cluster_template.py +++ b/plugins/modules/cm_cluster.py @@ -28,7 +28,7 @@ DOCUMENTATION = r""" --- -module: cm_import_cluster_template +module: cm_cluster short_description: Create a cluster based on the provided cluster template description: - Searches for a template file. 
@@ -50,6 +50,12 @@ type: bool required: False default: False + clusterName: + description: + - Name of Cloudera Manager Cluster + type: str + required: False + default: False requirements: - cm_client """ @@ -57,15 +63,16 @@ EXAMPLES = r""" --- - name: Create a cluster on Cloudera Manager host - cloudera.cluster.cm_import_cluster_template: + cloudera.cluster.cm_cluster: host: example.cloudera.com username: "jane_smith" + clusterName: "OneNodeCluster" password: "S&peR4Ec*re" port: "7180" template: "./files/cluster-template.json" - name: Create a cluster and install the repositories defined in template - cloudera.cluster.cm_import_cluster_template: + cloudera.cluster.cm_cluster: host: example.cloudera.com username: "jane_smith" password: "S&peR4Ec*re" @@ -127,11 +134,12 @@ """ -class ClusterTemplate(ClouderaManagerModule): +class Cluster(ClouderaManagerModule): def __init__(self, module): - super(ClusterTemplate, self).__init__(module) + super(Cluster, self).__init__(module) self.template = self.get_param("template") self.add_repositories = self.get_param("add_repositories") + self.clusterName = self.get_param("clusterName") self.process() @ClouderaManagerModule.handle_process @@ -143,42 +151,46 @@ def process(self): with open(self.template, 'r') as file: template_json = json.load(file) - if self.add_repositories: - import_template_request = api_instance.import_cluster_template(add_repositories=True,body=template_json).to_dict() - else: - import_template_request = api_instance.import_cluster_template(body=template_json).to_dict() - - command_id = import_template_request['id'] - - self.wait_for_command_state(command_id=command_id,polling_interval=60) - - self.cm_cluster_template_output = cluster_api_instance.read_clusters().to_dict() - self.changed = True - self.file_not_found = False + + if not self.module.check_mode: + if self.add_repositories: + import_template_request = api_instance.import_cluster_template(add_repositories=True,body=template_json).to_dict() + else: + import_template_request = api_instance.import_cluster_template(body=template_json).to_dict() + + command_id = import_template_request['id'] + self.wait_for_command_state(command_id=command_id,polling_interval=60) + + if self.clusterName: + self.cm_cluster_template_output = cluster_api_instance.read_cluster(cluster_name=self.clusterName).to_dict() + self.changed = True + else: + self.cm_cluster_template_output = cluster_api_instance.read_clusters().to_dict() + self.changed = True except ApiException as e: + # If cluster already exsists it will reply with 400 Error if e.status == 400: self.cm_cluster_template_output = json.loads(e.body) self.changed = False - self.file_not_found = False except FileNotFoundError: self.cm_cluster_template_output = (f"Error: File '{self.template}' not found.") - self.file_not_found = True + self.module.fail_json(msg=str(self.cm_cluster_template_output)) + def main(): module = ClouderaManagerModule.ansible_module( argument_spec=dict( template=dict(required=True, type="path"), add_repositories=dict(required=False, type="bool", default=False), + clusterName=dict(required=False, type="str"), ), supports_check_mode=False ) - result = ClusterTemplate(module) + result = Cluster(module) - if result.file_not_found: - module.fail_json(msg=str(result.cm_cluster_template_output)) changed = result.changed diff --git a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py b/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_cluster.py similarity index 88% rename 
from tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py rename to tests/unit/plugins/modules/cm_import_cluster_template/test_cm_cluster.py index 9fd62a1c..95b99cb0 100644 --- a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_import_cluster_template.py +++ b/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_cluster.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Cloudera, Inc. All Rights Reserved. +# Copyright 2023 Cloudera, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -21,12 +21,12 @@ import logging import pytest -from ansible_collections.cloudera.cluster.plugins.modules import cm_import_cluster_template +from ansible_collections.cloudera.cluster.plugins.modules import cm_cluster from ansible_collections.cloudera.cluster.tests.unit import AnsibleExitJson, AnsibleFailJson LOG = logging.getLogger(__name__) -def test_pytest_cm_import_cluster_template(module_args): +def test_pytest_cm_cluster(module_args): module_args( { "username": os.getenv('CM_USERNAME'), @@ -34,6 +34,7 @@ def test_pytest_cm_import_cluster_template(module_args): "host": os.getenv('CM_HOST'), "port": "7180", "verify_tls": "no", + "clusterName": "OneNodeCluster", "debug": "no", "template": "./files/cluster-template.json", "add_repositories": "True" @@ -41,7 +42,7 @@ def test_pytest_cm_import_cluster_template(module_args): ) with pytest.raises(AnsibleExitJson) as e: - cm_import_cluster_template.main() + cm_cluster.main() # LOG.info(str(e.value)) LOG.info(str(e.value.cloudera_manager)) From ccdd56d7e17757237443c570f622ce64b1f1c86e Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Fri, 26 Jan 2024 09:26:28 -0500 Subject: [PATCH 04/18] Relocate cluster template merge logic to cm_utils. Add pytest resources. 
Add unit tests for assemble_cluster_template module Signed-off-by: Webster Mudge --- plugins/action/assemble_cluster_template.py | 68 ++-------------- plugins/module_utils/cm_utils.py | 77 +++++++++++++++++-- plugins/modules/assemble_cluster_template.py | 73 +++--------------- tests/config.yml | 2 +- tests/unit/__init__.py | 33 ++++++++ tests/unit/conftest.py | 48 +++++++++++- .../filtered/ignore.txt | 14 ++++ .../filtered/keep.json | 0 .../fragments/01.json | 4 + .../fragments/02.json | 4 + .../invalid_src.json | 3 + .../test_assemble_cluster_template.py | 67 ++++++++++++++++ tests/unit/requirements.txt | 4 +- 13 files changed, 260 insertions(+), 137 deletions(-) create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/filtered/keep.json create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py diff --git a/plugins/action/assemble_cluster_template.py b/plugins/action/assemble_cluster_template.py index 655682be..e1b0e22e 100644 --- a/plugins/action/assemble_cluster_template.py +++ b/plugins/action/assemble_cluster_template.py @@ -29,72 +29,16 @@ from ansible.plugins.action import ActionBase from ansible.utils.hashing import checksum_s +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ClusterTemplate class ActionModule(ActionBase): TRANSFERS_FILES = True MERGED = {} - IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] - UNIQUE_IDS = ["repositories"] - - def update_object(self, base, template, breadcrumbs=""): - if isinstance(base, dict) and isinstance(template, dict): - self.update_dict(base, template, breadcrumbs) - return True - elif isinstance(base, list) and isinstance(template, list): - self.update_list(base, template, breadcrumbs) - return True - return False - - def update_dict(self, base, template, breadcrumbs=""): - for key, value in template.items(): - crumb = breadcrumbs + "/" + key - - if key in self.IDEMPOTENT_IDS: - if base[key] != value: - self._display.error( - "Objects with distinct IDs should not be merged: " + crumb - ) - continue - - if key not in base: - base[key] = value - elif not self.update_object(base[key], value, crumb) and base[key] != value: - self._display.warning( - f"Value being overwritten for key [{crumb}]], Old: [{base[key]}], New: [{value}]" - ) - base[key] = value - - if key in self.UNIQUE_IDS: - base[key] = list(set(base[key])) - - def update_list(self, base, template, breadcrumbs=""): - for item in template: - if isinstance(item, dict): - for attr in self.IDEMPOTENT_IDS: - if attr in item: - idempotent_id = attr - break - else: - idempotent_id = None - if idempotent_id: - namesake = [ - i for i in base if i[idempotent_id] == item[idempotent_id] - ] - if namesake: - self.update_dict( - namesake[0], - item, - breadcrumbs - + "/[" - + idempotent_id - + "=" - + item[idempotent_id] - + "]", - ) - continue - base.append(item) - base.sort(key=lambda x: json.dumps(x, sort_keys=True)) + + def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj): + super().__init__(task, connection, play_context, loader, templar, 
shared_loader_obj) + self.TEMPLATE = ClusterTemplate(warn_fn=self._display.warning, error_fn=self._display.error) def assemble_fragments( self, assembled_file, src_path, regex=None, ignore_hidden=True, decrypt=True @@ -121,7 +65,7 @@ def assemble_fragments( encoding="utf-8", ) as fragment_file: try: - self.update_object(self.MERGED, json.loads(fragment_file.read())) + self.TEMPLATE.update_object(self.MERGED, json.loads(fragment_file.read())) except json.JSONDecodeError as e: raise AnsibleActionFail( message=f"JSON parsing error: {to_text(e.msg)}", diff --git a/plugins/module_utils/cm_utils.py b/plugins/module_utils/cm_utils.py index c0d8af01..07a96130 100644 --- a/plugins/module_utils/cm_utils.py +++ b/plugins/module_utils/cm_utils.py @@ -38,6 +38,74 @@ __maintainer__ = ["wmudge@cloudera.com"] +class ClusterTemplate(object): + + IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] + UNIQUE_IDS = ["repositories"] + + def __init__(self, warn_fn, error_fn) -> None: + self._warn = warn_fn + self._error = error_fn + + def update_object(self, base, template, breadcrumbs="") -> bool: + if isinstance(base, dict) and isinstance(template, dict): + self.update_dict(base, template, breadcrumbs) + return True + elif isinstance(base, list) and isinstance(template, list): + self.update_list(base, template, breadcrumbs) + return True + return False + + def update_dict(self, base, template, breadcrumbs="") -> None: + for key, value in template.items(): + crumb = breadcrumbs + "/" + key + + if key in self.IDEMPOTENT_IDS: + if key in base and base[key] != value: + self._error( + "Objects with distinct IDs should not be merged: " + crumb + ) + + if key not in base: + base[key] = value + elif not self.update_object(base[key], value, crumb) and base[key] != value: + self._warn( + f"Value being overwritten for key [{crumb}]], Old: [{base[key]}], New: [{value}]" + ) + base[key] = value + + if key in self.UNIQUE_IDS: + base[key] = list(set(base[key])) + + def update_list(self, base, template, breadcrumbs="") -> None: + for item in template: + if isinstance(item, dict): + for attr in self.IDEMPOTENT_IDS: + if attr in item: + idempotent_id = attr + break + else: + idempotent_id = None + if idempotent_id: + namesake = [ + i for i in base if i[idempotent_id] == item[idempotent_id] + ] + if namesake: + self.update_dict( + namesake[0], + item, + breadcrumbs + + "/[" + + idempotent_id + + "=" + + item[idempotent_id] + + "]", + ) + continue + base.append(item) + base.sort(key=lambda x: json.dumps(x, sort_keys=True)) + + class ClouderaManagerModule(object): """Base Ansible Module for API access to Cloudera Manager.""" @@ -60,13 +128,12 @@ def _add_log(err): err = dict( msg="API error: " + to_text(ae.reason), status_code=ae.status, - body=ae.body.decode("utf-8"), ) - if err["body"] != "": + if ae.body: try: - err.update(body=json.loads(err["body"])) - except Exception as te: - pass + err.update(body=json.loads(ae.body)) + except Exception: + err.update(body=ae.body.decode("utf-8")), self.module.fail_json(**_add_log(err)) except MaxRetryError as maxe: diff --git a/plugins/modules/assemble_cluster_template.py b/plugins/modules/assemble_cluster_template.py index 7710c6d5..e4bd73e5 100644 --- a/plugins/modules/assemble_cluster_template.py +++ b/plugins/modules/assemble_cluster_template.py @@ -138,13 +138,13 @@ import tempfile from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.common.text.converters import to_native +from ansible.module_utils.common.text.converters import 
to_native, to_text + +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ClusterTemplate class AssembleClusterTemplate(object): MERGED = {} - IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] - UNIQUE_IDS = ["repositories"] def __init__(self, module): self.module = module @@ -166,69 +166,14 @@ def __init__(self, module): # Initialize internal values self.compiled = None + self.template = ClusterTemplate( + warn_fn=self.module.warn, + error_fn=self.module.fail_json + ) # Execute the logic self.process() - def update_object(self, base, template, breadcrumbs=""): - if isinstance(base, dict) and isinstance(template, dict): - self.update_dict(base, template, breadcrumbs) - return True - elif isinstance(base, list) and isinstance(template, list): - self.update_list(base, template, breadcrumbs) - return True - return False - - def update_dict(self, base, template, breadcrumbs=""): - for key, value in template.items(): - crumb = breadcrumbs + "/" + key - - if key in self.IDEMPOTENT_IDS: - if base[key] != value: - self.module.warn( - f"Objects with distinct IDs should not be merged: {crumb}" - ) - continue - - if key not in base: - base[key] = value - elif not self.update_object(base[key], value, crumb) and base[key] != value: - self.module.warn( - f"Value being overwritten for key [{crumb}]; Old: [{base[key]}], New: [{value}]" - ) - base[key] = value - - if key in self.UNIQUE_IDS: - base[key] = list(set(base[key])) - - def update_list(self, base, template, breadcrumbs=""): - for item in template: - if isinstance(item, dict): - for attr in self.IDEMPOTENT_IDS: - if attr in item: - idempotent_id = attr - break - else: - idempotent_id = None - if idempotent_id: - namesake = [ - i for i in base if i[idempotent_id] == item[idempotent_id] - ] - if namesake: - self.update_dict( - namesake[0], - item, - breadcrumbs - + "/[" - + idempotent_id - + "=" - + item[idempotent_id] - + "]", - ) - continue - base.append(item) - base.sort(key=lambda x: json.dumps(x, sort_keys=True)) - def assemble_fragments(self, assembled_file): # By file name sort order for f in sorted(os.listdir(self.src)): @@ -245,10 +190,10 @@ def assemble_fragments(self, assembled_file): with open(fragment, "r", encoding="utf-8") as fragment_file: try: - self.update_object(self.MERGED, json.loads(fragment_file.read())) + self.template.update_object(self.MERGED, json.loads(fragment_file.read())) except json.JSONDecodeError as e: self.module.fail_json( - msg=f"JSON parsing error: {to_text(e.msg)}", error=to_native(e) + msg=f"JSON parsing error for file, {fragment}: {to_text(e.msg)}", error=to_native(e) ) # Write out the final assembly diff --git a/tests/config.yml b/tests/config.yml index fb30b8a0..2969e7a8 100644 --- a/tests/config.yml +++ b/tests/config.yml @@ -1,4 +1,4 @@ -# Copyright 2023 Cloudera, Inc. All Rights Reserved. +# Copyright 2024 Cloudera, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..02bf2923 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,33 @@ +# Copyright 2024 Cloudera, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class AnsibleExitJson(Exception): + """Exception class to be raised by module.exit_json and caught by the test case""" + + def __init__(self, kwargs): + super(AnsibleExitJson, self).__init__( + kwargs.get("msg", "General module success") + ) + self.__dict__.update(kwargs) + + +class AnsibleFailJson(Exception): + """Exception class to be raised by module.fail_json and caught by the test case""" + + def __init__(self, kwargs): + super(AnsibleFailJson, self).__init__( + kwargs.get("msg", "General module failure") + ) + self.__dict__.update(kwargs) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 3a2df28c..b3394f40 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Cloudera, Inc. All Rights Reserved. +# Copyright 2024 Cloudera, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,13 +14,55 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import (absolute_import, division, print_function) +from __future__ import absolute_import, division, print_function + __metaclass__ = type +import json import sys import pytest +from ansible.module_utils import basic +from ansible.module_utils.common.text.converters import to_bytes + +from ansible_collections.cloudera.cluster.tests.unit import ( + AnsibleFailJson, + AnsibleExitJson, +) + + @pytest.fixture(autouse=True) def skip_python(): if sys.version_info < (3, 6): - pytest.skip('Skipping on Python %s. cloudera.cloud supports Python 3.6 and higher.' % sys.version) + pytest.skip( + "Skipping on Python %s. cloudera.cloud supports Python 3.6 and higher." + % sys.version + ) + + +@pytest.fixture(autouse=True) +def patch_module(monkeypatch): + """Patch AnsibleModule to raise exceptions on success and failure""" + + def exit_json(*args, **kwargs): + if "changed" not in kwargs: + kwargs["changed"] = False + raise AnsibleExitJson(kwargs) + + def fail_json(*args, **kwargs): + kwargs["failed"] = True + raise AnsibleFailJson(kwargs) + + monkeypatch.setattr(basic.AnsibleModule, "exit_json", exit_json) + monkeypatch.setattr(basic.AnsibleModule, "fail_json", fail_json) + + +@pytest.fixture +def module_args(): + """Prepare module arguments""" + + def prep_args(args=dict()): + args = json.dumps({"ANSIBLE_MODULE_ARGS": args}) + basic._ANSIBLE_ARGS = to_bytes(args) + + return prep_args diff --git a/tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt b/tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt new file mode 100644 index 00000000..87bc5d18 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt @@ -0,0 +1,14 @@ +// Copyright 2024 Cloudera, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + diff --git a/tests/unit/plugins/modules/assemble_cluster_template/filtered/keep.json b/tests/unit/plugins/modules/assemble_cluster_template/filtered/keep.json new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json new file mode 100644 index 00000000..18cac2be --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json @@ -0,0 +1,4 @@ +{ + "name": "Test", + "value": "01" +} \ No newline at end of file diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json new file mode 100644 index 00000000..fcab3a69 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json @@ -0,0 +1,4 @@ +{ + "name": "Test", + "value": "02" +} \ No newline at end of file diff --git a/tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json b/tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json new file mode 100644 index 00000000..f083e059 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json @@ -0,0 +1,3 @@ +{ + "test": "false" +} \ No newline at end of file diff --git a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py new file mode 100644 index 00000000..984b4da4 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py @@ -0,0 +1,67 @@ +# Copyright 2024 Cloudera, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import os +import logging +import pytest + +from ansible_collections.cloudera.cluster.plugins.modules import assemble_cluster_template +from ansible_collections.cloudera.cluster.tests.unit import AnsibleExitJson, AnsibleFailJson + +LOG = logging.getLogger(__name__) +TEST_DIR = os.path.dirname(os.path.abspath(__file__)) + +def test_missing_required(module_args): + module_args() + + with pytest.raises(AnsibleFailJson, match="dest, src"): + assemble_cluster_template.main() + +def test_missing_dest(module_args): + module_args({ + "src": "foo.json" + }) + + with pytest.raises(AnsibleFailJson, match="dest"): + assemble_cluster_template.main() + +def test_missing_src(module_args): + module_args({ + "dest": "foo.json" + }) + + with pytest.raises(AnsibleFailJson, match="src"): + assemble_cluster_template.main() + +def test_src_not_directory(module_args): + module_args({ + "dest": "foo.json", + "src": os.path.join(TEST_DIR, "invalid_src.json"), + }) + + with pytest.raises(AnsibleFailJson, match="not a directory"): + assemble_cluster_template.main() + +def test_merge_idempotent_key(module_args, tmp_path): + module_args({ + "dest": os.path.join(tmp_path, "output.json"), + "src": os.path.join(TEST_DIR, "fragments"), + }) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() diff --git a/tests/unit/requirements.txt b/tests/unit/requirements.txt index 1069667a..8ec82784 100644 --- a/tests/unit/requirements.txt +++ b/tests/unit/requirements.txt @@ -1,4 +1,4 @@ -# Copyright 2023 Cloudera, Inc. All Rights Reserved. +# Copyright 2024 Cloudera, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -cm_client \ No newline at end of file +cm_client From 3212d804cab6d5b43b67ea2da0908f94bfa9f0ad Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Fri, 26 Jan 2024 09:27:36 -0500 Subject: [PATCH 05/18] Update logic in cm_cluster module and use shared cluster template merge functions Signed-off-by: Webster Mudge --- plugins/modules/cm_cluster.py | 126 ++++++++++++------ .../test_cm_cluster.py | 56 +++++--- 2 files changed, 124 insertions(+), 58 deletions(-) rename tests/unit/plugins/modules/{cm_import_cluster_template => cm_cluster}/test_cm_cluster.py (53%) diff --git a/plugins/modules/cm_cluster.py b/plugins/modules/cm_cluster.py index 3ac8c74f..aa05fbf8 100644 --- a/plugins/modules/cm_cluster.py +++ b/plugins/modules/cm_cluster.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from ansible.module_utils.common.text.converters import to_text, to_native + from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ( - ClouderaManagerModule, + ClouderaManagerModule, ClusterTemplate ) from cm_client.rest import ApiException from cm_client import ClouderaManagerResourceApi @@ -134,69 +136,111 @@ """ -class Cluster(ClouderaManagerModule): +class ClusterModule(ClouderaManagerModule): def __init__(self, module): - super(Cluster, self).__init__(module) + super(ClusterModule, self).__init__(module) + self.template = self.get_param("template") self.add_repositories = self.get_param("add_repositories") - self.clusterName = self.get_param("clusterName") + self.cluster_name = self.get_param("name") + self.state = self.get_param("state") + + self.changed = False + self.output = dict() + self.process() @ClouderaManagerModule.handle_process def process(self): + api_instance = ClouderaManagerResourceApi(self.api_client) + cluster_api_instance = ClustersResourceApi(self.api_client) + + template_contents = dict() + + if self.template: + try: + with open(self.template, 'r') as file: + template_contents = json.load(file) + except OSError as oe: + self.module.fail_json(msg=f"Error reading file '{to_text(self.template)}'", err=to_native(oe)) + # Need to catch malformed JSON, etc. + + if not self.cluster_name: + if template_contents: + self.cluster_name = template_contents['instantiator']['clusterName'] + else: + self.module.fail_json(msg="No cluster name found in template.") + try: - api_instance = ClouderaManagerResourceApi(self.api_client) - cluster_api_instance = ClustersResourceApi(self.api_client) - - with open(self.template, 'r') as file: - template_json = json.load(file) + self.existing = cluster_api_instance.read_cluster(cluster_name=self.cluster_name).to_dict() + except ApiException: + self.existing = dict() + + if self.state == "present": + if self.existing: + pass + # Reconcile the existing vs. 
the incoming values into a set of diffs + # then process via the PUT /clusters/{clusterName} endpoint + else: + payload = dict() + + # Construct import template payload from the template and/or explicit parameters + explicit_params = dict() - if not self.module.check_mode: - if self.add_repositories: - import_template_request = api_instance.import_cluster_template(add_repositories=True,body=template_json).to_dict() + # Set up 'instantiator' parameters + explicit_params.update(instantiator=dict( + clusterName=self.cluster_name + )) + + if template_contents: + TEMPLATE = ClusterTemplate(warn_fn=self.module.warn, error_fn=self.module.fail_json) + TEMPLATE.update_object(template_contents, explicit_params) + payload.update(body=template_contents) else: - import_template_request = api_instance.import_cluster_template(body=template_json).to_dict() + payload.update(body=explicit_params) + + # Update to include repositories + if self.add_repositories: + payload.update(add_repositories=True) + + # Execute the import + if not self.module.check_mode: + self.changed = True + import_template_request = api_instance.import_cluster_template(**payload).to_dict() command_id = import_template_request['id'] self.wait_for_command_state(command_id=command_id,polling_interval=60) + + # Retrieve the newly-minted cluster + self.output = cluster_api_instance.read_cluster(cluster_name=self.cluster_name).to_dict() + elif self.state == "absent": + if self.existing: + pass + # Delete the cluster via DELETE /clusters/{clusterName} + else: + self.module.fail_json(msg=f"Invalid state, ${self.state}") - if self.clusterName: - self.cm_cluster_template_output = cluster_api_instance.read_cluster(cluster_name=self.clusterName).to_dict() - self.changed = True - else: - self.cm_cluster_template_output = cluster_api_instance.read_clusters().to_dict() - self.changed = True - - except ApiException as e: - # If cluster already exsists it will reply with 400 Error - if e.status == 400: - self.cm_cluster_template_output = json.loads(e.body) - self.changed = False - - except FileNotFoundError: - self.cm_cluster_template_output = (f"Error: File '{self.template}' not found.") - self.module.fail_json(msg=str(self.cm_cluster_template_output)) def main(): module = ClouderaManagerModule.ansible_module( - argument_spec=dict( - template=dict(required=True, type="path"), - add_repositories=dict(required=False, type="bool", default=False), - clusterName=dict(required=False, type="str"), + template=dict(type="path", aliases=["cluster_template"]), + add_repositories=dict(type="bool", default=False), + name=dict(aliases=["cluster_name"]), + state=dict(default="present", choices=["present", "absent"]) ), - supports_check_mode=False - ) - - result = Cluster(module) - + required_one_of=[ + ["name", "template"] + ], + supports_check_mode=True + ) - changed = result.changed + result = ClusterModule(module) output = dict( - changed=changed, - cloudera_manager=result.cm_cluster_template_output, + changed=result.changed, + cloudera_manager=result.existing, ) if result.debug: diff --git a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_cluster.py b/tests/unit/plugins/modules/cm_cluster/test_cm_cluster.py similarity index 53% rename from tests/unit/plugins/modules/cm_import_cluster_template/test_cm_cluster.py rename to tests/unit/plugins/modules/cm_cluster/test_cm_cluster.py index 95b99cb0..96c358c0 100644 --- a/tests/unit/plugins/modules/cm_import_cluster_template/test_cm_cluster.py +++ b/tests/unit/plugins/modules/cm_cluster/test_cm_cluster.py 
@@ -17,6 +17,7 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type + import os import logging import pytest @@ -26,23 +27,44 @@ LOG = logging.getLogger(__name__) -def test_pytest_cm_cluster(module_args): - module_args( - { - "username": os.getenv('CM_USERNAME'), - "password": os.getenv('CM_PASSWORD'), - "host": os.getenv('CM_HOST'), - "port": "7180", - "verify_tls": "no", - "clusterName": "OneNodeCluster", - "debug": "no", - "template": "./files/cluster-template.json", - "add_repositories": "True" - } - ) - with pytest.raises(AnsibleExitJson) as e: +@pytest.fixture() +def conn(): + return { + "username": os.getenv('CM_USERNAME'), + "password": os.getenv('CM_PASSWORD'), + "host": os.getenv('CM_HOST'), + "port": "7180", + "verify_tls": "no", + "debug": "yes", + } + +def test_missing_name_or_template(conn, module_args): + module_args(conn) + + with pytest.raises(AnsibleFailJson, match="name, template") as e: cm_cluster.main() - # LOG.info(str(e.value)) - LOG.info(str(e.value.cloudera_manager)) +def test_missing_cdh_version(conn, module_args): + module_args({ + **conn, + "name": "Test" + }) + + with pytest.raises(AnsibleFailJson, match="Bad Request") as e: + cm_cluster.main() + + assert "CDH version" in e.value.body['message'] + +def test_absent_not_existing(conn, module_args): + module_args({ + **conn, + "name": "Test", + "state": "absent" + }) + + with pytest.raises(AnsibleExitJson) as e: + cm_cluster.main() + + assert e.value.changed == False + \ No newline at end of file From cf59f6ac75e680645223c9c392ee2c46b8e6f5bd Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Fri, 26 Jan 2024 15:33:41 -0500 Subject: [PATCH 06/18] Add further unit tests Signed-off-by: Webster Mudge --- .../test_assemble_cluster_template.py | 495 +++++++++++++++++- 1 file changed, 473 insertions(+), 22 deletions(-) diff --git a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py index 984b4da4..54aa0815 100644 --- a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py +++ b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py @@ -1,11 +1,11 @@ # Copyright 2024 Cloudera, Inc. -# +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -16,52 +16,503 @@ __metaclass__ = type -import os import logging +import json +import os import pytest -from ansible_collections.cloudera.cluster.plugins.modules import assemble_cluster_template -from ansible_collections.cloudera.cluster.tests.unit import AnsibleExitJson, AnsibleFailJson +from ansible_collections.cloudera.cluster.plugins.modules import ( + assemble_cluster_template, +) +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ( + ClusterTemplate, +) +from ansible_collections.cloudera.cluster.tests.unit import ( + AnsibleExitJson, + AnsibleFailJson, +) LOG = logging.getLogger(__name__) TEST_DIR = os.path.dirname(os.path.abspath(__file__)) + +def expected_list(expected:list) -> list: + expected.sort(key=lambda x: json.dumps(x, sort_keys=True)) + return expected + + def test_missing_required(module_args): module_args() with pytest.raises(AnsibleFailJson, match="dest, src"): assemble_cluster_template.main() - + + def test_missing_dest(module_args): - module_args({ - "src": "foo.json" - }) + module_args({"src": "foo.json"}) with pytest.raises(AnsibleFailJson, match="dest"): assemble_cluster_template.main() + def test_missing_src(module_args): - module_args({ - "dest": "foo.json" - }) + module_args({"dest": "foo.json"}) with pytest.raises(AnsibleFailJson, match="src"): assemble_cluster_template.main() + def test_src_not_directory(module_args): - module_args({ - "dest": "foo.json", - "src": os.path.join(TEST_DIR, "invalid_src.json"), - }) + module_args( + { + "dest": "foo.json", + "src": os.path.join(TEST_DIR, "invalid_src.json"), + } + ) with pytest.raises(AnsibleFailJson, match="not a directory"): assemble_cluster_template.main() -def test_merge_idempotent_key(module_args, tmp_path): - module_args({ - "dest": os.path.join(tmp_path, "output.json"), - "src": os.path.join(TEST_DIR, "fragments"), - }) + +@pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS) +def test_merge_idempotent_key(module_args, tmp_path, key): + root_dir = tmp_path / "idempotent" + root_dir.mkdir() + + content = dict() + content[key] = "Test" + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + module_args({"dest": str(root_dir / "results.json"), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + +@pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS) +def test_merge_idempotent_key_conflict(module_args, tmp_path, key): + root_dir = tmp_path / "idempotent_conflict" + root_dir.mkdir() + + content = dict() + content[key] = "Test" + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content[key] = "Error" + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + module_args({"dest": str(root_dir / "results.json"), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleFailJson, match=f"/{key}"): + assemble_cluster_template.main() + +@pytest.mark.parametrize("key", ClusterTemplate.UNIQUE_IDS) +def test_merge_unique_key(module_args, tmp_path, key): + root_dir = tmp_path / "unique" + root_dir.mkdir() + + content = dict() 
+ content[key] = ["one", "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output[key]) == 2 + assert output[key] == expected_list(["one", "two"]) + +@pytest.mark.parametrize("key", ClusterTemplate.UNIQUE_IDS) +def test_merge_unique_key_additional(module_args, tmp_path, key): + root_dir = tmp_path / "unique_additional" + root_dir.mkdir() + + content = dict() + content[key] = ["one", "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content[key] = ["one", "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output[key]) == 3 + assert output[key] == expected_list(["one", "two", "three"]) + +def test_merge_list(module_args, tmp_path): + root_dir = tmp_path / "list" + root_dir.mkdir() + + content = dict() + content["test"] = ["one", "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = ["one", "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.loads(results.read_text()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list(["one", "two", "one", "three"]) + +def test_merge_list_nested(module_args, tmp_path): + root_dir = tmp_path / "list_nested" + root_dir.mkdir() + + content = dict() + content["test"] = [["one"], "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = [["one"], "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list([["one"], ["one"], "two", "three"]) + +def test_merge_list_idempotent(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test"}, "two"] + + fragment_dir = root_dir / "fragments" + 
fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = [{"name": "Test"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 3 + assert output["test"] == expected_list([{"name": "Test"}, "two", "three"]) + +def test_merge_list_idempotent_conflict(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_conflict" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = [{"name": "Conflict"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleFailJson, match="\/test\/\[name=Conflict\]\/name"): + assemble_cluster_template.main() + +def test_merge_dict(module_args, tmp_path): + root_dir = tmp_path / "dict" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": 1} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = {"two": 2} + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 2 + assert output["test"] == {"one": 1, "two": 2} + +def test_merge_dict_overwrite(module_args, tmp_path): + root_dir = tmp_path / "dict_overwrite" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": 1} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = {"one": 2} + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert output["test"] == {"one": 2} + +def test_merge_dict_nested(module_args, tmp_path): + root_dir = tmp_path / "dict_nested" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": {"two": 1}} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = {"one": {"three": 3}} + overlay = fragment_dir / 
"overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert len(output["test"]["one"]) == 2 + assert output["test"] == {"one": {"two": 1, "three": 3}} + +def test_merge_dict_nested_overwrite(module_args, tmp_path): + root_dir = tmp_path / "dict_nested_overwrite" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": {"two": 1}} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = {"one": {"two": 2, "three": 3}} + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert len(output["test"]["one"]) == 2 + assert output["test"] == {"one": {"two": 2, "three": 3}} + +def test_merge_dict_nested_idempotent(module_args, tmp_path): + root_dir = tmp_path / "dict_nested_idempotent" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": {"name": "Test"}} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = {"one": {"name": "Test"}} + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert len(output["test"]["one"]) == 1 + assert output["test"] == {"one": {"name": "Test"}} + +def test_merge_dict_nested_idempotent_conflict(module_args, tmp_path): + root_dir = tmp_path / "dict_nested_idempotent_conflict" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": {"name": "Test"}} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = {"one": {"name": "Conflict"}} + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleFailJson, match="\/test\/one\/name"): + assemble_cluster_template.main() From 07152f2d5d3d770062d0742876524738dde53e1f Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Fri, 26 Jan 2024 15:35:01 -0500 Subject: [PATCH 07/18] Update list merge logic and sort unique list values Signed-off-by: Webster Mudge --- plugins/module_utils/cm_utils.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/plugins/module_utils/cm_utils.py b/plugins/module_utils/cm_utils.py 
index 07a96130..d677434f 100644 --- a/plugins/module_utils/cm_utils.py +++ b/plugins/module_utils/cm_utils.py @@ -39,7 +39,6 @@ class ClusterTemplate(object): - IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] UNIQUE_IDS = ["repositories"] @@ -76,6 +75,7 @@ def update_dict(self, base, template, breadcrumbs="") -> None: if key in self.UNIQUE_IDS: base[key] = list(set(base[key])) + base[key].sort(key=lambda x: json.dumps(x, sort_keys=True)) def update_list(self, base, template, breadcrumbs="") -> None: for item in template: @@ -84,15 +84,18 @@ def update_list(self, base, template, breadcrumbs="") -> None: if attr in item: idempotent_id = attr break - else: - idempotent_id = None + else: + idempotent_id = None + if idempotent_id: namesake = [ - i for i in base if i[idempotent_id] == item[idempotent_id] + i + for i in base + if idempotent_id in i ] - if namesake: + for n in namesake: self.update_dict( - namesake[0], + n, item, breadcrumbs + "/[" @@ -101,7 +104,7 @@ def update_list(self, base, template, breadcrumbs="") -> None: + item[idempotent_id] + "]", ) - continue + continue base.append(item) base.sort(key=lambda x: json.dumps(x, sort_keys=True)) @@ -341,7 +344,7 @@ def ansible_module( mutually_exclusive=[], required_one_of=[], required_together=[], - **kwargs + **kwargs, ): """ Creates the base Ansible module argument spec and dependencies, From 555ebda7522c0c812a2b2ebbd890620ec1fc271c Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Fri, 26 Jan 2024 15:35:46 -0500 Subject: [PATCH 08/18] Update merged object to instance variable Signed-off-by: Webster Mudge --- plugins/modules/assemble_cluster_template.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/plugins/modules/assemble_cluster_template.py b/plugins/modules/assemble_cluster_template.py index e4bd73e5..7f0ce5c6 100644 --- a/plugins/modules/assemble_cluster_template.py +++ b/plugins/modules/assemble_cluster_template.py @@ -140,12 +140,12 @@ from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.text.converters import to_native, to_text -from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ClusterTemplate +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ( + ClusterTemplate, +) class AssembleClusterTemplate(object): - MERGED = {} - def __init__(self, module): self.module = module @@ -167,9 +167,10 @@ def __init__(self, module): # Initialize internal values self.compiled = None self.template = ClusterTemplate( - warn_fn=self.module.warn, - error_fn=self.module.fail_json + warn_fn=self.module.warn, + error_fn=lambda msg: self.module.fail_json(msg=msg), ) + self.merged = dict() # Execute the logic self.process() @@ -190,14 +191,17 @@ def assemble_fragments(self, assembled_file): with open(fragment, "r", encoding="utf-8") as fragment_file: try: - self.template.update_object(self.MERGED, json.loads(fragment_file.read())) + self.template.update_object( + self.merged, json.loads(fragment_file.read()) + ) except json.JSONDecodeError as e: self.module.fail_json( - msg=f"JSON parsing error for file, {fragment}: {to_text(e.msg)}", error=to_native(e) + msg=f"JSON parsing error for file, {fragment}: {to_text(e.msg)}", + error=to_native(e), ) # Write out the final assembly - json.dump(self.MERGED, assembled_file, indent=2, sort_keys=False) + json.dump(self.merged, assembled_file, indent=2, sort_keys=False) # Close the assembled file handle; will not delete for atomic_move assembled_file.close() 
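For reference, a minimal sketch of exercising the merge helper touched by the two patches above, outside of Ansible. This is illustrative only: the import path, the ClusterTemplate(warn_fn=..., error_fn=...) constructor, the update_object() entry point (a later patch in this series renames it to merge()), and the special handling of the "refName" and "repositories" keys are taken from the diffs; the warn/fail callbacks and the zookeeper sample fragments are invented for the example and assume the collection is importable on the local Python path.

    # Illustrative sketch only -- drives ClusterTemplate directly with two
    # hand-written fragments to show the list-merge behaviour: list entries
    # sharing an idempotent key ("refName") are merged in place, and the
    # "repositories" list is de-duplicated and sorted.
    import json

    from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import (
        ClusterTemplate,
    )

    def warn(msg):
        # Stand-in for module.warn / display.warning
        print(f"WARNING: {msg}")

    def fail(msg):
        # Stand-in for module.fail_json / display.error
        raise ValueError(msg)

    merger = ClusterTemplate(warn_fn=warn, error_fn=fail)

    base = {
        "repositories": ["https://archive.cloudera.com/"],
        "services": [{"refName": "zookeeper", "serviceType": "ZOOKEEPER"}],
    }
    overlay = {
        "repositories": ["https://archive.cloudera.com/"],
        "services": [{"refName": "zookeeper", "displayName": "ZooKeeper"}],
    }

    merger.update_object(base, overlay)

    # Expected: a single merged "zookeeper" service entry carrying both
    # serviceType and displayName, and one unique repository URL.
    print(json.dumps(base, indent=2))
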
From 35bc6d744a4f8e2c00792048324d3c01c4be7512 Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Fri, 26 Jan 2024 15:43:29 -0500 Subject: [PATCH 09/18] Add unit tests for multiple idempotent keys and elements Signed-off-by: Webster Mudge --- .../test_assemble_cluster_template.py | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py index 54aa0815..1fbb9f10 100644 --- a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py +++ b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py @@ -292,6 +292,72 @@ def test_merge_list_idempotent(module_args, tmp_path): assert len(output["test"]) == 3 assert output["test"] == expected_list([{"name": "Test"}, "two", "three"]) + +def test_merge_list_idempotent_multiple_elements(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_multiple_elements" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test"}, {"product": "Product"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = [{"name": "Test"}, {"product": "Product"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list([{"name": "Test"}, {"product": "Product"}, "two", "three"]) + +def test_merge_list_idempotent_multiple_keys(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_multiple_keys" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test", "product": "Product"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + content["test"] = [{"name": "Test", "product": "Product"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 3 + assert output["test"] == expected_list([{"name": "Test", "product": "Product"}, "two", "three"]) def test_merge_list_idempotent_conflict(module_args, tmp_path): root_dir = tmp_path / "list_idempotent_conflict" From 56f1aab60093aed0ac076a8cf3012d5977b4c68e Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:23:07 -0500 Subject: [PATCH 10/18] Refactor merged object to instance from class. Short-circuit merger for initial fragment. 
Signed-off-by: Webster Mudge --- plugins/action/assemble_cluster_template.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/plugins/action/assemble_cluster_template.py b/plugins/action/assemble_cluster_template.py index e1b0e22e..d6d51e48 100644 --- a/plugins/action/assemble_cluster_template.py +++ b/plugins/action/assemble_cluster_template.py @@ -33,12 +33,11 @@ class ActionModule(ActionBase): TRANSFERS_FILES = True - - MERGED = {} def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj): super().__init__(task, connection, play_context, loader, templar, shared_loader_obj) self.TEMPLATE = ClusterTemplate(warn_fn=self._display.warning, error_fn=self._display.error) + self.MERGED = {} def assemble_fragments( self, assembled_file, src_path, regex=None, ignore_hidden=True, decrypt=True @@ -65,7 +64,10 @@ def assemble_fragments( encoding="utf-8", ) as fragment_file: try: - self.TEMPLATE.update_object(self.MERGED, json.loads(fragment_file.read())) + if not self.MERGED: + self.MERGED = json.loads(fragment_file.read()) + else: + self.TEMPLATE.update_object(self.MERGED, json.loads(fragment_file.read())) except json.JSONDecodeError as e: raise AnsibleActionFail( message=f"JSON parsing error: {to_text(e.msg)}", @@ -99,6 +101,8 @@ def run(self, tmp=None, task_vars=None): regexp = self._task.args.get("regexp", None) if regexp is None: regexp = self._task.args.get("filter", None) + if regexp is None: + regexp = self._task.args.get("regex", None) remote_src = boolean(self._task.args.get("remote_src", False)) follow = boolean(self._task.args.get("follow", False)) From 1ced19c56a3b00440fa4c9990893341559ffee24 Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:23:43 -0500 Subject: [PATCH 11/18] Short-circuit merger for initial fragment. Add 'regex' alias. Update API docs. Signed-off-by: Webster Mudge --- plugins/modules/assemble_cluster_template.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/plugins/modules/assemble_cluster_template.py b/plugins/modules/assemble_cluster_template.py index 7f0ce5c6..d10490fa 100644 --- a/plugins/modules/assemble_cluster_template.py +++ b/plugins/modules/assemble_cluster_template.py @@ -40,15 +40,15 @@ options: src: description: - - An already existing directory of cluster template files. - - TODO Local or remote + - A directory of cluster template files, i.e. fragments. + - Each file must be a valid JSON file. type: path required: True aliases: - cluster_template_src dest: description: - - A file to create using the merger of all of the cluster template files. + - A file created from the merger of the cluster template files. type: path required: True aliases: @@ -61,7 +61,7 @@ default: False remote_src: description: - - Flag to control the location of the cluster template configuration source files. + - Flag for the location of the cluster template fragment files. - If V(false), search for I(src) on the controller. - If V(true), search for I(src) on the remote/target. type: bool @@ -75,6 +75,7 @@ type: str aliases: - filter + - regex ignore_hidden: description: - Flag whether to include files that begin with a '.'. 
@@ -191,9 +192,12 @@ def assemble_fragments(self, assembled_file): with open(fragment, "r", encoding="utf-8") as fragment_file: try: - self.template.update_object( - self.merged, json.loads(fragment_file.read()) - ) + if not self.merged: + self.merged = json.loads(fragment_file.read()) + else: + self.template.update_object( + self.merged, json.loads(fragment_file.read()) + ) except json.JSONDecodeError as e: self.module.fail_json( msg=f"JSON parsing error for file, {fragment}: {to_text(e.msg)}", @@ -271,7 +275,7 @@ def main(): dest=dict(required=True, type="path", aliases=["cluster_template"]), backup=dict(type="bool", default=False), remote_src=dict(type="bool", default=False), - regexp=dict(type="str", aliases=["filter"]), + regexp=dict(type="str", aliases=["filter", "regex"]), ignore_hidden=dict(type="bool", default=True), ), add_file_common_args=True, From d2761b61c29cb5194374ce51b9a395669481116e Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:24:12 -0500 Subject: [PATCH 12/18] Add unit tests for assemble_cluster_template action Signed-off-by: Webster Mudge --- .../test_assemble_cluster_template_action.py | 319 ++++++++++++++++++ 1 file changed, 319 insertions(+) create mode 100644 tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py diff --git a/tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py b/tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py new file mode 100644 index 00000000..57447a76 --- /dev/null +++ b/tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py @@ -0,0 +1,319 @@ +# Copyright 2024 Cloudera, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import logging +import json +import os +import pytest +import re + +from dataclasses import dataclass + +from unittest.mock import MagicMock + +from ansible.errors import AnsibleError, AnsibleActionFail +from ansible.parsing.dataloader import DataLoader +from ansible.playbook.task import Task +from ansible.template import Templar + +from ansible_collections.cloudera.cluster.plugins.action.assemble_cluster_template import ( + ActionModule as AssembleClusterTemplateAction, +) + +LOG = logging.getLogger(__name__) +TEST_DIR = os.path.dirname(os.path.abspath(__file__)) + + +@dataclass(init=True) +class MockContext: + task: MagicMock(Task) = MagicMock(Task) + connection: MagicMock = MagicMock() + play_context: MagicMock = MagicMock() + loader: MagicMock(DataLoader) = MagicMock() + templar: Templar = Templar(loader=MagicMock(DataLoader)) + shared_loader_obj: MagicMock(DataLoader) = None + + +@pytest.fixture() +def mock_module_exec(): + def setup(plugin): + plugin._get_module_args = MagicMock() + plugin._execute_module = MagicMock() + + return setup + + +def test_empty_parameters(mock_module_exec): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + context.task.args = dict() + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == "Both 'src' and 'dest' are required" + + +def test_missing_src(mock_module_exec, tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_file = tmp_path / "src.json" + src_file.touch() + + context.task.args = dict(src=str(src_file)) + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == "Both 'src' and 'dest' are required" + + +def test_missing_dest(mock_module_exec, tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + context.task.args = dict(dest=str(dest_file)) + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == "Both 'src' and 'dest' are required" + + +def test_remote_src(mock_module_exec, tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_file = tmp_path / "src.json" + src_file.touch() + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + context.task.args = dict(remote_src=True, src=str(src_file), dest=str(dest_file)) + plugin._execute_module.return_value = dict(msg="Module called") + + results = plugin.run() + + assert results["msg"] == "Module called" + + +def test_src_not_found(mock_module_exec, tmp_path, monkeypatch): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_file = tmp_path / "src.json" + src_file.touch() + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + context.task.args = dict(src=str(src_file), dest=str(dest_file)) + + not_found = 
MagicMock() + not_found.side_effect = AnsibleError("NOT FOUND") + + monkeypatch.setattr(plugin, "_find_needle", not_found) + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == "NOT FOUND" + + +def test_src_not_directory(mock_module_exec, tmp_path, monkeypatch): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_file = tmp_path / "src.json" + src_file.touch() + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + context.task.args = dict(src=str(src_file), dest=str(dest_file)) + monkeypatch.setattr(plugin, "_find_needle", MagicMock(return_value=src_file)) + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == f"Source, {str(src_file)}, is not a directory" + + +def test_invalid_regexp(mock_module_exec, tmp_path, monkeypatch): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + regexp = "[" + context.task.args = dict(src=str(src_dir), dest=str(dest_file), regexp=regexp) + monkeypatch.setattr(plugin, "_find_needle", MagicMock(return_value=src_dir)) + + results = plugin.run() + + assert results["failed"] == True + assert ( + results["msg"] + == f"Regular expression, {regexp}, is invalid: unterminated character set at position 0" + ) + + +def test_assemble_fragments(tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + base = src_dir / "base.json" + base.write_text(json.dumps(dict(one="BASE", two="BASE"))) + + overlay = src_dir / "overlay.json" + overlay.write_text(json.dumps(dict(one="OVERLAY"))) + + dest_file = tmp_path / "dest.json" + + def find_in_tmp(fragment, decrypt): + return os.path.join(src_dir, fragment) + + context.loader.get_real_file = find_in_tmp + + plugin.assemble_fragments(dest_file.open(mode="w", encoding="utf-8"), src_dir) + + results = json.load(dest_file.open(mode="r", encoding="utf-8")) + + assert len(results) == 2 + assert results["one"] == "OVERLAY" + assert results["two"] == "BASE" + + +def test_assemble_fragments_regexp(tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + base = src_dir / "base.json" + base.write_text(json.dumps(dict(one="BASE", two="BASE"))) + + overlay = src_dir / "overlay.json" + overlay.write_text(json.dumps(dict(one="OVERLAY"))) + + ignored = src_dir / "ignored.json" + ignored.write_text(json.dumps(dict(one="IGNORED"))) + + dest_file = tmp_path / "dest.json" + + def find_in_tmp(fragment, decrypt): + return os.path.join(src_dir, fragment) + + context.loader.get_real_file = find_in_tmp + + regexp = re.compile("^((?!ig).)*$") + + plugin.assemble_fragments( + dest_file.open(mode="w", encoding="utf-8"), src_dir, regex=regexp + ) + + results = json.load(dest_file.open(mode="r", encoding="utf-8")) + + assert len(results) == 2 + assert results["one"] == "OVERLAY" + assert results["two"] == "BASE" + + +def 
test_assemble_fragments_malformed(tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + base = src_dir / "base.json" + base.write_text(json.dumps(dict(one="BASE", two="BASE"))) + + overlay = src_dir / "overlay.json" + overlay.write_text(json.dumps(dict(one="OVERLAY"))) + + ignored = src_dir / "malformed.txt" + ignored.write_text("BOOM") + + dest_file = tmp_path / "dest.json" + + def find_in_tmp(fragment, decrypt): + return os.path.join(src_dir, fragment) + + context.loader.get_real_file = find_in_tmp + + with pytest.raises(AnsibleActionFail, match="JSON parsing error"): + plugin.assemble_fragments(dest_file.open(mode="w", encoding="utf-8"), src_dir) From 88edd463ad9065d285461065ed0b1c479cfbba2c Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:25:05 -0500 Subject: [PATCH 13/18] Refactor test files to be temporary Signed-off-by: Webster Mudge --- .../assemble_cluster_template/filtered/ignore.txt | 14 -------------- .../assemble_cluster_template/filtered/keep.json | 0 .../assemble_cluster_template/fragments/01.json | 4 ---- .../assemble_cluster_template/fragments/02.json | 4 ---- .../assemble_cluster_template/invalid_src.json | 3 --- 5 files changed, 25 deletions(-) delete mode 100644 tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt delete mode 100644 tests/unit/plugins/modules/assemble_cluster_template/filtered/keep.json delete mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json delete mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json delete mode 100644 tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json diff --git a/tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt b/tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt deleted file mode 100644 index 87bc5d18..00000000 --- a/tests/unit/plugins/modules/assemble_cluster_template/filtered/ignore.txt +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2024 Cloudera, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- diff --git a/tests/unit/plugins/modules/assemble_cluster_template/filtered/keep.json b/tests/unit/plugins/modules/assemble_cluster_template/filtered/keep.json deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json deleted file mode 100644 index 18cac2be..00000000 --- a/tests/unit/plugins/modules/assemble_cluster_template/fragments/01.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "Test", - "value": "01" -} \ No newline at end of file diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json deleted file mode 100644 index fcab3a69..00000000 --- a/tests/unit/plugins/modules/assemble_cluster_template/fragments/02.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "Test", - "value": "02" -} \ No newline at end of file diff --git a/tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json b/tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json deleted file mode 100644 index f083e059..00000000 --- a/tests/unit/plugins/modules/assemble_cluster_template/invalid_src.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "test": "false" -} \ No newline at end of file From 885eb886633193248e52cf283fce0c63778f7ea5 Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:25:49 -0500 Subject: [PATCH 14/18] Rename bare test to module test Signed-off-by: Webster Mudge --- ... test_assemble_cluster_template_module.py} | 68 ++++++++++++++++++- 1 file changed, 65 insertions(+), 3 deletions(-) rename tests/unit/plugins/modules/assemble_cluster_template/{test_assemble_cluster_template.py => test_assemble_cluster_template_module.py} (90%) diff --git a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py similarity index 90% rename from tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py rename to tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py index 1fbb9f10..09b46009 100644 --- a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template.py +++ b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py @@ -62,17 +62,79 @@ def test_missing_src(module_args): assemble_cluster_template.main() -def test_src_not_directory(module_args): +def test_src_not_directory(module_args, tmp_path): + root_dir = tmp_path / "not_directory" + root_dir.mkdir() + + invalid_src = root_dir / "invalid_src.json" + invalid_src.touch() + module_args( { "dest": "foo.json", - "src": os.path.join(TEST_DIR, "invalid_src.json"), + "src": str(invalid_src), } ) with pytest.raises(AnsibleFailJson, match="not a directory"): assemble_cluster_template.main() +def test_src_invalid_file(module_args, tmp_path): + root_dir = tmp_path / "not_valid" + root_dir.mkdir() + + invalid_file = root_dir / "invalid_file.txt" + invalid_file.touch() + + module_args( + { + "dest": "foo.json", + "src": str(root_dir), + } + ) + + with pytest.raises(AnsibleFailJson, match="JSON parsing error"): + assemble_cluster_template.main() + +def test_src_filtered(module_args, tmp_path): + root_dir = tmp_path / "filtered" + root_dir.mkdir() + + content = dict() + content["test"] = "Test" + + base = root_dir / "base.json" + base.write_text( + 
json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + overlay = dict() + overlay["error"] = True + + filtered = root_dir / "filtered.json" + filtered.write_text( + json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" + ) + + results = root_dir / "results.json" + + module_args( + { + "dest": str(results), + "src": str(root_dir), + "regexp": "^filtered" + } + ) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert "error" not in output + assert len(output) == 1 + assert "test" in output + assert output["test"] == "Test" @pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS) def test_merge_idempotent_key(module_args, tmp_path, key): @@ -384,7 +446,7 @@ def test_merge_list_idempotent_conflict(module_args, tmp_path): module_args({"dest": str(results), "src": str(fragment_dir)}) - with pytest.raises(AnsibleFailJson, match="\/test\/\[name=Conflict\]\/name"): + with pytest.raises(AnsibleFailJson, match="/test/\[name=Conflict\]/name"): assemble_cluster_template.main() def test_merge_dict(module_args, tmp_path): From 945b1d027f2c647e82e83e0da37b3e8ac94269d7 Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:26:14 -0500 Subject: [PATCH 15/18] Add "fix" for VSCode pytest discovery Signed-off-by: Webster Mudge --- tests/unit/conftest.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index b3394f40..5f964969 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -25,12 +25,17 @@ from ansible.module_utils import basic from ansible.module_utils.common.text.converters import to_bytes +# Required for pytest discovery in VSCode, reasons unknown... +try: + from ansible.plugins.action import ActionBase +except ModuleNotFoundError: + pass + from ansible_collections.cloudera.cluster.tests.unit import ( AnsibleFailJson, AnsibleExitJson, ) - @pytest.fixture(autouse=True) def skip_python(): if sys.version_info < (3, 6): From 409019dc669021a783ddeb05c7a49ccce8191c97 Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Thu, 1 Feb 2024 21:26:31 -0500 Subject: [PATCH 16/18] Add pythonpath parameter to pytest.ini Signed-off-by: Webster Mudge --- pytest.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index f6125c59..f71a11df 100644 --- a/pytest.ini +++ b/pytest.ini @@ -14,4 +14,6 @@ [pytest] filterwarnings = - ignore::DeprecationWarning \ No newline at end of file + ignore::DeprecationWarning + +pythonpath = "../../../" \ No newline at end of file From bdba1dc90a5821bae417e76e588b2c499f1f6e5c Mon Sep 17 00:00:00 2001 From: Webster Mudge Date: Wed, 7 Feb 2024 16:31:33 -0500 Subject: [PATCH 17/18] Update merge logic to handle idempotent fragments within lists Signed-off-by: Webster Mudge --- plugins/action/assemble_cluster_template.py | 2 +- plugins/module_utils/cm_utils.py | 125 +++-- plugins/modules/assemble_cluster_template.py | 2 +- plugins/modules/cm_cluster.py | 2 +- .../fragments/base.json | 31 ++ .../fragments/service-1.json | 51 ++ .../fragments/service-2.json | 43 ++ .../fragments/service-3.json | 32 ++ .../test_assemble_cluster_template_module.py | 492 +++++++++++------- 9 files changed, 539 insertions(+), 241 deletions(-) create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json create mode 100644 
tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json create mode 100644 tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json diff --git a/plugins/action/assemble_cluster_template.py b/plugins/action/assemble_cluster_template.py index d6d51e48..33c81b80 100644 --- a/plugins/action/assemble_cluster_template.py +++ b/plugins/action/assemble_cluster_template.py @@ -67,7 +67,7 @@ def assemble_fragments( if not self.MERGED: self.MERGED = json.loads(fragment_file.read()) else: - self.TEMPLATE.update_object(self.MERGED, json.loads(fragment_file.read())) + self.TEMPLATE.merge(self.MERGED, json.loads(fragment_file.read())) except json.JSONDecodeError as e: raise AnsibleActionFail( message=f"JSON parsing error: {to_text(e.msg)}", diff --git a/plugins/module_utils/cm_utils.py b/plugins/module_utils/cm_utils.py index d677434f..35bb9370 100644 --- a/plugins/module_utils/cm_utils.py +++ b/plugins/module_utils/cm_utils.py @@ -21,6 +21,7 @@ import logging from functools import wraps +from typing import Union from urllib3 import disable_warnings from urllib3.exceptions import InsecureRequestWarning, MaxRetryError, HTTPError from urllib3.util import Url @@ -39,73 +40,97 @@ class ClusterTemplate(object): - IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] - UNIQUE_IDS = ["repositories"] + IDEMPOTENT_IDS = frozenset( + ["refName", "name", "clusterName", "hostName", "product"] + ) + UNIQUE_IDS = frozenset(["repositories"]) def __init__(self, warn_fn, error_fn) -> None: self._warn = warn_fn self._error = error_fn - def update_object(self, base, template, breadcrumbs="") -> bool: - if isinstance(base, dict) and isinstance(template, dict): - self.update_dict(base, template, breadcrumbs) - return True - elif isinstance(base, list) and isinstance(template, list): - self.update_list(base, template, breadcrumbs) - return True - return False - - def update_dict(self, base, template, breadcrumbs="") -> None: - for key, value in template.items(): + def merge(self, base: Union[dict, list], fragment: Union[dict, list]) -> bool: + if isinstance(base, dict) and isinstance(fragment, dict): + self._update_dict(base, fragment) + elif isinstance(base, list) and isinstance(fragment, list): + self._update_list(base, fragment) + else: + raise TypeError( + f"Base and fragment arguments must be the same type: base[{type(base)}], fragment[{type(fragment)}]" + ) + + def _update_dict(self, base, fragment, breadcrumbs="") -> None: + for key, value in fragment.items(): crumb = breadcrumbs + "/" + key + # If the key is idempotent, error that the values are different if key in self.IDEMPOTENT_IDS: - if key in base and base[key] != value: - self._error( - "Objects with distinct IDs should not be merged: " + crumb - ) + if base[key] != value: + self._error(f"Unable to override value for distinct key [{crumb}]") + continue + # If it's a new key, add to the bae if key not in base: base[key] = value - elif not self.update_object(base[key], value, crumb) and base[key] != value: - self._warn( - f"Value being overwritten for key [{crumb}]], Old: [{base[key]}], New: [{value}]" - ) - base[key] = value + # If the value is a dictionary, merge + elif isinstance(value, dict): + self._update_dict(base[key], value, crumb) + # If the value is a list, merge + elif isinstance(value, list): + self._update_list(base[key], value, crumb) + # Else the value is a scalar + else: + # If the value is different, override + if base[key] != value: + self._warn( + f"Overriding value for key 
[{crumb}]], Old: [{base[key]}], New: [{value}]" + ) + base[key] = value if key in self.UNIQUE_IDS: base[key] = list(set(base[key])) base[key].sort(key=lambda x: json.dumps(x, sort_keys=True)) - def update_list(self, base, template, breadcrumbs="") -> None: - for item in template: - if isinstance(item, dict): - for attr in self.IDEMPOTENT_IDS: - if attr in item: - idempotent_id = attr - break - else: - idempotent_id = None - - if idempotent_id: - namesake = [ - i - for i in base - if idempotent_id in i - ] - for n in namesake: - self.update_dict( - n, - item, - breadcrumbs - + "/[" - + idempotent_id - + "=" - + item[idempotent_id] - + "]", + def _update_list(self, base, fragment, breadcrumbs="") -> None: + for entry in fragment: + if isinstance(entry, dict): + # Discover if the incoming dict has an idempotent key + idempotent_key = next( + iter( + [ + id + for id in set(entry.keys()).intersection( + self.IDEMPOTENT_IDS + ) + ] + ), + None, + ) + + # Merge the idemponent key's dictionary rather than appending as a new entry + if idempotent_key: + existing_entry = next( + iter( + [ + i + for i in base + if isinstance(i, dict) + and idempotent_key in i + and i[idempotent_key] == entry[idempotent_key] + ] + ), + None, + ) + if existing_entry: + self._update_dict( + existing_entry, + entry, + f"{breadcrumbs}/[{idempotent_key}={entry[idempotent_key]}]", ) - continue - base.append(item) + continue + # Else, drop to appending the entry as net new + base.append(entry) + base.sort(key=lambda x: json.dumps(x, sort_keys=True)) diff --git a/plugins/modules/assemble_cluster_template.py b/plugins/modules/assemble_cluster_template.py index d10490fa..857ef8f4 100644 --- a/plugins/modules/assemble_cluster_template.py +++ b/plugins/modules/assemble_cluster_template.py @@ -195,7 +195,7 @@ def assemble_fragments(self, assembled_file): if not self.merged: self.merged = json.loads(fragment_file.read()) else: - self.template.update_object( + self.template.merge( self.merged, json.loads(fragment_file.read()) ) except json.JSONDecodeError as e: diff --git a/plugins/modules/cm_cluster.py b/plugins/modules/cm_cluster.py index aa05fbf8..c3762764 100644 --- a/plugins/modules/cm_cluster.py +++ b/plugins/modules/cm_cluster.py @@ -195,7 +195,7 @@ def process(self): if template_contents: TEMPLATE = ClusterTemplate(warn_fn=self.module.warn, error_fn=self.module.fail_json) - TEMPLATE.update_object(template_contents, explicit_params) + TEMPLATE.merge(template_contents, explicit_params) payload.update(body=template_contents) else: payload.update(body=explicit_params) diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json new file mode 100644 index 00000000..96d16542 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json @@ -0,0 +1,31 @@ +{ + "cdhVersion": "1.2.3", + "cmVersion": "4.5.6", + "displayName": "ExampleClusterTemplate", + "hostTemplates": [ + { + "cardinality": 1, + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [] + } + ], + "instantiator": { + "clusterName": "ExampleCluster", + "hosts": [ + { + "hostName": "host.example.com", + "hostTemplateRefName": "ExampleHostTemplate" + } + ] + }, + "products": [ + { + "product": "CDH", + "version": "1.2.3" + } + ], + "repositories": [ + "https://archive.cloudera.com/" + ], + "services": [] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json 
b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json new file mode 100644 index 00000000..2d3b8931 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json @@ -0,0 +1,51 @@ +{ + "hostTemplates": [ + { + "refName": "AnotherExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "atlas-ATLAS_SERVER-BASE", + "atlas-GATEWAY-BASE" + ] + } + ], + "services": [ + { + "refName": "atlas", + "serviceType": "ATLAS", + "displayName": "Atlas", + "serviceConfigs": [], + "roleConfigGroups": [ + { + "refName": "atlas-ATLAS_SERVER-BASE", + "roleType": "ATLAS_SERVER", + "base": true, + "configs": [ + { + "name": "atlas_server_http_port", + "value": "31000" + }, + { + "name": "atlas_server_https_port", + "value": "31443" + } + ] + }, + { + "refName": "atlas-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": true, + "configs": [] + } + ] + } + ], + "products": [ + { + "product": "FOO", + "version": "9.8.7" + } + ], + "repositories": [ + "https://archive.cloudera.com/atlas" + ] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json new file mode 100644 index 00000000..afa8560b --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json @@ -0,0 +1,43 @@ +{ + "hostTemplates": [ + { + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE" + ] + } + ], + "services": [ + { + "refName": "schemaregistry", + "serviceType": "SCHEMAREGISTRY", + "displayName": "Schema Registry", + "serviceConfigs": [ + { + "name": "database_host", + "value": "host.example.com" + } + ], + "roleConfigGroups": [ + { + "refName": "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE", + "roleType": "SCHEMA_REGISTRY_SERVER", + "base": true, + "configs": [ + { + "name": "schema.registry.port", + "value": "7788" + }, + { + "name": "schema.registry.ssl.port", + "value": "7790" + } + ] + } + ] + } + ], + "repositories": [ + "https://archive.cloudera.com/schemaregistry" + ] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json new file mode 100644 index 00000000..718f5143 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json @@ -0,0 +1,32 @@ +{ + "hostTemplates": [ + { + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "livy-GATEWAY-BASE", + "livy-LIVY_SERVER-BASE" + ] + } + ], + "services": [ + { + "refName": "livy", + "serviceType": "LIVY", + "displayName": "Livy", + "roleConfigGroups": [ + { + "refName": "livy-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": true, + "configs": [] + }, + { + "refName": "livy-LIVY_SERVER-BASE", + "roleType": "LIVY_SERVER", + "base": true, + "configs": [] + } + ] + } + ] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py index 09b46009..5c054db2 100644 --- a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py +++ b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py @@ -36,7 +36,7 @@ TEST_DIR = os.path.dirname(os.path.abspath(__file__)) -def expected_list(expected:list) -> list: +def expected_list(expected: list) -> list: 
expected.sort(key=lambda x: json.dumps(x, sort_keys=True)) return expected @@ -65,10 +65,10 @@ def test_missing_src(module_args): def test_src_not_directory(module_args, tmp_path): root_dir = tmp_path / "not_directory" root_dir.mkdir() - + invalid_src = root_dir / "invalid_src.json" invalid_src.touch() - + module_args( { "dest": "foo.json", @@ -79,13 +79,14 @@ def test_src_not_directory(module_args, tmp_path): with pytest.raises(AnsibleFailJson, match="not a directory"): assemble_cluster_template.main() + def test_src_invalid_file(module_args, tmp_path): root_dir = tmp_path / "not_valid" root_dir.mkdir() - + invalid_file = root_dir / "invalid_file.txt" invalid_file.touch() - + module_args( { "dest": "foo.json", @@ -95,52 +96,46 @@ def test_src_invalid_file(module_args, tmp_path): with pytest.raises(AnsibleFailJson, match="JSON parsing error"): assemble_cluster_template.main() - + + def test_src_filtered(module_args, tmp_path): root_dir = tmp_path / "filtered" root_dir.mkdir() - + content = dict() content["test"] = "Test" - + base = root_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + overlay = dict() overlay["error"] = True - + filtered = root_dir / "filtered.json" filtered.write_text( json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" ) - + results = root_dir / "results.json" - - module_args( - { - "dest": str(results), - "src": str(root_dir), - "regexp": "^filtered" - } - ) + + module_args({"dest": str(results), "src": str(root_dir), "regexp": "^filtered"}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert "error" not in output assert len(output) == 1 assert "test" in output assert output["test"] == "Test" + @pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS) def test_merge_idempotent_key(module_args, tmp_path, key): root_dir = tmp_path / "idempotent" root_dir.mkdir() - + content = dict() content[key] = "Test" @@ -148,25 +143,22 @@ def test_merge_idempotent_key(module_args, tmp_path, key): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") module_args({"dest": str(root_dir / "results.json"), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() + @pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS) def test_merge_idempotent_key_conflict(module_args, tmp_path, key): root_dir = tmp_path / "idempotent_conflict" root_dir.mkdir() - + content = dict() content[key] = "Test" @@ -174,26 +166,23 @@ def test_merge_idempotent_key_conflict(module_args, tmp_path, key): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content[key] = "Error" overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), 
encoding="utf-8") module_args({"dest": str(root_dir / "results.json"), "src": str(fragment_dir)}) with pytest.raises(AnsibleFailJson, match=f"/{key}"): assemble_cluster_template.main() + @pytest.mark.parametrize("key", ClusterTemplate.UNIQUE_IDS) def test_merge_unique_key(module_args, tmp_path, key): root_dir = tmp_path / "unique" root_dir.mkdir() - + content = dict() content[key] = ["one", "two"] @@ -201,32 +190,29 @@ def test_merge_unique_key(module_args, tmp_path, key): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output[key]) == 2 assert output[key] == expected_list(["one", "two"]) + @pytest.mark.parametrize("key", ClusterTemplate.UNIQUE_IDS) def test_merge_unique_key_additional(module_args, tmp_path, key): root_dir = tmp_path / "unique_additional" root_dir.mkdir() - + content = dict() content[key] = ["one", "two"] @@ -234,32 +220,29 @@ def test_merge_unique_key_additional(module_args, tmp_path, key): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content[key] = ["one", "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output[key]) == 3 assert output[key] == expected_list(["one", "two", "three"]) + def test_merge_list(module_args, tmp_path): root_dir = tmp_path / "list" root_dir.mkdir() - + content = dict() content["test"] = ["one", "two"] @@ -267,32 +250,29 @@ def test_merge_list(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = ["one", "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.loads(results.read_text()) - + assert len(output["test"]) == 4 assert output["test"] == expected_list(["one", "two", "one", "three"]) + def test_merge_list_nested(module_args, tmp_path): root_dir = tmp_path / "list_nested" root_dir.mkdir() - + content = dict() content["test"] = [["one"], "two"] @@ -300,32 +280,29 @@ def 
test_merge_list_nested(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = [["one"], "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 4 assert output["test"] == expected_list([["one"], ["one"], "two", "three"]) + def test_merge_list_idempotent(module_args, tmp_path): root_dir = tmp_path / "list_idempotent" root_dir.mkdir() - + content = dict() content["test"] = [{"name": "Test"}, "two"] @@ -333,32 +310,29 @@ def test_merge_list_idempotent(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = [{"name": "Test"}, "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 3 assert output["test"] == expected_list([{"name": "Test"}, "two", "three"]) + def test_merge_list_idempotent_multiple_elements(module_args, tmp_path): root_dir = tmp_path / "list_idempotent_multiple_elements" root_dir.mkdir() - + content = dict() content["test"] = [{"name": "Test"}, {"product": "Product"}, "two"] @@ -366,32 +340,31 @@ def test_merge_list_idempotent_multiple_elements(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = [{"name": "Test"}, {"product": "Product"}, "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 4 - assert output["test"] == expected_list([{"name": "Test"}, {"product": "Product"}, "two", "three"]) + assert output["test"] == expected_list( + [{"name": "Test"}, {"product": "Product"}, "two", "three"] + ) + def test_merge_list_idempotent_multiple_keys(module_args, tmp_path): root_dir = tmp_path / "list_idempotent_multiple_keys" root_dir.mkdir() - + content = dict() content["test"] = [{"name": "Test", "product": "Product"}, "two"] @@ -399,32 +372,31 @@ def test_merge_list_idempotent_multiple_keys(module_args, 
tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = [{"name": "Test", "product": "Product"}, "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 3 - assert output["test"] == expected_list([{"name": "Test", "product": "Product"}, "two", "three"]) - -def test_merge_list_idempotent_conflict(module_args, tmp_path): - root_dir = tmp_path / "list_idempotent_conflict" + assert output["test"] == expected_list( + [{"name": "Test", "product": "Product"}, "two", "three"] + ) + + +def test_merge_list_idempotent_append(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_append" root_dir.mkdir() - + content = dict() content["test"] = [{"name": "Test"}, "two"] @@ -432,27 +404,31 @@ def test_merge_list_idempotent_conflict(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") - content["test"] = [{"name": "Conflict"}, "three"] + content["test"] = [{"name": "Additional"}, "three"] overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) - with pytest.raises(AnsibleFailJson, match="/test/\[name=Conflict\]/name"): + with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() + output = json.load(results.open()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list( + [{"name": "Test"}, "two", {"name": "Additional"}, "three"] + ) + + def test_merge_dict(module_args, tmp_path): root_dir = tmp_path / "dict" root_dir.mkdir() - + content = dict() content["test"] = {"one": 1} @@ -460,32 +436,29 @@ def test_merge_dict(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = {"two": 2} overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 2 assert output["test"] == {"one": 1, "two": 2} -def test_merge_dict_overwrite(module_args, tmp_path): + +def test_merge_dict_overwrite(module_args, tmp_path): root_dir = tmp_path / "dict_overwrite" root_dir.mkdir() - + content = dict() content["test"] = 
{"one": 1} @@ -493,32 +466,59 @@ def test_merge_dict_overwrite(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = {"one": 2} overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert output["test"] == {"one": 2} + + +def test_merge_dict_idempotent_key(module_args, tmp_path): + root_dir = tmp_path / "dict_idempotent_key" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": 1} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = {"one": 2} + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 1 - assert output["test"] == {"one": 2} + assert output["test"] == {"one": 2} + -def test_merge_dict_nested(module_args, tmp_path): +def test_merge_dict_nested(module_args, tmp_path): root_dir = tmp_path / "dict_nested" root_dir.mkdir() - + content = dict() content["test"] = {"one": {"two": 1}} @@ -526,33 +526,30 @@ def test_merge_dict_nested(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = {"one": {"three": 3}} overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 1 assert len(output["test"]["one"]) == 2 - assert output["test"] == {"one": {"two": 1, "three": 3}} + assert output["test"] == {"one": {"two": 1, "three": 3}} + -def test_merge_dict_nested_overwrite(module_args, tmp_path): +def test_merge_dict_nested_overwrite(module_args, tmp_path): root_dir = tmp_path / "dict_nested_overwrite" root_dir.mkdir() - + content = dict() content["test"] = {"one": {"two": 1}} @@ -560,33 +557,30 @@ def test_merge_dict_nested_overwrite(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = {"one": {"two": 2, "three": 3}} overlay = fragment_dir / "overlay.json" - 
overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 1 assert len(output["test"]["one"]) == 2 assert output["test"] == {"one": {"two": 2, "three": 3}} -def test_merge_dict_nested_idempotent(module_args, tmp_path): + +def test_merge_dict_nested_idempotent(module_args, tmp_path): root_dir = tmp_path / "dict_nested_idempotent" root_dir.mkdir() - + content = dict() content["test"] = {"one": {"name": "Test"}} @@ -594,33 +588,30 @@ def test_merge_dict_nested_idempotent(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = {"one": {"name": "Test"}} overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleExitJson): assemble_cluster_template.main() - + output = json.load(results.open()) - + assert len(output["test"]) == 1 assert len(output["test"]["one"]) == 1 assert output["test"] == {"one": {"name": "Test"}} -def test_merge_dict_nested_idempotent_conflict(module_args, tmp_path): + +def test_merge_dict_nested_idempotent_conflict(module_args, tmp_path): root_dir = tmp_path / "dict_nested_idempotent_conflict" root_dir.mkdir() - + content = dict() content["test"] = {"one": {"name": "Test"}} @@ -628,19 +619,144 @@ def test_merge_dict_nested_idempotent_conflict(module_args, tmp_path): fragment_dir.mkdir() base = fragment_dir / "base.json" - base.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") content["test"] = {"one": {"name": "Conflict"}} overlay = fragment_dir / "overlay.json" - overlay.write_text( - json.dumps(content, indent=2, sort_keys=False), encoding="utf-8" - ) - + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + results = root_dir / "results.json" module_args({"dest": str(results), "src": str(fragment_dir)}) with pytest.raises(AnsibleFailJson, match="\/test\/one\/name"): assemble_cluster_template.main() + + +def test_multiple_services(module_args, tmp_path): + results = tmp_path / "results.json" + + module_args({"dest": str(results), "src": os.path.join(TEST_DIR, "fragments")}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert output["displayName"] == "ExampleClusterTemplate" + assert output["cdhVersion"] == "1.2.3" + assert output["cmVersion"] == "4.5.6" + + assert set(output["repositories"]) == set( + [ + "https://archive.cloudera.com/", + "https://archive.cloudera.com/schemaregistry", + "https://archive.cloudera.com/atlas", + ] + ) + + assert len(output["products"]) == 2 + assert output["products"] == expected_list( + [ + dict(product="CDH", version="1.2.3"), + dict(product="FOO", version="9.8.7"), 
+ ] + ) + + assert output["instantiator"]["clusterName"] == "ExampleCluster" + assert len(output["instantiator"]["hosts"]) == 1 + assert output["instantiator"]["hosts"] == expected_list( + [{"hostName": "host.example.com", "hostTemplateRefName": "ExampleHostTemplate"}] + ) + + assert len(output["hostTemplates"]) == 2 + assert output["hostTemplates"] == expected_list( + [ + { + "cardinality": 1, + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "livy-GATEWAY-BASE", + "livy-LIVY_SERVER-BASE", + "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE", + ], + }, + { + "refName": "AnotherExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "atlas-ATLAS_SERVER-BASE", + "atlas-GATEWAY-BASE", + ], + }, + ] + ) + + assert len(output["services"]) == 3 + assert output["services"] == expected_list( + [ + { + "refName": "atlas", + "serviceType": "ATLAS", + "displayName": "Atlas", + "serviceConfigs": [], + "roleConfigGroups": [ + { + "refName": "atlas-ATLAS_SERVER-BASE", + "roleType": "ATLAS_SERVER", + "base": True, + "configs": [ + {"name": "atlas_server_http_port", "value": "31000"}, + {"name": "atlas_server_https_port", "value": "31443"}, + ], + }, + { + "refName": "atlas-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": True, + "configs": [], + }, + ], + }, + { + "refName": "schemaregistry", + "serviceType": "SCHEMAREGISTRY", + "displayName": "Schema Registry", + "serviceConfigs": [ + {"name": "database_host", "value": "host.example.com"} + ], + "roleConfigGroups": [ + { + "refName": "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE", + "roleType": "SCHEMA_REGISTRY_SERVER", + "base": True, + "configs": [ + {"name": "schema.registry.port", "value": "7788"}, + { + "name": "schema.registry.ssl.port", + "value": "7790", + }, + ], + } + ], + }, + { + "refName": "livy", + "serviceType": "LIVY", + "displayName": "Livy", + "roleConfigGroups": [ + { + "refName": "livy-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": True, + "configs": [], + }, + { + "refName": "livy-LIVY_SERVER-BASE", + "roleType": "LIVY_SERVER", + "base": True, + "configs": [], + }, + ], + }, + ], + ) From 717ce6a5f65ec7256c7c1b93d14ea1e128492ba9 Mon Sep 17 00:00:00 2001 From: rsuplina Date: Thu, 8 Feb 2024 15:17:42 +0000 Subject: [PATCH 18/18] Add wait_for_command_state Signed-off-by: rsuplina --- plugins/module_utils/cm_utils.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/plugins/module_utils/cm_utils.py b/plugins/module_utils/cm_utils.py index 35bb9370..c835cbc4 100644 --- a/plugins/module_utils/cm_utils.py +++ b/plugins/module_utils/cm_utils.py @@ -29,10 +29,11 @@ from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.text.converters import to_text - +from time import sleep from cm_client import ApiClient, Configuration from cm_client.rest import ApiException, RESTClientObject from cm_client.apis.cloudera_manager_resource_api import ClouderaManagerResourceApi +from cm_client.apis.commands_resource_api import CommandsResourceApi __credits__ = ["frisch@cloudera.com"] @@ -302,6 +303,16 @@ def set_session_cookie(self): api_instance.get_version() self.api_client.cookie = self.api_client.last_response.getheader("Set-Cookie") + def wait_for_command_state(self,command_id, polling_interval): + command_api_instance = CommandsResourceApi(self.api_client) + while True: + get_command_state = command_api_instance.read_command_with_http_info(command_id=command_id) + state = get_command_state[0].active + if not state: + break + sleep(polling_interval) + return True + def 
call_api(self, path, method, query=None, field="items", body=None): """Wrapper to call a CM API endpoint path directly.""" path_params = []
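
The wait_for_command_state helper added in the hunk above polls the Commands API until the command's active flag clears, with no upper bound on how long it will wait. Below is a minimal sketch, not part of the patch, of a timeout-bounded variant; it reuses the same cm_client calls shown in the hunk (CommandsResourceApi, read_command_with_http_info), while the max_wait_seconds parameter and the TimeoutError are assumptions added purely for illustration.

# Sketch only, not part of the patch: a bounded version of the polling loop.
# CommandsResourceApi and read_command_with_http_info are used exactly as in
# the hunk above; max_wait_seconds and the TimeoutError are assumptions.
from time import sleep, time

from cm_client.apis.commands_resource_api import CommandsResourceApi


def wait_for_command_state_bounded(api_client, command_id, polling_interval, max_wait_seconds=600):
    command_api = CommandsResourceApi(api_client)
    deadline = time() + max_wait_seconds
    while True:
        # Element [0] of the response tuple is the ApiCommand object; the
        # command has finished once its 'active' flag is no longer set.
        response = command_api.read_command_with_http_info(command_id=command_id)
        if not response[0].active:
            return True
        if time() > deadline:
            raise TimeoutError(
                "Command %s still active after %s seconds" % (command_id, max_wait_seconds)
            )
        sleep(polling_interval)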
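
Stepping back to the assemble_cluster_template test changes earlier in this series: taken together, the renamed and added tests pin down the merge behaviour expected of the module. Dictionaries merge recursively with later fragments overwriting plain values, a value under an identifying key such as "name" must not change between fragments (a conflict is reported by its path, e.g. /test/one/name), and lists only append entries not already present. The following is a rough sketch of those semantics inferred from the tests; it is not the collection's actual merge implementation, and the IDEMPOTENT_KEYS set is an assumption.

# Sketch only: an approximation of the merge semantics the updated tests
# assert, not the collection's actual merge code. IDEMPOTENT_KEYS is inferred
# from the tests, which only exercise the "name" key.
IDEMPOTENT_KEYS = {"name"}


def merge_fragment(base, overlay, path=""):
    if isinstance(base, dict) and isinstance(overlay, dict):
        merged = dict(base)
        for key, value in overlay.items():
            child_path = "%s/%s" % (path, key)
            if key not in merged:
                merged[key] = value
            elif key in IDEMPOTENT_KEYS and merged[key] != value:
                # e.g. '/test/one/name' in test_merge_dict_nested_idempotent_conflict
                raise ValueError("Conflicting value at %s" % child_path)
            elif isinstance(merged[key], (dict, list)):
                merged[key] = merge_fragment(merged[key], value, child_path)
            else:
                # Plain values are overwritten, as in test_merge_dict_overwrite.
                merged[key] = value
        return merged
    if isinstance(base, list) and isinstance(overlay, list):
        # Idempotent append: keep existing entries and add only new ones, as in
        # test_merge_list_idempotent_append (four resulting entries).
        return base + [item for item in overlay if item not in base]
    return overlay

Applied to the base and overlay fragments from test_merge_dict_nested, this sketch returns {"test": {"one": {"two": 1, "three": 3}}}, matching the asserted output.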
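
The tests in this series also rely on module_args, AnsibleExitJson and AnsibleFailJson, which are not shown in the patch. In Ansible collections these are conventionally supplied by a conftest.py that patches exit_json/fail_json to raise exceptions pytest can assert on. A typical sketch follows; it may differ from this repository's actual fixtures and is included only to make the test hunks self-explanatory.

# Sketch of conventional Ansible unit-test fixtures; this repository's real
# conftest.py may differ.
import json

import pytest
from unittest.mock import patch

from ansible.module_utils import basic
from ansible.module_utils.common.text.converters import to_bytes


class AnsibleExitJson(Exception):
    """Raised instead of exiting when the module calls exit_json()."""


class AnsibleFailJson(Exception):
    """Raised instead of exiting when the module calls fail_json()."""


def exit_json(*args, **kwargs):
    raise AnsibleExitJson(kwargs)


def fail_json(*args, **kwargs):
    kwargs.setdefault("failed", True)
    raise AnsibleFailJson(kwargs)


@pytest.fixture
def module_args():
    """Return a callable that injects the given dict as the module arguments."""

    def prepare(args):
        basic._ANSIBLE_ARGS = to_bytes(json.dumps({"ANSIBLE_MODULE_ARGS": args}))

    with patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json):
        yield prepare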