diff --git a/plugins/action/assemble_cluster_template.py b/plugins/action/assemble_cluster_template.py index 655682be..33c81b80 100644 --- a/plugins/action/assemble_cluster_template.py +++ b/plugins/action/assemble_cluster_template.py @@ -29,72 +29,15 @@ from ansible.plugins.action import ActionBase from ansible.utils.hashing import checksum_s +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ClusterTemplate class ActionModule(ActionBase): TRANSFERS_FILES = True - - MERGED = {} - IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] - UNIQUE_IDS = ["repositories"] - - def update_object(self, base, template, breadcrumbs=""): - if isinstance(base, dict) and isinstance(template, dict): - self.update_dict(base, template, breadcrumbs) - return True - elif isinstance(base, list) and isinstance(template, list): - self.update_list(base, template, breadcrumbs) - return True - return False - - def update_dict(self, base, template, breadcrumbs=""): - for key, value in template.items(): - crumb = breadcrumbs + "/" + key - - if key in self.IDEMPOTENT_IDS: - if base[key] != value: - self._display.error( - "Objects with distinct IDs should not be merged: " + crumb - ) - continue - - if key not in base: - base[key] = value - elif not self.update_object(base[key], value, crumb) and base[key] != value: - self._display.warning( - f"Value being overwritten for key [{crumb}]], Old: [{base[key]}], New: [{value}]" - ) - base[key] = value - - if key in self.UNIQUE_IDS: - base[key] = list(set(base[key])) - - def update_list(self, base, template, breadcrumbs=""): - for item in template: - if isinstance(item, dict): - for attr in self.IDEMPOTENT_IDS: - if attr in item: - idempotent_id = attr - break - else: - idempotent_id = None - if idempotent_id: - namesake = [ - i for i in base if i[idempotent_id] == item[idempotent_id] - ] - if namesake: - self.update_dict( - namesake[0], - item, - breadcrumbs - + "/[" - + idempotent_id - + "=" - + item[idempotent_id] - + "]", - ) - continue - base.append(item) - base.sort(key=lambda x: json.dumps(x, sort_keys=True)) + + def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj): + super().__init__(task, connection, play_context, loader, templar, shared_loader_obj) + self.TEMPLATE = ClusterTemplate(warn_fn=self._display.warning, error_fn=self._display.error) + self.MERGED = {} def assemble_fragments( self, assembled_file, src_path, regex=None, ignore_hidden=True, decrypt=True @@ -121,7 +64,10 @@ def assemble_fragments( encoding="utf-8", ) as fragment_file: try: - self.update_object(self.MERGED, json.loads(fragment_file.read())) + if not self.MERGED: + self.MERGED = json.loads(fragment_file.read()) + else: + self.TEMPLATE.merge(self.MERGED, json.loads(fragment_file.read())) except json.JSONDecodeError as e: raise AnsibleActionFail( message=f"JSON parsing error: {to_text(e.msg)}", @@ -155,6 +101,8 @@ def run(self, tmp=None, task_vars=None): regexp = self._task.args.get("regexp", None) if regexp is None: regexp = self._task.args.get("filter", None) + if regexp is None: + regexp = self._task.args.get("regex", None) remote_src = boolean(self._task.args.get("remote_src", False)) follow = boolean(self._task.args.get("follow", False)) diff --git a/plugins/module_utils/cm_utils.py b/plugins/module_utils/cm_utils.py index c0d8af01..c835cbc4 100644 --- a/plugins/module_utils/cm_utils.py +++ b/plugins/module_utils/cm_utils.py @@ -21,6 +21,7 @@ import logging from functools import wraps +from typing import Union 
 from urllib3 import disable_warnings
 from urllib3.exceptions import InsecureRequestWarning, MaxRetryError, HTTPError
 from urllib3.util import Url
@@ -28,16 +29,112 @@
 from ansible.module_utils.basic import AnsibleModule
 from ansible.module_utils.common.text.converters import to_text
-
+from time import sleep
 from cm_client import ApiClient, Configuration
 from cm_client.rest import ApiException, RESTClientObject
 from cm_client.apis.cloudera_manager_resource_api import ClouderaManagerResourceApi
+from cm_client.apis.commands_resource_api import CommandsResourceApi
 
 __credits__ = ["frisch@cloudera.com"]
 __maintainer__ = ["wmudge@cloudera.com"]
 
 
+class ClusterTemplate(object):
+    IDEMPOTENT_IDS = frozenset(
+        ["refName", "name", "clusterName", "hostName", "product"]
+    )
+    UNIQUE_IDS = frozenset(["repositories"])
+
+    def __init__(self, warn_fn, error_fn) -> None:
+        self._warn = warn_fn
+        self._error = error_fn
+
+    def merge(self, base: Union[dict, list], fragment: Union[dict, list]) -> None:
+        if isinstance(base, dict) and isinstance(fragment, dict):
+            self._update_dict(base, fragment)
+        elif isinstance(base, list) and isinstance(fragment, list):
+            self._update_list(base, fragment)
+        else:
+            raise TypeError(
+                f"Base and fragment arguments must be the same type: base[{type(base)}], fragment[{type(fragment)}]"
+            )
+
+    def _update_dict(self, base, fragment, breadcrumbs="") -> None:
+        for key, value in fragment.items():
+            crumb = breadcrumbs + "/" + key
+
+            # If the key is idempotent, error if the values differ
+            if key in self.IDEMPOTENT_IDS:
+                if base[key] != value:
+                    self._error(f"Unable to override value for distinct key [{crumb}]")
+                continue
+
+            # If it's a new key, add to the base
+            if key not in base:
+                base[key] = value
+            # If the value is a dictionary, merge
+            elif isinstance(value, dict):
+                self._update_dict(base[key], value, crumb)
+            # If the value is a list, merge
+            elif isinstance(value, list):
+                self._update_list(base[key], value, crumb)
+            # Else the value is a scalar
+            else:
+                # If the value is different, override
+                if base[key] != value:
+                    self._warn(
+                        f"Overriding value for key [{crumb}], Old: [{base[key]}], New: [{value}]"
+                    )
+                    base[key] = value
+
+            if key in self.UNIQUE_IDS:
+                base[key] = list(set(base[key]))
+                base[key].sort(key=lambda x: json.dumps(x, sort_keys=True))
+
+    def _update_list(self, base, fragment, breadcrumbs="") -> None:
+        for entry in fragment:
+            if isinstance(entry, dict):
+                # Discover if the incoming dict has an idempotent key
+                idempotent_key = next(
+                    iter(
+                        [
+                            id
+                            for id in set(entry.keys()).intersection(
+                                self.IDEMPOTENT_IDS
+                            )
+                        ]
+                    ),
+                    None,
+                )
+
+                # Merge the idempotent key's dictionary rather than appending as a new entry
+                if idempotent_key:
+                    existing_entry = next(
+                        iter(
+                            [
+                                i
+                                for i in base
+                                if isinstance(i, dict)
+                                and idempotent_key in i
+                                and i[idempotent_key] == entry[idempotent_key]
+                            ]
+                        ),
+                        None,
+                    )
+                    if existing_entry:
+                        self._update_dict(
+                            existing_entry,
+                            entry,
+                            f"{breadcrumbs}/[{idempotent_key}={entry[idempotent_key]}]",
+                        )
+                        continue
+            # Else, drop to appending the entry as net new
+            base.append(entry)
+
+        base.sort(key=lambda x: json.dumps(x, sort_keys=True))
+
+
 class ClouderaManagerModule(object):
     """Base Ansible Module for API access to Cloudera Manager."""
 
@@ -60,13 +157,12 @@ def _add_log(err):
                 err = dict(
                     msg="API error: " + to_text(ae.reason),
                     status_code=ae.status,
-                    body=ae.body.decode("utf-8"),
                 )
-                if err["body"] != "":
+                if ae.body:
                     try:
-                        err.update(body=json.loads(err["body"]))
-                    except Exception as te:
-                        pass
+                        err.update(body=json.loads(ae.body))
+                    except Exception:
+                        err.update(body=ae.body.decode("utf-8"))
 
                 self.module.fail_json(**_add_log(err))
             except MaxRetryError as maxe:
@@ -207,6 +303,16 @@ def set_session_cookie(self):
         api_instance.get_version()
         self.api_client.cookie = self.api_client.last_response.getheader("Set-Cookie")
 
+    def wait_for_command_state(self, command_id, polling_interval):
+        """Poll every polling_interval seconds until the given command is no longer active."""
+        command_api_instance = CommandsResourceApi(self.api_client)
+        while True:
+            get_command_state = command_api_instance.read_command_with_http_info(
+                command_id=command_id
+            )
+            state = get_command_state[0].active
+            if not state:
+                break
+            sleep(polling_interval)
+        return True
+
     def call_api(self, path, method, query=None, field="items", body=None):
         """Wrapper to call a CM API endpoint path directly."""
         path_params = []
@@ -274,7 +380,7 @@ def ansible_module(
         mutually_exclusive=[],
         required_one_of=[],
         required_together=[],
-        **kwargs
+        **kwargs,
     ):
         """
         Creates the base Ansible module argument spec and dependencies,
diff --git a/plugins/modules/assemble_cluster_template.py b/plugins/modules/assemble_cluster_template.py
index 7710c6d5..857ef8f4 100644
--- a/plugins/modules/assemble_cluster_template.py
+++ b/plugins/modules/assemble_cluster_template.py
@@ -40,15 +40,15 @@ options:
   src:
     description:
-      - An already existing directory of cluster template files.
-      - TODO Local or remote
+      - A directory of cluster template files, i.e. fragments.
+      - Each file must be a valid JSON file.
     type: path
     required: True
     aliases:
       - cluster_template_src
   dest:
     description:
-      - A file to create using the merger of all of the cluster template files.
+      - A file created from the merger of the cluster template files.
     type: path
     required: True
     aliases:
@@ -61,7 +61,7 @@
     default: False
   remote_src:
     description:
-      - Flag to control the location of the cluster template configuration source files.
+      - Flag for the location of the cluster template fragment files.
      - If V(false), search for I(src) on the controller.
      - If V(true), search for I(src) on the remote/target.
     type: bool
@@ -75,6 +75,7 @@
     type: str
     aliases:
       - filter
+      - regex
   ignore_hidden:
     description:
      - Flag whether to include files that begin with a '.'.
@@ -138,14 +139,14 @@ import tempfile from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.common.text.converters import to_native +from ansible.module_utils.common.text.converters import to_native, to_text +from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import ( + ClusterTemplate, +) -class AssembleClusterTemplate(object): - MERGED = {} - IDEMPOTENT_IDS = ["refName", "name", "clusterName", "hostName", "product"] - UNIQUE_IDS = ["repositories"] +class AssembleClusterTemplate(object): def __init__(self, module): self.module = module @@ -166,69 +167,15 @@ def __init__(self, module): # Initialize internal values self.compiled = None + self.template = ClusterTemplate( + warn_fn=self.module.warn, + error_fn=lambda msg: self.module.fail_json(msg=msg), + ) + self.merged = dict() # Execute the logic self.process() - def update_object(self, base, template, breadcrumbs=""): - if isinstance(base, dict) and isinstance(template, dict): - self.update_dict(base, template, breadcrumbs) - return True - elif isinstance(base, list) and isinstance(template, list): - self.update_list(base, template, breadcrumbs) - return True - return False - - def update_dict(self, base, template, breadcrumbs=""): - for key, value in template.items(): - crumb = breadcrumbs + "/" + key - - if key in self.IDEMPOTENT_IDS: - if base[key] != value: - self.module.warn( - f"Objects with distinct IDs should not be merged: {crumb}" - ) - continue - - if key not in base: - base[key] = value - elif not self.update_object(base[key], value, crumb) and base[key] != value: - self.module.warn( - f"Value being overwritten for key [{crumb}]; Old: [{base[key]}], New: [{value}]" - ) - base[key] = value - - if key in self.UNIQUE_IDS: - base[key] = list(set(base[key])) - - def update_list(self, base, template, breadcrumbs=""): - for item in template: - if isinstance(item, dict): - for attr in self.IDEMPOTENT_IDS: - if attr in item: - idempotent_id = attr - break - else: - idempotent_id = None - if idempotent_id: - namesake = [ - i for i in base if i[idempotent_id] == item[idempotent_id] - ] - if namesake: - self.update_dict( - namesake[0], - item, - breadcrumbs - + "/[" - + idempotent_id - + "=" - + item[idempotent_id] - + "]", - ) - continue - base.append(item) - base.sort(key=lambda x: json.dumps(x, sort_keys=True)) - def assemble_fragments(self, assembled_file): # By file name sort order for f in sorted(os.listdir(self.src)): @@ -245,14 +192,20 @@ def assemble_fragments(self, assembled_file): with open(fragment, "r", encoding="utf-8") as fragment_file: try: - self.update_object(self.MERGED, json.loads(fragment_file.read())) + if not self.merged: + self.merged = json.loads(fragment_file.read()) + else: + self.template.merge( + self.merged, json.loads(fragment_file.read()) + ) except json.JSONDecodeError as e: self.module.fail_json( - msg=f"JSON parsing error: {to_text(e.msg)}", error=to_native(e) + msg=f"JSON parsing error for file, {fragment}: {to_text(e.msg)}", + error=to_native(e), ) # Write out the final assembly - json.dump(self.MERGED, assembled_file, indent=2, sort_keys=False) + json.dump(self.merged, assembled_file, indent=2, sort_keys=False) # Close the assembled file handle; will not delete for atomic_move assembled_file.close() @@ -322,7 +275,7 @@ def main(): dest=dict(required=True, type="path", aliases=["cluster_template"]), backup=dict(type="bool", default=False), remote_src=dict(type="bool", default=False), - regexp=dict(type="str", aliases=["filter"]), + regexp=dict(type="str", 
aliases=["filter", "regex"]),
            ignore_hidden=dict(type="bool", default=True),
        ),
        add_file_common_args=True,
diff --git a/plugins/modules/cm_cluster.py b/plugins/modules/cm_cluster.py
new file mode 100644
index 00000000..c3762764
--- /dev/null
+++ b/plugins/modules/cm_cluster.py
@@ -0,0 +1,254 @@
+# Copyright 2024 Cloudera, Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ansible.module_utils.common.text.converters import to_text, to_native
+
+from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import (
+    ClouderaManagerModule,
+    ClusterTemplate,
+)
+from cm_client.rest import ApiException
+from cm_client import ClouderaManagerResourceApi
+from cm_client import ClustersResourceApi
+import json
+
+ANSIBLE_METADATA = {
+    "metadata_version": "1.1",
+    "status": ["preview"],
+    "supported_by": "community",
+}
+
+DOCUMENTATION = r"""
+---
+module: cm_cluster
+short_description: Create a cluster based on the provided cluster template
+description:
+  - Searches for a template file.
+  - The search for the file starts at the root directory where the Ansible playbook is executed. By default, the template is expected to be placed inside the './files' directory.
+  - Imports the template file and uses it to create the cluster.
+  - This module ensures that the cluster is created according to the specified template.
+author:
+  - "Ronald Suplina (@rsuplina)"
+options:
+  template:
+    description:
+      - Path to the template file which defines the cluster.
+      - Required if I(name) is not set.
+    type: path
+    required: False
+    aliases:
+      - cluster_template
+  add_repositories:
+    description:
+      - Install the parcel repositories declared in the cluster template.
+    type: bool
+    required: False
+    default: False
+  name:
+    description:
+      - Name of the Cloudera Manager cluster.
+      - If not set, the C(clusterName) of the template's C(instantiator) block is used.
+      - Required if I(template) is not set.
+    type: str
+    required: False
+    aliases:
+      - cluster_name
+  state:
+    description:
+      - State of the cluster.
+    type: str
+    required: False
+    default: present
+    choices:
+      - present
+      - absent
+requirements:
+  - cm_client
+"""
+
+EXAMPLES = r"""
+---
+- name: Create a cluster on Cloudera Manager host
+  cloudera.cluster.cm_cluster:
+    host: example.cloudera.com
+    username: "jane_smith"
+    name: "OneNodeCluster"
+    password: "S&peR4Ec*re"
+    port: "7180"
+    template: "./files/cluster-template.json"
+
+- name: Create a cluster and install the repositories defined in template
+  cloudera.cluster.cm_cluster:
+    host: example.cloudera.com
+    username: "jane_smith"
+    password: "S&peR4Ec*re"
+    port: "7180"
+    template: "./files/cluster-template.json"
+    add_repositories: true
+"""
+
+RETURN = r"""
+---
+cloudera_manager:
+  description: Details about the Cloudera Manager cluster.
+  type: dict
+  contains:
+    cluster_type:
+      description: The type of cluster created from the template.
+      type: str
+      returned: optional
+    cluster_url:
+      description: URL of the Cloudera Manager cluster.
+      type: str
+      returned: optional
+    display_name:
+      description: The name of the cluster displayed on the site.
+      type: str
+      returned: optional
+    entity_status:
+      description: Health status of the cluster.
+      type: str
+      returned: optional
+    full_version:
+      description: Version of the cluster installed.
+      type: str
+      returned: optional
+    hosts_url:
+      description: URL of all the hosts on which the cluster is installed.
+      type: str
+      returned: optional
+    maintenance_mode:
+      description: Maintenance mode of the Cloudera Manager cluster.
+      type: bool
+      returned: optional
+    maintenance_owners:
+      description: List of maintenance owners for the Cloudera Manager cluster.
+      type: list
+      returned: optional
+    name:
+      description: The name of the cluster created.
+      type: str
+      returned: optional
+    tags:
+      description: List of tags for the Cloudera Manager cluster.
+      type: list
+      returned: optional
+    uuid:
+      description: Unique ID of the created cluster.
+      type: str
+      returned: optional
+"""
+
+
+class ClusterModule(ClouderaManagerModule):
+    def __init__(self, module):
+        super(ClusterModule, self).__init__(module)
+
+        self.template = self.get_param("template")
+        self.add_repositories = self.get_param("add_repositories")
+        self.cluster_name = self.get_param("name")
+        self.state = self.get_param("state")
+
+        self.changed = False
+        self.output = dict()
+
+        self.process()
+
+    @ClouderaManagerModule.handle_process
+    def process(self):
+
+        api_instance = ClouderaManagerResourceApi(self.api_client)
+        cluster_api_instance = ClustersResourceApi(self.api_client)
+
+        template_contents = dict()
+
+        if self.template:
+            try:
+                with open(self.template, "r") as file:
+                    template_contents = json.load(file)
+            except OSError as oe:
+                self.module.fail_json(
+                    msg=f"Error reading file '{to_text(self.template)}'",
+                    err=to_native(oe),
+                )
+            except json.JSONDecodeError as je:
+                self.module.fail_json(
+                    msg=f"Malformed JSON in file '{to_text(self.template)}'",
+                    err=to_native(je),
+                )
+
+        if not self.cluster_name:
+            self.cluster_name = template_contents.get("instantiator", {}).get(
+                "clusterName"
+            )
+            if not self.cluster_name:
+                self.module.fail_json(msg="No cluster name found in template.")
+
+        try:
+            self.existing = cluster_api_instance.read_cluster(
+                cluster_name=self.cluster_name
+            ).to_dict()
+        except ApiException:
+            self.existing = dict()
+
+        if self.state == "present":
+            if self.existing:
+                self.output = self.existing
+                # Reconcile the existing vs. the incoming values into a set of diffs
+                # then process via the PUT /clusters/{clusterName} endpoint
+            else:
+                payload = dict()
+
+                # Construct import template payload from the template and/or explicit parameters
+                explicit_params = dict()
+
+                # Set up 'instantiator' parameters
+                explicit_params.update(instantiator=dict(clusterName=self.cluster_name))
+
+                if template_contents:
+                    TEMPLATE = ClusterTemplate(
+                        warn_fn=self.module.warn, error_fn=self.module.fail_json
+                    )
+                    TEMPLATE.merge(template_contents, explicit_params)
+                    payload.update(body=template_contents)
+                else:
+                    payload.update(body=explicit_params)
+
+                # Update to include repositories
+                if self.add_repositories:
+                    payload.update(add_repositories=True)
+
+                # Execute the import; report the pending change in check mode
+                self.changed = True
+                if not self.module.check_mode:
+                    import_template_request = api_instance.import_cluster_template(
+                        **payload
+                    ).to_dict()
+
+                    command_id = import_template_request["id"]
+                    self.wait_for_command_state(
+                        command_id=command_id, polling_interval=60
+                    )
+
+                    # Retrieve the newly-minted cluster
+                    self.output = cluster_api_instance.read_cluster(
+                        cluster_name=self.cluster_name
+                    ).to_dict()
+        elif self.state == "absent":
+            if self.existing:
+                pass
+                # Delete the cluster via DELETE /clusters/{clusterName}
+        else:
+            self.module.fail_json(msg=f"Invalid state: {self.state}")
+
+
+def main():
+    module = ClouderaManagerModule.ansible_module(
+        argument_spec=dict(
+            template=dict(type="path", aliases=["cluster_template"]),
+            add_repositories=dict(type="bool", default=False),
+            name=dict(aliases=["cluster_name"]),
+            state=dict(default="present", choices=["present", "absent"]),
+        ),
+        required_one_of=[["name", "template"]],
+        supports_check_mode=True,
+    )
+
+    result = ClusterModule(module)
+
+    output = dict(
+        changed=result.changed,
+        cloudera_manager=result.output,
+    )
+
+    if result.debug:
+        log = result.log_capture.getvalue()
+        output.update(debug=log, debug_lines=log.split("\n"))
+
+    module.exit_json(**output)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/pytest.ini b/pytest.ini
index f6125c59..f71a11df 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -14,4 +14,6 @@
 [pytest]
 filterwarnings =
-    ignore::DeprecationWarning
\ No newline at end of file
+    ignore::DeprecationWarning
+
+pythonpath = ../../../
\ No newline at end of file
diff --git a/tests/config.yml b/tests/config.yml
index fb30b8a0..2969e7a8 100644
--- a/tests/config.yml
+++ b/tests/config.yml
@@ -1,4 +1,4 @@
-# Copyright 2023 Cloudera, Inc. All Rights Reserved.
+# Copyright 2024 Cloudera, Inc. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 00000000..02bf2923
--- /dev/null
+++ b/tests/unit/__init__.py
@@ -0,0 +1,33 @@
+# Copyright 2024 Cloudera, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class AnsibleExitJson(Exception):
+    """Exception class to be raised by module.exit_json and caught by the test case"""
+
+    def __init__(self, kwargs):
+        super(AnsibleExitJson, self).__init__(
+            kwargs.get("msg", "General module success")
+        )
+        self.__dict__.update(kwargs)
+
+
+class AnsibleFailJson(Exception):
+    """Exception class to be raised by module.fail_json and caught by the test case"""
+
+    def __init__(self, kwargs):
+        super(AnsibleFailJson, self).__init__(
+            kwargs.get("msg", "General module failure")
+        )
+        self.__dict__.update(kwargs)
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 3a2df28c..5f964969 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-# Copyright 2023 Cloudera, Inc. All Rights Reserved.
+# Copyright 2024 Cloudera, Inc. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,13 +14,60 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
 __metaclass__ = type
 
+import json
 import sys
 import pytest
 
+from ansible.module_utils import basic
+from ansible.module_utils.common.text.converters import to_bytes
+
+# Required for pytest discovery in VSCode, reasons unknown...
+try:
+    from ansible.plugins.action import ActionBase
+except ModuleNotFoundError:
+    pass
+
+from ansible_collections.cloudera.cluster.tests.unit import (
+    AnsibleFailJson,
+    AnsibleExitJson,
+)
+
 
 @pytest.fixture(autouse=True)
 def skip_python():
     if sys.version_info < (3, 6):
-        pytest.skip('Skipping on Python %s. cloudera.cloud supports Python 3.6 and higher.' % sys.version)
+        pytest.skip(
+            "Skipping on Python %s. cloudera.cluster supports Python 3.6 and higher."
+            % sys.version
+        )
+
+
+@pytest.fixture(autouse=True)
+def patch_module(monkeypatch):
+    """Patch AnsibleModule to raise exceptions on success and failure"""
+
+    def exit_json(*args, **kwargs):
+        if "changed" not in kwargs:
+            kwargs["changed"] = False
+        raise AnsibleExitJson(kwargs)
+
+    def fail_json(*args, **kwargs):
+        kwargs["failed"] = True
+        raise AnsibleFailJson(kwargs)
+
+    monkeypatch.setattr(basic.AnsibleModule, "exit_json", exit_json)
+    monkeypatch.setattr(basic.AnsibleModule, "fail_json", fail_json)
+
+
+@pytest.fixture
+def module_args():
+    """Prepare module arguments"""
+
+    def prep_args(args=dict()):
+        args = json.dumps({"ANSIBLE_MODULE_ARGS": args})
+        basic._ANSIBLE_ARGS = to_bytes(args)
+
+    return prep_args
diff --git a/tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py b/tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py
new file mode 100644
index 00000000..57447a76
--- /dev/null
+++ b/tests/unit/plugins/actions/assemble_cluster_template/test_assemble_cluster_template_action.py
@@ -0,0 +1,319 @@
+# Copyright 2024 Cloudera, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import logging
+import json
+import os
+import pytest
+import re
+
+from dataclasses import dataclass, field
+
+from unittest.mock import MagicMock
+
+from ansible.errors import AnsibleError, AnsibleActionFail
+from ansible.parsing.dataloader import DataLoader
+from ansible.playbook.task import Task
+from ansible.template import Templar
+
+from ansible_collections.cloudera.cluster.plugins.action.assemble_cluster_template import (
+    ActionModule as AssembleClusterTemplateAction,
+)
+
+LOG = logging.getLogger(__name__)
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+@dataclass(init=True)
+class MockContext:
+    # Use default factories so each test gets fresh mocks rather than
+    # sharing a single MagicMock instance across MockContext instances
+    task: Task = field(default_factory=lambda: MagicMock(Task))
+    connection: MagicMock = field(default_factory=MagicMock)
+    play_context: MagicMock = field(default_factory=MagicMock)
+    loader: DataLoader = field(default_factory=MagicMock)
+    templar: Templar = field(
+        default_factory=lambda: Templar(loader=MagicMock(DataLoader))
+    )
+    shared_loader_obj: DataLoader = None
+
+
+@pytest.fixture()
+def mock_module_exec():
+    def setup(plugin):
+        plugin._get_module_args = MagicMock()
+        plugin._execute_module = MagicMock()
+
+    return setup
+
+
+def test_empty_parameters(mock_module_exec):
+    context = MockContext()
+    context.task.async_val = False
+    context.play_context.check_mode = False
+
+    plugin = AssembleClusterTemplateAction(**vars(context))
+
+    mock_module_exec(plugin)
+
+    context.task.args = dict()
+
+    results = plugin.run()
+
+    assert results["failed"] == True
+    assert results["msg"] == "Both 'src' and 'dest' are required"
+
+
+def test_missing_dest(mock_module_exec, tmp_path):
+    context = MockContext()
+    context.task.async_val = False
+    context.play_context.check_mode = False
+
+    plugin = AssembleClusterTemplateAction(**vars(context))
+
+    mock_module_exec(plugin)
+
+    src_file = tmp_path / "src.json"
+    src_file.touch()
+
+    context.task.args = dict(src=str(src_file))
+
+    results = plugin.run()
+
+    assert results["failed"] == True
+    assert results["msg"] == "Both 'src' and 'dest' are required"
+
+
+def test_missing_src(mock_module_exec, tmp_path):
+    context = MockContext()
+    context.task.async_val = False
+    context.play_context.check_mode = False
+
+    plugin = AssembleClusterTemplateAction(**vars(context))
+
+    mock_module_exec(plugin)
+
+    dest_file = tmp_path / "dest.json"
+    dest_file.touch()
+
+    context.task.args = dict(dest=str(dest_file))
+
+    results = plugin.run()
+
+    assert results["failed"] == True
+    assert results["msg"] == "Both 'src' and 'dest' are required"
+
+
+def test_remote_src(mock_module_exec, tmp_path):
+    context = MockContext()
+    context.task.async_val = False
+    context.play_context.check_mode = False
+
+    plugin = AssembleClusterTemplateAction(**vars(context))
+
+    mock_module_exec(plugin)
+
+    src_file = tmp_path / "src.json"
+    src_file.touch()
+
+    dest_file = tmp_path / "dest.json"
+    dest_file.touch()
+
+    context.task.args = dict(remote_src=True, src=str(src_file), dest=str(dest_file))
+    plugin._execute_module.return_value = dict(msg="Module called")
+
+    results = plugin.run()
+
+    assert results["msg"] == "Module called"
+
+
+def test_src_not_found(mock_module_exec, tmp_path, monkeypatch):
+    context = MockContext()
+    context.task.async_val = False
+    context.play_context.check_mode = False
+
+    plugin = AssembleClusterTemplateAction(**vars(context))
+
+    mock_module_exec(plugin)
+
+    src_file = tmp_path / "src.json"
+    src_file.touch()
+
+    dest_file = tmp_path / "dest.json"
+ dest_file.touch() + + context.task.args = dict(src=str(src_file), dest=str(dest_file)) + + not_found = MagicMock() + not_found.side_effect = AnsibleError("NOT FOUND") + + monkeypatch.setattr(plugin, "_find_needle", not_found) + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == "NOT FOUND" + + +def test_src_not_directory(mock_module_exec, tmp_path, monkeypatch): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_file = tmp_path / "src.json" + src_file.touch() + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + context.task.args = dict(src=str(src_file), dest=str(dest_file)) + monkeypatch.setattr(plugin, "_find_needle", MagicMock(return_value=src_file)) + + results = plugin.run() + + assert results["failed"] == True + assert results["msg"] == f"Source, {str(src_file)}, is not a directory" + + +def test_invalid_regexp(mock_module_exec, tmp_path, monkeypatch): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + mock_module_exec(plugin) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + dest_file = tmp_path / "dest.json" + dest_file.touch() + + regexp = "[" + context.task.args = dict(src=str(src_dir), dest=str(dest_file), regexp=regexp) + monkeypatch.setattr(plugin, "_find_needle", MagicMock(return_value=src_dir)) + + results = plugin.run() + + assert results["failed"] == True + assert ( + results["msg"] + == f"Regular expression, {regexp}, is invalid: unterminated character set at position 0" + ) + + +def test_assemble_fragments(tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + base = src_dir / "base.json" + base.write_text(json.dumps(dict(one="BASE", two="BASE"))) + + overlay = src_dir / "overlay.json" + overlay.write_text(json.dumps(dict(one="OVERLAY"))) + + dest_file = tmp_path / "dest.json" + + def find_in_tmp(fragment, decrypt): + return os.path.join(src_dir, fragment) + + context.loader.get_real_file = find_in_tmp + + plugin.assemble_fragments(dest_file.open(mode="w", encoding="utf-8"), src_dir) + + results = json.load(dest_file.open(mode="r", encoding="utf-8")) + + assert len(results) == 2 + assert results["one"] == "OVERLAY" + assert results["two"] == "BASE" + + +def test_assemble_fragments_regexp(tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + base = src_dir / "base.json" + base.write_text(json.dumps(dict(one="BASE", two="BASE"))) + + overlay = src_dir / "overlay.json" + overlay.write_text(json.dumps(dict(one="OVERLAY"))) + + ignored = src_dir / "ignored.json" + ignored.write_text(json.dumps(dict(one="IGNORED"))) + + dest_file = tmp_path / "dest.json" + + def find_in_tmp(fragment, decrypt): + return os.path.join(src_dir, fragment) + + context.loader.get_real_file = find_in_tmp + + regexp = re.compile("^((?!ig).)*$") + + plugin.assemble_fragments( + dest_file.open(mode="w", encoding="utf-8"), src_dir, regex=regexp + ) + + results = json.load(dest_file.open(mode="r", encoding="utf-8")) + + assert len(results) == 2 + 
assert results["one"] == "OVERLAY" + assert results["two"] == "BASE" + + +def test_assemble_fragments_malformed(tmp_path): + context = MockContext() + context.task.async_val = False + context.play_context.check_mode = False + + plugin = AssembleClusterTemplateAction(**vars(context)) + + src_dir = tmp_path / "fragments" + src_dir.mkdir() + + base = src_dir / "base.json" + base.write_text(json.dumps(dict(one="BASE", two="BASE"))) + + overlay = src_dir / "overlay.json" + overlay.write_text(json.dumps(dict(one="OVERLAY"))) + + ignored = src_dir / "malformed.txt" + ignored.write_text("BOOM") + + dest_file = tmp_path / "dest.json" + + def find_in_tmp(fragment, decrypt): + return os.path.join(src_dir, fragment) + + context.loader.get_real_file = find_in_tmp + + with pytest.raises(AnsibleActionFail, match="JSON parsing error"): + plugin.assemble_fragments(dest_file.open(mode="w", encoding="utf-8"), src_dir) diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json new file mode 100644 index 00000000..96d16542 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/base.json @@ -0,0 +1,31 @@ +{ + "cdhVersion": "1.2.3", + "cmVersion": "4.5.6", + "displayName": "ExampleClusterTemplate", + "hostTemplates": [ + { + "cardinality": 1, + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [] + } + ], + "instantiator": { + "clusterName": "ExampleCluster", + "hosts": [ + { + "hostName": "host.example.com", + "hostTemplateRefName": "ExampleHostTemplate" + } + ] + }, + "products": [ + { + "product": "CDH", + "version": "1.2.3" + } + ], + "repositories": [ + "https://archive.cloudera.com/" + ], + "services": [] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json new file mode 100644 index 00000000..2d3b8931 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-1.json @@ -0,0 +1,51 @@ +{ + "hostTemplates": [ + { + "refName": "AnotherExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "atlas-ATLAS_SERVER-BASE", + "atlas-GATEWAY-BASE" + ] + } + ], + "services": [ + { + "refName": "atlas", + "serviceType": "ATLAS", + "displayName": "Atlas", + "serviceConfigs": [], + "roleConfigGroups": [ + { + "refName": "atlas-ATLAS_SERVER-BASE", + "roleType": "ATLAS_SERVER", + "base": true, + "configs": [ + { + "name": "atlas_server_http_port", + "value": "31000" + }, + { + "name": "atlas_server_https_port", + "value": "31443" + } + ] + }, + { + "refName": "atlas-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": true, + "configs": [] + } + ] + } + ], + "products": [ + { + "product": "FOO", + "version": "9.8.7" + } + ], + "repositories": [ + "https://archive.cloudera.com/atlas" + ] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json new file mode 100644 index 00000000..afa8560b --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-2.json @@ -0,0 +1,43 @@ +{ + "hostTemplates": [ + { + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE" + ] + } + ], + "services": [ + { + "refName": "schemaregistry", + "serviceType": "SCHEMAREGISTRY", + "displayName": "Schema Registry", + "serviceConfigs": [ + { + "name": 
"database_host", + "value": "host.example.com" + } + ], + "roleConfigGroups": [ + { + "refName": "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE", + "roleType": "SCHEMA_REGISTRY_SERVER", + "base": true, + "configs": [ + { + "name": "schema.registry.port", + "value": "7788" + }, + { + "name": "schema.registry.ssl.port", + "value": "7790" + } + ] + } + ] + } + ], + "repositories": [ + "https://archive.cloudera.com/schemaregistry" + ] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json new file mode 100644 index 00000000..718f5143 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/fragments/service-3.json @@ -0,0 +1,32 @@ +{ + "hostTemplates": [ + { + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "livy-GATEWAY-BASE", + "livy-LIVY_SERVER-BASE" + ] + } + ], + "services": [ + { + "refName": "livy", + "serviceType": "LIVY", + "displayName": "Livy", + "roleConfigGroups": [ + { + "refName": "livy-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": true, + "configs": [] + }, + { + "refName": "livy-LIVY_SERVER-BASE", + "roleType": "LIVY_SERVER", + "base": true, + "configs": [] + } + ] + } + ] +} diff --git a/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py new file mode 100644 index 00000000..5c054db2 --- /dev/null +++ b/tests/unit/plugins/modules/assemble_cluster_template/test_assemble_cluster_template_module.py @@ -0,0 +1,762 @@ +# Copyright 2024 Cloudera, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import logging
+import json
+import os
+import pytest
+
+from ansible_collections.cloudera.cluster.plugins.modules import (
+    assemble_cluster_template,
+)
+from ansible_collections.cloudera.cluster.plugins.module_utils.cm_utils import (
+    ClusterTemplate,
+)
+from ansible_collections.cloudera.cluster.tests.unit import (
+    AnsibleExitJson,
+    AnsibleFailJson,
+)
+
+LOG = logging.getLogger(__name__)
+TEST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def expected_list(expected: list) -> list:
+    expected.sort(key=lambda x: json.dumps(x, sort_keys=True))
+    return expected
+
+
+def test_missing_required(module_args):
+    module_args()
+
+    with pytest.raises(AnsibleFailJson, match="dest, src"):
+        assemble_cluster_template.main()
+
+
+def test_missing_dest(module_args):
+    module_args({"src": "foo.json"})
+
+    with pytest.raises(AnsibleFailJson, match="dest"):
+        assemble_cluster_template.main()
+
+
+def test_missing_src(module_args):
+    module_args({"dest": "foo.json"})
+
+    with pytest.raises(AnsibleFailJson, match="src"):
+        assemble_cluster_template.main()
+
+
+def test_src_not_directory(module_args, tmp_path):
+    root_dir = tmp_path / "not_directory"
+    root_dir.mkdir()
+
+    invalid_src = root_dir / "invalid_src.json"
+    invalid_src.touch()
+
+    module_args(
+        {
+            "dest": "foo.json",
+            "src": str(invalid_src),
+        }
+    )
+
+    with pytest.raises(AnsibleFailJson, match="not a directory"):
+        assemble_cluster_template.main()
+
+
+def test_src_invalid_file(module_args, tmp_path):
+    root_dir = tmp_path / "not_valid"
+    root_dir.mkdir()
+
+    invalid_file = root_dir / "invalid_file.txt"
+    invalid_file.touch()
+
+    module_args(
+        {
+            "dest": "foo.json",
+            "src": str(root_dir),
+        }
+    )
+
+    with pytest.raises(AnsibleFailJson, match="JSON parsing error"):
+        assemble_cluster_template.main()
+
+
+def test_src_filtered(module_args, tmp_path):
+    root_dir = tmp_path / "filtered"
+    root_dir.mkdir()
+
+    content = dict()
+    content["test"] = "Test"
+
+    base = root_dir / "base.json"
+    base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    filtered = root_dir / "filtered.json"
+    filtered.write_text(
+        json.dumps(content, indent=2, sort_keys=False), encoding="utf-8"
+    )
+
+    results = root_dir / "results.json"
+
+    module_args({"dest": str(results), "src": str(root_dir), "regexp": "^filtered"})
+
+    with pytest.raises(AnsibleExitJson):
+        assemble_cluster_template.main()
+
+    output = json.load(results.open())
+
+    assert "error" not in output
+    assert len(output) == 1
+    assert "test" in output
+    assert output["test"] == "Test"
+
+
+@pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS)
+def test_merge_idempotent_key(module_args, tmp_path, key):
+    root_dir = tmp_path / "idempotent"
+    root_dir.mkdir()
+
+    content = dict()
+    content[key] = "Test"
+
+    fragment_dir = root_dir / "fragments"
+    fragment_dir.mkdir()
+
+    base = fragment_dir / "base.json"
+    base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    overlay = fragment_dir / "overlay.json"
+    overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    module_args({"dest": str(root_dir / "results.json"), "src": str(fragment_dir)})
+
+    with pytest.raises(AnsibleExitJson):
+        assemble_cluster_template.main()
+
+
+@pytest.mark.parametrize("key", ClusterTemplate.IDEMPOTENT_IDS)
+def test_merge_idempotent_key_conflict(module_args, tmp_path, key):
+
root_dir = tmp_path / "idempotent_conflict" + root_dir.mkdir() + + content = dict() + content[key] = "Test" + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content[key] = "Error" + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + module_args({"dest": str(root_dir / "results.json"), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleFailJson, match=f"/{key}"): + assemble_cluster_template.main() + + +@pytest.mark.parametrize("key", ClusterTemplate.UNIQUE_IDS) +def test_merge_unique_key(module_args, tmp_path, key): + root_dir = tmp_path / "unique" + root_dir.mkdir() + + content = dict() + content[key] = ["one", "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output[key]) == 2 + assert output[key] == expected_list(["one", "two"]) + + +@pytest.mark.parametrize("key", ClusterTemplate.UNIQUE_IDS) +def test_merge_unique_key_additional(module_args, tmp_path, key): + root_dir = tmp_path / "unique_additional" + root_dir.mkdir() + + content = dict() + content[key] = ["one", "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content[key] = ["one", "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output[key]) == 3 + assert output[key] == expected_list(["one", "two", "three"]) + + +def test_merge_list(module_args, tmp_path): + root_dir = tmp_path / "list" + root_dir.mkdir() + + content = dict() + content["test"] = ["one", "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = ["one", "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.loads(results.read_text()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list(["one", "two", "one", "three"]) + + +def test_merge_list_nested(module_args, tmp_path): + root_dir = tmp_path / "list_nested" + root_dir.mkdir() + + content = dict() + content["test"] = [["one"], "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, 
indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = [["one"], "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list([["one"], ["one"], "two", "three"]) + + +def test_merge_list_idempotent(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = [{"name": "Test"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 3 + assert output["test"] == expected_list([{"name": "Test"}, "two", "three"]) + + +def test_merge_list_idempotent_multiple_elements(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_multiple_elements" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test"}, {"product": "Product"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = [{"name": "Test"}, {"product": "Product"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list( + [{"name": "Test"}, {"product": "Product"}, "two", "three"] + ) + + +def test_merge_list_idempotent_multiple_keys(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_multiple_keys" + root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test", "product": "Product"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = [{"name": "Test", "product": "Product"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 3 + assert output["test"] == expected_list( + [{"name": "Test", "product": "Product"}, "two", "three"] + ) + + +def test_merge_list_idempotent_append(module_args, tmp_path): + root_dir = tmp_path / "list_idempotent_append" + 
root_dir.mkdir() + + content = dict() + content["test"] = [{"name": "Test"}, "two"] + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = [{"name": "Additional"}, "three"] + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 4 + assert output["test"] == expected_list( + [{"name": "Test"}, "two", {"name": "Additional"}, "three"] + ) + + +def test_merge_dict(module_args, tmp_path): + root_dir = tmp_path / "dict" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": 1} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = {"two": 2} + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 2 + assert output["test"] == {"one": 1, "two": 2} + + +def test_merge_dict_overwrite(module_args, tmp_path): + root_dir = tmp_path / "dict_overwrite" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": 1} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = {"one": 2} + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert output["test"] == {"one": 2} + + +def test_merge_dict_idempotent_key(module_args, tmp_path): + root_dir = tmp_path / "dict_idempotent_key" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": 1} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + content["test"] = {"one": 2} + overlay = fragment_dir / "overlay.json" + overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8") + + results = root_dir / "results.json" + + module_args({"dest": str(results), "src": str(fragment_dir)}) + + with pytest.raises(AnsibleExitJson): + assemble_cluster_template.main() + + output = json.load(results.open()) + + assert len(output["test"]) == 1 + assert output["test"] == {"one": 2} + + +def test_merge_dict_nested(module_args, tmp_path): + root_dir = tmp_path / "dict_nested" + root_dir.mkdir() + + content = dict() + content["test"] = {"one": {"two": 1}} + + fragment_dir = root_dir / "fragments" + fragment_dir.mkdir() + + base = fragment_dir / "base.json" + 
base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    content["test"] = {"one": {"three": 3}}
+    overlay = fragment_dir / "overlay.json"
+    overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    results = root_dir / "results.json"
+
+    module_args({"dest": str(results), "src": str(fragment_dir)})
+
+    with pytest.raises(AnsibleExitJson):
+        assemble_cluster_template.main()
+
+    output = json.load(results.open())
+
+    assert len(output["test"]) == 1
+    assert len(output["test"]["one"]) == 2
+    assert output["test"] == {"one": {"two": 1, "three": 3}}
+
+
+def test_merge_dict_nested_overwrite(module_args, tmp_path):
+    root_dir = tmp_path / "dict_nested_overwrite"
+    root_dir.mkdir()
+
+    content = dict()
+    content["test"] = {"one": {"two": 1}}
+
+    fragment_dir = root_dir / "fragments"
+    fragment_dir.mkdir()
+
+    base = fragment_dir / "base.json"
+    base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    content["test"] = {"one": {"two": 2, "three": 3}}
+    overlay = fragment_dir / "overlay.json"
+    overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    results = root_dir / "results.json"
+
+    module_args({"dest": str(results), "src": str(fragment_dir)})
+
+    with pytest.raises(AnsibleExitJson):
+        assemble_cluster_template.main()
+
+    output = json.load(results.open())
+
+    assert len(output["test"]) == 1
+    assert len(output["test"]["one"]) == 2
+    assert output["test"] == {"one": {"two": 2, "three": 3}}
+
+
+def test_merge_dict_nested_idempotent(module_args, tmp_path):
+    root_dir = tmp_path / "dict_nested_idempotent"
+    root_dir.mkdir()
+
+    content = dict()
+    content["test"] = {"one": {"name": "Test"}}
+
+    fragment_dir = root_dir / "fragments"
+    fragment_dir.mkdir()
+
+    base = fragment_dir / "base.json"
+    base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    content["test"] = {"one": {"name": "Test"}}
+    overlay = fragment_dir / "overlay.json"
+    overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    results = root_dir / "results.json"
+
+    module_args({"dest": str(results), "src": str(fragment_dir)})
+
+    with pytest.raises(AnsibleExitJson):
+        assemble_cluster_template.main()
+
+    output = json.load(results.open())
+
+    assert len(output["test"]) == 1
+    assert len(output["test"]["one"]) == 1
+    assert output["test"] == {"one": {"name": "Test"}}
+
+
+def test_merge_dict_nested_idempotent_conflict(module_args, tmp_path):
+    root_dir = tmp_path / "dict_nested_idempotent_conflict"
+    root_dir.mkdir()
+
+    content = dict()
+    content["test"] = {"one": {"name": "Test"}}
+
+    fragment_dir = root_dir / "fragments"
+    fragment_dir.mkdir()
+
+    base = fragment_dir / "base.json"
+    base.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    content["test"] = {"one": {"name": "Conflict"}}
+    overlay = fragment_dir / "overlay.json"
+    overlay.write_text(json.dumps(content, indent=2, sort_keys=False), encoding="utf-8")
+
+    results = root_dir / "results.json"
+
+    module_args({"dest": str(results), "src": str(fragment_dir)})
+
+    with pytest.raises(AnsibleFailJson, match="/test/one/name"):
+        assemble_cluster_template.main()
+
+
+def test_multiple_services(module_args, tmp_path):
+    results = tmp_path / "results.json"
+
+    module_args({"dest": str(results), "src": os.path.join(TEST_DIR, "fragments")})
+
+    with pytest.raises(AnsibleExitJson):
+        assemble_cluster_template.main()
+
+    output = json.load(results.open())
+
+    assert
output["displayName"] == "ExampleClusterTemplate" + assert output["cdhVersion"] == "1.2.3" + assert output["cmVersion"] == "4.5.6" + + assert set(output["repositories"]) == set( + [ + "https://archive.cloudera.com/", + "https://archive.cloudera.com/schemaregistry", + "https://archive.cloudera.com/atlas", + ] + ) + + assert len(output["products"]) == 2 + assert output["products"] == expected_list( + [ + dict(product="CDH", version="1.2.3"), + dict(product="FOO", version="9.8.7"), + ] + ) + + assert output["instantiator"]["clusterName"] == "ExampleCluster" + assert len(output["instantiator"]["hosts"]) == 1 + assert output["instantiator"]["hosts"] == expected_list( + [{"hostName": "host.example.com", "hostTemplateRefName": "ExampleHostTemplate"}] + ) + + assert len(output["hostTemplates"]) == 2 + assert output["hostTemplates"] == expected_list( + [ + { + "cardinality": 1, + "refName": "ExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "livy-GATEWAY-BASE", + "livy-LIVY_SERVER-BASE", + "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE", + ], + }, + { + "refName": "AnotherExampleHostTemplate", + "roleConfigGroupsRefNames": [ + "atlas-ATLAS_SERVER-BASE", + "atlas-GATEWAY-BASE", + ], + }, + ] + ) + + assert len(output["services"]) == 3 + assert output["services"] == expected_list( + [ + { + "refName": "atlas", + "serviceType": "ATLAS", + "displayName": "Atlas", + "serviceConfigs": [], + "roleConfigGroups": [ + { + "refName": "atlas-ATLAS_SERVER-BASE", + "roleType": "ATLAS_SERVER", + "base": True, + "configs": [ + {"name": "atlas_server_http_port", "value": "31000"}, + {"name": "atlas_server_https_port", "value": "31443"}, + ], + }, + { + "refName": "atlas-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": True, + "configs": [], + }, + ], + }, + { + "refName": "schemaregistry", + "serviceType": "SCHEMAREGISTRY", + "displayName": "Schema Registry", + "serviceConfigs": [ + {"name": "database_host", "value": "host.example.com"} + ], + "roleConfigGroups": [ + { + "refName": "schemaregistry-SCHEMA_REGISTRY_SERVER-BASE", + "roleType": "SCHEMA_REGISTRY_SERVER", + "base": True, + "configs": [ + {"name": "schema.registry.port", "value": "7788"}, + { + "name": "schema.registry.ssl.port", + "value": "7790", + }, + ], + } + ], + }, + { + "refName": "livy", + "serviceType": "LIVY", + "displayName": "Livy", + "roleConfigGroups": [ + { + "refName": "livy-GATEWAY-BASE", + "roleType": "GATEWAY", + "base": True, + "configs": [], + }, + { + "refName": "livy-LIVY_SERVER-BASE", + "roleType": "LIVY_SERVER", + "base": True, + "configs": [], + }, + ], + }, + ], + ) diff --git a/tests/unit/plugins/modules/cm_cluster/test_cm_cluster.py b/tests/unit/plugins/modules/cm_cluster/test_cm_cluster.py new file mode 100644 index 00000000..96c358c0 --- /dev/null +++ b/tests/unit/plugins/modules/cm_cluster/test_cm_cluster.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- + +# Copyright 2023 Cloudera, Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import os
+import logging
+import pytest
+
+from ansible_collections.cloudera.cluster.plugins.modules import cm_cluster
+from ansible_collections.cloudera.cluster.tests.unit import (
+    AnsibleExitJson,
+    AnsibleFailJson,
+)
+
+LOG = logging.getLogger(__name__)
+
+
+@pytest.fixture()
+def conn():
+    return {
+        "username": os.getenv("CM_USERNAME"),
+        "password": os.getenv("CM_PASSWORD"),
+        "host": os.getenv("CM_HOST"),
+        "port": "7180",
+        "verify_tls": "no",
+        "debug": "yes",
+    }
+
+
+def test_missing_name_or_template(conn, module_args):
+    module_args(conn)
+
+    with pytest.raises(AnsibleFailJson, match="name, template"):
+        cm_cluster.main()
+
+
+def test_missing_cdh_version(conn, module_args):
+    module_args({**conn, "name": "Test"})
+
+    with pytest.raises(AnsibleFailJson, match="Bad Request") as e:
+        cm_cluster.main()
+
+    assert "CDH version" in e.value.body["message"]
+
+
+def test_absent_not_existing(conn, module_args):
+    module_args({**conn, "name": "Test", "state": "absent"})
+
+    with pytest.raises(AnsibleExitJson) as e:
+        cm_cluster.main()
+
+    assert e.value.changed == False
diff --git a/tests/unit/requirements.txt b/tests/unit/requirements.txt
index 1069667a..8ec82784 100644
--- a/tests/unit/requirements.txt
+++ b/tests/unit/requirements.txt
@@ -1,4 +1,4 @@
-# Copyright 2023 Cloudera, Inc. All Rights Reserved.
+# Copyright 2024 Cloudera, Inc. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-cm_client
\ No newline at end of file
+cm_client