diff --git a/.github/actions/ansible_test_splitter/action.yml b/.github/actions/ansible_test_splitter/action.yml
new file mode 100644
index 00000000..3c445969
--- /dev/null
+++ b/.github/actions/ansible_test_splitter/action.yml
@@ -0,0 +1,46 @@
+name: Cloud integration test splitter
+description: Evaluate which targets need to be tested.
+
+inputs:
+  collections_to_test:
+    description: |
+      Path to the collections to test.
+      Provide as a comma-separated list of collection path and base ref to test against.
+      e.g. 'repo_path_1:main,repo_path_2:stable-2'
+    required: true
+  total_jobs:
+    description: The total number of jobs to split targets across
+    required: false
+    default: "3"
+outputs:
+  test_targets:
+    description: The list of targets to test
+    value: ${{ steps.splitter.outputs.test_targets }}
+
+runs:
+  using: composite
+  steps:
+    - name: setup python
+      uses: actions/setup-python@v4
+      with:
+        python-version: "3.9"
+
+    - name: Install python required libraries
+      run: pip install -U pyyaml
+      shell: bash
+
+    - name: Set variable to test all the targets
+      run: echo "ANSIBLE_TEST_ALL_THE_TARGETS=true" >> "$GITHUB_ENV"
+      shell: bash
+      if: ${{ (contains(github.event.pull_request.labels.*.name, 'test-all-the-targets')) }}
+
+    - name: Evaluate targets to test
+      id: splitter
+      run: >-
+        python ${{ github.action_path }}/list_changed_targets.py
+      env:
+        COLLECTIONS_TO_TEST: "${{ inputs.collections_to_test }}"
+        TOTAL_JOBS: "${{ inputs.total_jobs }}"
+        PULL_REQUEST_BODY: "${{ github.event.pull_request.body }}"
+        PULL_REQUEST_BASE_REF: "${{ github.event.pull_request.base.ref }}"
+      shell: bash
diff --git a/.github/actions/ansible_test_splitter/list_changed_common.py b/.github/actions/ansible_test_splitter/list_changed_common.py
new file mode 100644
index 00000000..21f0770f
--- /dev/null
+++ b/.github/actions/ansible_test_splitter/list_changed_common.py
@@ -0,0 +1,582 @@
+#!/usr/bin/env python3
+"""Define collection module for list_changed_targets executable."""
+
+import ast
+import os
+import re
+import subprocess
+
+from collections import defaultdict
+from collections.abc import Generator
+from pathlib import PosixPath
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+
+import yaml
+
+
+def read_collection_name(collection_path: PosixPath) -> str:
+    """Read the collection name (namespace.name) from galaxy.yml.
+
+    :param collection_path: path to the collection
+    :returns: collection name as string
+    """
+    with (collection_path / "galaxy.yml").open() as file_handler:
+        content = yaml.safe_load(file_handler)
+        return f'{content["namespace"]}.{content["name"]}'
+
+
+def run_command(command: str, chdir: Optional[PosixPath]) -> str:
+    """Run a shell command using subprocess.
+
+    :param command: command to execute
+    :param chdir: directory to change into before running the command
+    :returns: command output
+    """
+    with subprocess.Popen(
+        command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=chdir
+    ) as proc:
+        out, _ = proc.communicate()
+        return out.decode()
+
+
+def list_pyimport(prefix: str, subdir: str, module_content: str) -> Generator[str, None, None]:
+    """List the Python modules imported by a module or module_utils file.
+
+    :param prefix: the collection plugins import prefix
+    :param subdir: the plugins sub-directory the file lives in (e.g. modules)
+    :param module_content: the module source code
+    :yields: python module import
+    """
+    root = ast.parse(module_content)
+    for node in ast.walk(root):
+        if isinstance(node, ast.Import):
+            yield node.names[0].name
+        elif isinstance(node, ast.ImportFrom):
+            if node.level == 1:
+                current_prefix = f"{prefix}{subdir}."
+            elif node.level == 2:
+                current_prefix = f"{prefix}"
+            else:
+                current_prefix = ""
+            yield f"{current_prefix}{node.module}"
+
+
+def build_import_tree(
+    import_path: PosixPath, module_collection_name: str, all_collections_names: list[str]
+) -> tuple[dict[str, list[Any]], dict[str, list[Any]]]:
+    """Generate import dependencies for the modules and the module_utils.
+
+    Let's say we have the following input:
+
+    modules: ec2_mod1
+        import a_py_mod
+        import ansible.basic
+    modules: ec2_mod2
+        import another_py_mod
+        import ansible_collections.amazon.aws.plugins.module_utils.core
+    modules: ec2_mod3
+        import ansible_collections.amazon.aws.plugins.module_utils.tagging
+        import ansible_collections.amazon.aws.plugins.module_utils.waiters
+
+    module_utils: waiters
+        import some_py_mod
+        import ansible_collections.amazon.aws.plugins.module_utils.core
+    module_utils: tagging
+        import some_py_tricky_mod
+        import ansible_collections.amazon.aws.plugins.module_utils.core
+    module_utils: core
+        import some_py_fancy_mod
+
+    This will generate the following dicts (listing only the imports that belong to this collection):
+
+    modules_import
+    {
+        "ec2_mod1": [],
+        "ec2_mod2": [
+            "ansible_collections.amazon.aws.plugins.module_utils.core",
+        ],
+        "ec2_mod3": [
+            "ansible_collections.amazon.aws.plugins.module_utils.tagging",
+            "ansible_collections.amazon.aws.plugins.module_utils.waiters"
+        ],
+    }
+
+    utils_import
+    {
+        "ansible_collections.amazon.aws.plugins.module_utils.core": [
+            "ansible_collections.amazon.aws.plugins.module_utils.waiters",
+            "ansible_collections.amazon.aws.plugins.module_utils.tagging"
+        ]
+    }
+
+    :param all_collections_names: collections names
+    :param module_collection_name: current collection name
+    :param import_path: the path to import from
+    :returns: tuple of modules and utils imports
+    """
+    modules_import = defaultdict(list)  # type: Dict[str, List[Any]]
+    prefix = f"ansible_collections.{module_collection_name}.plugins."
+    all_prefixes = [f"ansible_collections.{n}.plugins."
for n in all_collections_names] + utils_to_visit = [] + for mod in import_path.glob("plugins/modules/*"): + for i in list_pyimport(prefix, "modules", mod.read_text()): + if any(i.startswith(p) for p in all_prefixes) and i not in modules_import[mod.stem]: + modules_import[mod.stem].append(i) + if i not in utils_to_visit: + utils_to_visit.append(i) + + utils_import = defaultdict(list) # type: Dict[str, List[Any]] + visited = [] + while utils_to_visit: + utils = utils_to_visit.pop() + if utils in visited: + continue + visited.append(utils) + try: + utils_path = import_path / PosixPath( + utils.replace(f"ansible_collections.{module_collection_name}.", "").replace( + ".", "/" + ) + + ".py" + ) + for i in list_pyimport(prefix, "module_utils", utils_path.read_text()): + if i.startswith(prefix) and i not in utils_import[utils]: + utils_import[utils].append(i) + if i not in visited: + utils_to_visit.append(i) + except Exception: # pylint: disable=broad-except + pass + return modules_import, utils_import + + +class WhatHaveChanged: + """A class to store information about changes for a specific collection.""" + + def __init__(self, change_path: PosixPath, base_ref: str) -> None: + """Class constructor. + + :param change_path: path to the change + :param base_ref: pull request base reference + """ + assert isinstance(change_path, PosixPath) + self.collection_path = change_path + self.base_ref = base_ref + self.collection_name = read_collection_name(change_path) + self.files = [] # type: List[PosixPath] + + def changed_files(self) -> list[PosixPath]: + """List of changed files. + + :returns: a list of pathlib.PosixPath + """ + if not self.files: + stdout = run_command( + command=f"git diff origin/{self.base_ref} --name-only", chdir=self.collection_path + ) + self.files = [PosixPath(p) for p in stdout.split("\n") if p] + return self.files + + def targets(self) -> Generator[str, None, None]: + """List the test targets impacted by the change. + + :yields: targets impacted by this change + """ + for change in self.changed_files(): + if str(change).startswith("tests/integration/targets/"): + # These are a special case, we only care that 'something' changed in that test + yield str(change).replace("tests/integration/targets/", "").split("/", maxsplit=1)[ + 0 + ] + + def _path_matches(self, base_path: str) -> Generator[PosixPath, None, None]: + """Simplest case, just a file name. + + :param base_path: path of the module + :yields: path to a change file + """ + for changed_file in self.changed_files(): + if str(changed_file).startswith(base_path): + yield PosixPath(changed_file) + + def connection(self) -> Generator[PosixPath, None, None]: + """List the connection plugins impacted by the change. + + :yields: path to a connection plugin change + """ + yield from self._path_matches("plugins/connection/") + + def inventory(self) -> Generator[PosixPath, None, None]: + """List the inventory plugins impacted by the change. + + :yields: path to an inventory plugin change + """ + yield from self._path_matches("plugins/inventory/") + + def lookup(self) -> Generator[PosixPath, None, None]: + """List the lookup plugins impacted by the change. + + :yields: path to a connection lookup change + """ + yield from self._path_matches("plugins/lookup/") + + def modules(self) -> Generator[PosixPath, None, None]: + """List the modules impacted by the change. 
+ + :yields: path to a module plugin change + """ + yield from self._path_matches("plugins/modules/") + + def _util_matches( + self, base_path: str, import_path: str + ) -> Generator[tuple[PosixPath, str], None, None]: + """List matching utils files. + + :param base_path: path of the module or plugin util + :param import_path: path of the import library + :yields: path to a module or plugin utils change + """ + # We care about the file, but we also need to find what potential side effects would be for + # our change + base_name = f"ansible_collections.{self.collection_name}.plugins.{import_path}." + for util_change in self.changed_files(): + if str(util_change).startswith(base_path): + yield ( + PosixPath(util_change), + f"{base_name}{util_change.stem}", + ) + + def module_utils(self) -> Generator[tuple[PosixPath, str], None, None]: + """List the Python modules impacted by the change. + + :yields: path to a module util change + """ + yield from self._util_matches("plugins/module_utils/", "module_utils") + + def plugin_utils(self) -> Generator[tuple[PosixPath, str], None, None]: + """List the Python modules impacted by the change. + + :yields: path to a plugin util change + """ + yield from self._util_matches("plugins/plugin_utils/", "plugin_utils") + + +class Target: + """A class to store information about a specific target.""" + + def __init__(self, target_path: PosixPath) -> None: + """Class constructor. + + :param target_path: path to the target + """ + self.path = target_path + self.lines = [line.split("#")[0] for line in target_path.read_text().split("\n") if line] + self.name = target_path.parent.name + self.exec_time = 0 + + def is_alias_of(self, name: str) -> bool: + """Test alias target. + + :param name: the name of the source target + :returns: whether target is an alias or not + """ + return name in self.lines or self.name == name + + def is_unstable(self) -> bool: + """Test unstable target. + + :returns: whether target is unstable or not + """ + if "unstable" in self.lines: + return True + return False + + def is_disabled(self) -> bool: + """Test disabled target. + + :returns: whether target is disabled or not + """ + if "disabled" in self.lines: + return True + return False + + def is_slow(self) -> bool: + """Test slow target. + + :returns: whether target is slow or not + """ + # NOTE: Should be replaced by time=3000 + if "slow" in self.lines or "# reason: slow" in self.lines: + return True + return False + + def is_ignored(self) -> bool: + """Show the target be ignored. + + :returns: whether target is set as ignored or not + """ + ignore = {"unsupported", "disabled", "unstable", "hidden"} + return not ignore.isdisjoint(set(self.lines)) + + def execution_time(self) -> int: + """Retrieve execution time of a target. + + :returns: execution time of the target + """ + if self.exec_time: + return self.exec_time + + self.exec_time = 3000 if self.is_slow() else 180 + for line in self.lines: + if match := re.match(r"^time=([0-9]+)s\S*$", line): + self.exec_time = int(match.group(1)) + elif match := re.match(r"^time=([0-9]+)m\S*$", line): + self.exec_time = int(match.group(1)) * 60 + elif match := re.match(r"^time=([0-9]+)\S*$", line): + self.exec_time = int(match.group(1)) + + return self.exec_time + + +class Collection: + """A class storing collection information.""" + + def __init__(self, collection_path: PosixPath) -> None: + """Class Constructor. 
+
+        :param collection_path: path to the collection
+        """
+        self.collection_path = collection_path
+        self._my_test_plan = []  # type: List[Target]
+        self.collection_name = read_collection_name(collection_path)  # type: str
+        self.modules_import = {}  # type: Dict[str, List[Any]]
+        self.utils_import = {}  # type: Dict[str, List[Any]]
+        self.test_groups = []  # type: List[Dict[str, Any]]
+
+    @property
+    def test_plan_names(self) -> list[str]:
+        """Return the list of test plan names.
+
+        :returns: a list of test plan names
+        """
+        return [t.name for t in self._my_test_plan]
+
+    @property
+    def test_plan(self) -> list[Target]:
+        """Get protected attribute _my_test_plan.
+
+        :returns: a list of test plan objects
+        """
+        return self._my_test_plan
+
+    def targets(self) -> Generator[Target, None, None]:
+        """List collection targets.
+
+        :yields: a collection target
+        """
+        for alias in self.collection_path.glob("tests/integration/targets/*/aliases"):
+            yield Target(alias)
+
+    def _is_target_already_added(self, target_name: str) -> bool:
+        """Return true if the target is already part of the test plan.
+
+        :param target_name: target name being checked
+        :returns: whether the target is already part of the test plan or not
+        """
+        for target_src in self._my_test_plan:
+            if target_src.is_alias_of(target_name):
+                return True
+        return False
+
+    def add_target_to_plan(self, target_name: str, is_direct: bool = True) -> None:
+        """Add a specific target to the test plan.
+
+        :param target_name: target name being added
+        :param is_direct: whether the target is directly triggered or pulled in as a dependency
+        """
+        if not self._is_target_already_added(target_name):
+            for plan_target in self.targets():
+                if plan_target.is_disabled():
+                    continue
+                # For indirect targets we want to skip "ignored" tests
+                if not is_direct and plan_target.is_ignored():
+                    continue
+                if plan_target.is_alias_of(target_name):
+                    self._my_test_plan.append(plan_target)
+
+    def cover_all(self) -> None:
+        """Cover all the targets available."""
+        for cover_target in self.targets():
+            self.add_target_to_plan(cover_target.name, is_direct=False)
+
+    def cover_module_utils(self, pymodule: str, names: list[str]) -> None:
+        """Track the targets to run following a module_utils change.
+
+        :param pymodule: the module_utils python module that changed
+        :param names: the names of all the collections under test
+        """
+        if not self.modules_import or not self.utils_import:
+            self.modules_import, self.utils_import = build_import_tree(
+                self.collection_path, self.collection_name, names
+            )
+
+        u_candidates = [pymodule]
+        # add as candidates all module_utils which include this module_utils
+        u_candidates += [
+            import_lib for _, imports in self.utils_import.items() for import_lib in imports
+        ]
+
+        for mod, mod_imports in self.modules_import.items():
+            if any(util in mod_imports for util in u_candidates):
+                self.add_target_to_plan(mod, is_direct=False)
+
+    def slow_targets_to_test(self) -> list[str]:
+        """List collection slow targets.
+
+        :returns: list of slow targets
+        """
+        return sorted(list({t.name for t in self.test_plan if t.is_slow()}))
+
+    def regular_targets_to_test(self) -> list[str]:
+        """List regular targets to test.
+
+        :returns: list of regular targets
+        """
+        return sorted(list({t.name for t in self._my_test_plan if not t.is_slow()}))
+
+
+class ElGrandeSeparator:
+    """A class to build output for the targets to test."""
+
+    def __init__(self, collections_items: list[Collection], number_jobs: int) -> None:
+        """Class constructor.
+ + :param collections_items: list of collections being tested + :param number_jobs: number of jobs to share targets on + """ + self.collections = collections_items + self.total_jobs = number_jobs + self.targets_per_slot = 10 + + def output(self) -> str: + """Produce output for the targets to test. + + :returns: a string describing the output + """ + batches = [] + for col in self.collections: + slots = [f"{col.collection_name}-{i+1}" for i in range(self.total_jobs)] + for batch in self.build_up_batches(slots, col): + batches.append(batch) + return ";".join([f"{x}:{','.join(y)}" for x, y in batches]) + + def build_up_batches( + self, slots: list[str], my_collection: Collection + ) -> Generator[tuple[str, list[str]], None, None]: + """Build up batches. + + :param slots: list of slots + :param my_collection: collection containing list of targets + :yields: batches + """ + if not my_collection.test_groups: + sorted_targets = sorted( + my_collection.test_plan, key=lambda x: x.execution_time(), reverse=True + ) + my_collection.test_groups = [{"total": 0, "targets": []} for _ in range(len(slots))] + my_collection.test_groups = equal_share(sorted_targets, len(slots)) + + for group in my_collection.test_groups: + if group["targets"] == []: + continue + my_slot = slots.pop(0) + yield (my_slot, group["targets"]) + + +def make_unique(data: list[str]) -> list[str]: + """Remove duplicated items of a list containing string. + + :param data: input list of string + :returns: A list containing unique items + """ + tmp = [] + for i in data: + if i not in tmp: + tmp.append(i) + return tmp + + +def equal_share(targets: list[Target], nbchunks: int) -> list[dict[str, Any]]: + """Split a list of targets into equal size chunks. + + :param targets: The list of target to share + :param nbchunks: The number of chunks to share targets into + :returns: A list of dictionary with a set of targets and the total size + """ + total_data = [0 for _ in range(nbchunks)] + targets_data = [[] for _ in range(nbchunks)] # type: List[List[str]] + + for my_target in targets: + index = total_data.index(min(total_data)) + total_data[index] += my_target.execution_time() + targets_data[index].append(my_target.name) + + return [{"total": total_data[i], "targets": targets_data[i]} for i in range(nbchunks)] + + +def read_test_all_the_targets() -> bool: + """Test if all targets should be executed. + + :returns: whether the full suite should be run or not + """ + test_all = os.environ.get("ANSIBLE_TEST_ALL_THE_TARGETS", "") + test_all_the_targets = False + if test_all and test_all.lower() == "true": + test_all_the_targets = True + return test_all_the_targets + + +def read_total_jobs() -> int: + """Read the number of job to divide targets into. + + :returns: total jobs as integer + """ + default_value = "3" + total_jobs = os.environ.get("TOTAL_JOBS", default_value) + try: + result = int(total_jobs) + except ValueError: + result = int(default_value) + return result + + +def read_targets_to_test() -> dict[str, list[str]]: + """Determine specific targets to test based on TargetsToTest flag into pull request body. 
+ + :returns: list of targets to test per collection + """ + targets_to_test = {} + body = os.environ.get("PULL_REQUEST_BODY", "") + regex = re.compile(r"^TargetsToTest=([\w\.\:,;]+)", re.MULTILINE | re.IGNORECASE) + match = regex.search(body) + if match: + for item in match.group(1).split(";"): + if not item: + continue + elements = item.split(":") + targets_to_test[elements[0]] = elements[1].split(",") + return targets_to_test + + +def read_collections_to_test() -> list[PosixPath]: + """Read module parameters from environment variables. + + :returns: a list of parameters to execute the module + """ + return [ + PosixPath(path) + for path in os.environ.get("COLLECTIONS_TO_TEST", "").replace("\n", ",").split(",") + if path.strip() + ] diff --git a/.github/actions/ansible_test_splitter/list_changed_targets.py b/.github/actions/ansible_test_splitter/list_changed_targets.py new file mode 100644 index 00000000..19e7dced --- /dev/null +++ b/.github/actions/ansible_test_splitter/list_changed_targets.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 +"""Script to list target to test for a pull request.""" + +import json +import os + +from pathlib import PosixPath +from typing import Dict +from typing import List +from typing import Union + +from list_changed_common import Collection +from list_changed_common import ElGrandeSeparator +from list_changed_common import WhatHaveChanged +from list_changed_common import make_unique +from list_changed_common import read_collections_to_test +from list_changed_common import read_targets_to_test +from list_changed_common import read_test_all_the_targets +from list_changed_common import read_total_jobs + + +class ListChangedTargets: + """A class used to list changed impacted for a pull request.""" + + def __init__(self) -> None: + """Class constructor.""" + self.collections_to_test = read_collections_to_test() + self.total_jobs = read_total_jobs() + + self.test_all_the_targets = read_test_all_the_targets() + self.targets_to_test = read_targets_to_test() + self.base_ref = os.environ.get("PULL_REQUEST_BASE_REF", "") + + def make_change_targets_to_test(self, collections: list[Collection]) -> dict[str, list[str]]: + """Create change for a specific target to test. + + :param collections: list of collections being tested + :returns: list of target per collection + """ + changes = {} + for collection in collections: + name = collection.collection_name + if name in self.targets_to_test: + for target in self.targets_to_test[name]: + collection.add_target_to_plan(target) + changes[name] = collection.test_plan_names + + return changes + + def make_change_for_all_targets(self, collections: list[Collection]) -> dict[str, list[str]]: + """Create change for full test suite. + + :param collections: list of collections being tested + :returns: list of all targets per collection + """ + changes = {} + for collection in collections: + collection.cover_all() + changes[collection.collection_name] = collection.test_plan_names + + return changes + + def make_changed_targets(self, collections: list[Collection]) -> dict[str, list[str]]: + """Create change for changed targets. 
+ + :param collections: list of collections being tested + :returns: list of targets per collection + """ + listed_changes = {} # type: Dict[str, Dict[str, List[str]]] + collections_names = [collection.collection_name for collection in collections] + + def _add_changed_target( + name: str, ref_path: Union[PosixPath, str], plugin_type: str + ) -> None: + if plugin_type == "targets": + file_name, plugin_file_name = str(ref_path), str(ref_path) + elif plugin_type == "modules": + file_name = PosixPath(ref_path).stem + plugin_file_name = file_name + else: + file_name = PosixPath(ref_path).stem + plugin_file_name = f"{plugin_type}_{PosixPath(ref_path).stem}" + listed_changes[name][plugin_type].append(file_name) + for collection in collections: + collection.add_target_to_plan(plugin_file_name) + + for whc in [WhatHaveChanged(path, self.base_ref) for path in self.collections_to_test]: + listed_changes[whc.collection_name] = { + "modules": [], + "inventory": [], + "connection": [], + "module_utils": [], + "plugin_utils": [], + "lookup": [], + "targets": [], + } + for path in whc.modules(): + _add_changed_target(whc.collection_name, path, "modules") + for path in whc.inventory(): + _add_changed_target(whc.collection_name, path, "inventory") + for path in whc.connection(): + _add_changed_target(whc.collection_name, path, "connection") + for path, pymod in whc.module_utils(): + _add_changed_target(whc.collection_name, path, "module_utils") + for collection in collections: + collection.cover_module_utils(pymod, collections_names) + for path, pymod in whc.plugin_utils(): + _add_changed_target(whc.collection_name, path, "plugin_utils") + for collection in collections: + collection.cover_module_utils(pymod, collections_names) + for path in whc.lookup(): + _add_changed_target(whc.collection_name, path, "lookup") + for target in whc.targets(): + _add_changed_target(whc.collection_name, target, "targets") + + print("----------- Listed Changes -----------\n", json.dumps(listed_changes, indent=2)) + return {x: make_unique(y["targets"]) for x, y in listed_changes.items()} + + def run(self) -> str: + """List changes and divide targets into chunk. + + :returns: resulting string of targets divide into chunks + """ + collections = [Collection(p) for p in self.collections_to_test] + + if self.targets_to_test: + changes = self.make_change_targets_to_test(collections) + elif self.test_all_the_targets: + changes = self.make_change_for_all_targets(collections) + else: + changes = self.make_changed_targets(collections) + + print("----------- Changes -----------\n", json.dumps(changes, indent=2)) + egs = ElGrandeSeparator(collections, self.total_jobs) + return egs.output() + + +def write_variable_to_github_output(name: str, value: str) -> None: + """Write content variable to GITHUB_OUTPUT. 
+ + :param name: variable name to write into GITHUB_OUTPUT + :param value: variable content + """ + github_output_file = os.environ.get("GITHUB_OUTPUT") or "" + if github_output_file: + with open(github_output_file, "a", encoding="utf-8") as file_write: + file_write.write(f"{name}={value}\n") + + +def main() -> None: + """Perform main process of the module.""" + output = ListChangedTargets().run() + write_variable_to_github_output("test_targets", output) + + +if __name__ == "__main__": + main() diff --git a/.github/actions/ansible_test_splitter/test_list_changed_targets.py b/.github/actions/ansible_test_splitter/test_list_changed_targets.py new file mode 100644 index 00000000..98a2e031 --- /dev/null +++ b/.github/actions/ansible_test_splitter/test_list_changed_targets.py @@ -0,0 +1,421 @@ +#!/usr/bin/env python3 +"""Contains tests cases for list_changed_common and list_changed_targets modules.""" + +import io + +from pathlib import PosixPath +from typing import Any +from unittest.mock import ANY +from unittest.mock import MagicMock +from unittest.mock import patch + +import pytest + +from list_changed_common import Collection +from list_changed_common import ElGrandeSeparator +from list_changed_common import WhatHaveChanged +from list_changed_common import list_pyimport +from list_changed_common import make_unique +from list_changed_common import read_collection_name +from list_changed_common import read_collections_to_test +from list_changed_common import read_targets_to_test +from list_changed_common import read_test_all_the_targets +from list_changed_common import read_total_jobs + + +MY_MODULE = """ +from ..module_utils.core import AnsibleAWSModule +from ipaddress import ipaddress +import time +import botocore.exceptions +""" + +MY_MODULE_2 = """ +import ansible_collections.kubernetes.core.plugins.module_utils.k8sdynamicclient + +def main(): + mutually_exclusive = [ + ("resource_definition", "src"), + ] + module = AnsibleModule( + argument_spec=argspec(), + ) + from ansible_collections.kubernetes.core.plugins.module_utils.common import ( + K8sAnsibleMixin, + get_api_client, + ) + + k8s_ansible_mixin = K8sAnsibleMixin(module) +""" + +MY_MODULE_3 = """ +from .modules import AnsibleAWSModule +from ipaddress import ipaddress +import time +import botocore.exceptions +""" + + +def test_read_collection_name() -> None: + """Test read_collection_name method.""" + m_galaxy_file = MagicMock() + m_galaxy_file.open = lambda: io.BytesIO(b"name: b\nnamespace: a\n") + m_path = MagicMock() + m_path.__truediv__.return_value = m_galaxy_file + assert read_collection_name(m_path) == "a.b" + + +def test_list_pyimport() -> None: + """Test list_pyimport.""" + assert list(list_pyimport("ansible_collections.amazon.aws.plugins.", "modules", MY_MODULE)) == [ + "ansible_collections.amazon.aws.plugins.module_utils.core", + "ipaddress", + "time", + "botocore.exceptions", + ] + + assert list( + list_pyimport("ansible_collections.kubernetes.core.plugins.", "modules", MY_MODULE_2) + ) == [ + "ansible_collections.kubernetes.core.plugins.module_utils.k8sdynamicclient", + "ansible_collections.kubernetes.core.plugins.module_utils.common", + ] + + assert list( + list_pyimport("ansible_collections.amazon.aws.plugins.", "module_utils", MY_MODULE_3) + ) == [ + "ansible_collections.amazon.aws.plugins.module_utils.modules", + "ipaddress", + "time", + "botocore.exceptions", + ] + + +@patch("list_changed_common.read_collection_name") +def test_what_changed_files(m_read_collection_name: MagicMock) -> None: + """Test changes from 
WhatHaveChanged class. + + :param m_read_collection_name: read_collection mock method + """ + m_read_collection_name.return_value = "a.b" + whc = WhatHaveChanged(PosixPath("a"), "b") + whc.files = [ + PosixPath("tests/something"), + PosixPath("plugins/module_utils/core.py"), + PosixPath("plugins/plugin_utils/base.py"), + PosixPath("plugins/connection/aws_ssm.py"), + PosixPath("plugins/modules/ec2.py"), + PosixPath("plugins/lookup/aws_test.py"), + PosixPath("tests/integration/targets/k8s_target_1/action.yaml"), + PosixPath("tests/integration/targets/k8s_target_2/file.txt"), + PosixPath("tests/integration/targets/k8s_target_3/tasks/main.yaml"), + ] + assert list(whc.modules()) == [PosixPath("plugins/modules/ec2.py")] + assert list(whc.plugin_utils()) == [ + ( + PosixPath("plugins/plugin_utils/base.py"), + "ansible_collections.a.b.plugins.plugin_utils.base", + ) + ] + assert list(whc.module_utils()) == [ + ( + PosixPath("plugins/module_utils/core.py"), + "ansible_collections.a.b.plugins.module_utils.core", + ) + ] + assert list(whc.lookup()) == [PosixPath("plugins/lookup/aws_test.py")] + assert list(whc.targets()) == [ + "k8s_target_1", + "k8s_target_2", + "k8s_target_3", + ] + assert list(whc.connection()) == [PosixPath("plugins/connection/aws_ssm.py")] + + +def build_collection(aliases: list[Any]) -> Collection: + """Build Collection. + + :param aliases: aliases + :returns: Mock collection + """ + with patch("list_changed_common.read_collection_name") as m_read_collection_name: + m_read_collection_name.return_value = "some.collection" + mycollection = Collection(PosixPath("nowhere")) + m_c_path = MagicMock() + mycollection.collection_path = m_c_path + m_c_path.glob.return_value = aliases + return mycollection + + +def build_alias(name: str, text: str) -> MagicMock: + """Build target alias. + + :param name: collection name + :param text: alias file content + :returns: Mock target + """ + m_alias_file = MagicMock() + m_alias_file.read_text.return_value = text + m_alias_file.parent.name = name + return m_alias_file + + +def test_c_targets() -> None: + """Test add targets method from Collection class.""" + mycollection = build_collection([]) + assert not list(mycollection.targets()) + + mycollection = build_collection([build_alias("a", "ec2\n")]) + assert len(list(mycollection.targets())) == 1 + assert list(mycollection.targets())[0].name == "a" + assert list(mycollection.targets())[0].is_alias_of("ec2") + + mycollection = build_collection([build_alias("a", "#ec2\n")]) + assert len(list(mycollection.targets())) == 1 + assert list(mycollection.targets())[0].name == "a" + assert list(mycollection.targets())[0].execution_time() == 180 + + mycollection = build_collection([build_alias("a", "time=30\n")]) + assert len(list(mycollection.targets())) == 1 + assert list(mycollection.targets())[0].name == "a" + assert list(mycollection.targets())[0].execution_time() == 30 + + +def test_2_targets_for_one_module() -> None: + """Test 2 targets.""" + collection = build_collection( + [build_alias("a", "ec2_instance\n"), build_alias("b", "ec2_instance\n")] + ) + assert collection.regular_targets_to_test() == [] + collection.add_target_to_plan("ec2_instance") + assert collection.regular_targets_to_test() == ["a", "b"] + + +@patch("list_changed_common.read_collection_name") +def test_c_disabled_unstable(m_read_collection_name: MagicMock) -> None: + """Test disable/unstable targets. 
+ + :param m_read_collection_name: read_collection_name patched method + """ + m_read_collection_name.return_value = "some.collection" + collection = Collection(PosixPath("nowhere")) + m_c_path = MagicMock() + collection.collection_path = m_c_path + m_c_path.glob.return_value = [ + build_alias("a", "disabled\n"), + build_alias("b", "unstable\n"), + ] + + # all, we should ignore the disabled,unstable jobs + collection.cover_all() + assert len(collection.regular_targets_to_test()) == 0 + # if the module is targets, we continue to ignore the disabled + collection.add_target_to_plan("a") + assert len(collection.regular_targets_to_test()) == 0 + # unstable targets should not be triggered if they were pulled in as a dependency + collection.add_target_to_plan("b", is_direct=False) + assert len(collection.regular_targets_to_test()) == 0 + # but the unstable is ok when directly triggered + collection.add_target_to_plan("b") + assert len(collection.regular_targets_to_test()) == 1 + + +@patch("list_changed_common.read_collection_name") +def test_c_slow_regular_targets(m_read_collection_name: MagicMock) -> None: + """Test targets* methods from Collection class. + + :param m_read_collection_name: read_collection_name patched method + """ + m_read_collection_name.return_value = "some.collection" + collection = build_collection( + [ + build_alias("tortue", "slow\nec2\n#s3\n"), + build_alias("lapin", "notslow\ncarrot\n\n"), + ] + ) + + collection.cover_all() + assert len(list(collection.targets())) == 2 + assert list(collection.targets())[0].is_slow() + assert not list(collection.targets())[1].is_slow() + assert len(collection.slow_targets_to_test()) == 1 + + +def test_c_inventory_targets() -> None: + """Test targets methods from Collection class.""" + col = build_collection( + [ + build_alias("inventory_tortue", "slow\nec2\n#s3\n"), + build_alias("lapin", "notslow\ninventory_carrot\n\n"), + ] + ) + col.cover_all() + assert len(list(col.targets())) == 2 + assert list(col.targets())[0].is_slow() + assert not list(col.targets())[1].is_slow() + assert len(col.slow_targets_to_test()) == 1 + + +@patch("list_changed_common.read_collection_name") +def test_c_with_cover(m_read_collection_name: MagicMock) -> None: + """Test add_target_to_plan method from Collection class. 
+ + :param m_read_collection_name: read_collection_name patched method + """ + m_read_collection_name.return_value = "some.collection" + collection = Collection(PosixPath("nowhere")) + m_c_path = MagicMock() + collection.collection_path = m_c_path + + m_c_path.glob.return_value = [ + build_alias("tortue", "slow\nec2\n#s3\n"), + build_alias("lapin", "carrot\n\n"), + ] + collection.add_target_to_plan("ec2") + assert len(collection.slow_targets_to_test()) == 1 + assert collection.regular_targets_to_test() == [] + + +def test_splitter_with_time() -> None: + """Test splitter method from class ElGrandeSeparator.""" + collection_1 = build_collection( + [ + build_alias("a", "time=50m\n"), + build_alias("b", "time=10m\n"), + build_alias("c", "time=180\n"), + build_alias("d", "time=140s \n"), + build_alias("e", "time=70\n"), + ] + ) + collection_1.cover_all() + egs = ElGrandeSeparator([collection_1], ANY) + result = list(egs.build_up_batches([f"slot{i}" for i in range(2)], collection_1)) + assert result == [ + ("slot0", ["a"]), + ("slot1", ["b", "c", "d", "e"]), + ] + + collection_2 = build_collection( + [ + build_alias("a", "time=50m\n"), + build_alias("b", "time=50m\n"), + build_alias("c", "time=18\n"), + build_alias("d", "time=5m\n"), + ] + ) + collection_2.cover_all() + egs = ElGrandeSeparator([collection_2], ANY) + result = list(egs.build_up_batches([f"slot{i}" for i in range(3)], collection_2)) + assert result == [("slot0", ["a"]), ("slot1", ["b"]), ("slot2", ["d", "c"])] + + +@patch("list_changed_common.read_collection_name") +@patch("list_changed_common.run_command") +def test_what_changed_git_call(m_run_command: MagicMock, m_read_collection_name: MagicMock) -> None: + """Test changed_files method from WhatHaveChanged class. + + :param m_run_command: run_command patched method + :param m_read_collection_name: read_collection_name patched method + """ + m_run_command.return_value = "plugins/modules/foo.py\n" + m_read_collection_name.return_value = "a.b" + + whc = WhatHaveChanged(PosixPath("a"), "stable-2.1") + assert whc.changed_files() == [PosixPath("plugins/modules/foo.py")] + + m_run_command.assert_called_with( + command="git diff origin/stable-2.1 --name-only", + chdir=PosixPath("a"), + ) + + +def test_make_unique() -> None: + """Test test_make_unique function.""" + assert make_unique(["a", "b", "a"]) == ["a", "b"] + assert make_unique(["a", "b"]) == ["a", "b"] + + +def test_read_test_all_the_targets(monkeypatch: pytest.MonkeyPatch) -> None: + """Test read_test_all_the_targets function. + + :param monkeypatch: monkey patch + """ + # default value when environment variable is not defined + assert read_test_all_the_targets() is False + + # ANSIBLE_TEST_ALL_THE_TARGETS -> 'any' + monkeypatch.setenv("ANSIBLE_TEST_ALL_THE_TARGETS", "any") + assert read_test_all_the_targets() is False + + # ANSIBLE_TEST_ALL_THE_TARGETS -> 'TRUE' + monkeypatch.setenv("ANSIBLE_TEST_ALL_THE_TARGETS", "TRUE") + assert read_test_all_the_targets() is True + + # ANSIBLE_TEST_ALL_THE_TARGETS -> 'True' + monkeypatch.setenv("ANSIBLE_TEST_ALL_THE_TARGETS", "True") + assert read_test_all_the_targets() is True + + +def test_read_total_jobs(monkeypatch: pytest.MonkeyPatch) -> None: + """Test read_total_jobs function. 
+ + :param monkeypatch: monkey patch + """ + # default value when environment variable is not defined + assert read_total_jobs() == 3 + + # TOTAL_JOBS -> 'any' + monkeypatch.setenv("TOTAL_JOBS", "any") + assert read_total_jobs() == 3 + + # TOTAL_JOBS -> '07' + monkeypatch.setenv("TOTAL_JOBS", "07") + assert read_total_jobs() == 7 + + # TOTAL_JOBS -> '5' + monkeypatch.setenv("TOTAL_JOBS", "5") + assert read_total_jobs() == 5 + + +def test_read_targets_to_test(monkeypatch: pytest.MonkeyPatch) -> None: + """Test read_targets_to_test function. + + :param monkeypatch: monkey patch + """ + # default value when environment variable is not defined + assert not read_targets_to_test() + + body = "No target to test set here" + monkeypatch.setenv("PULL_REQUEST_BODY", body) + assert not read_targets_to_test() + + body = ( + "This is the first line of my pull request description\n" + "TargetsToTest=collection1:target_01,target_02;collection2:target_2" + ) + monkeypatch.setenv("PULL_REQUEST_BODY", body) + print(body) + assert read_targets_to_test() == { + "collection1": ["target_01", "target_02"], + "collection2": ["target_2"], + } + + body = ( + "This is the first line of my pull request description\n" + "TARGETSTOTEST=collection1:target_01,target_02;collection2:target_2;" + ) + monkeypatch.setenv("PULL_REQUEST_BODY", body) + assert read_targets_to_test() == { + "collection1": ["target_01", "target_02"], + "collection2": ["target_2"], + } + + +def test_read_collections_to_test(monkeypatch: pytest.MonkeyPatch) -> None: + """Test read_collections_to_test function. + + :param monkeypatch: monkey patch + """ + collection_to_test = "col1,col2\n ,col3" + monkeypatch.setenv("COLLECTIONS_TO_TEST", collection_to_test) + assert read_collections_to_test() == [PosixPath("col1"), PosixPath("col2"), PosixPath("col3")] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8b5bb1f0..bb65854a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -86,3 +86,4 @@ repos: additional_dependencies: - pytest - pygithub + - pyyaml
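
For reference, ElGrandeSeparator.output() (and therefore the action output "test_targets") is a single string of the form "namespace.collection-1:target_a,target_b;namespace.collection-2:target_c". Below is a minimal sketch of how a consumer workflow step could split that string back into per-slot target lists; the helper name parse_test_targets is illustrative only and not part of this change:

    def parse_test_targets(raw: str) -> dict[str, list[str]]:
        """Split "ns.col-1:tgt_a,tgt_b;ns.col-2:tgt_c" into a {slot: [targets]} mapping."""
        result: dict[str, list[str]] = {}
        for batch in raw.split(";"):
            # Skip empty batches (e.g. a trailing separator)
            if not batch:
                continue
            slot, _, targets = batch.partition(":")
            result[slot] = targets.split(",")
        return result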