diff --git a/docs/ansible_runner.config.rst b/docs/ansible_runner.config.rst index 9f145e4e7..56bfa83bb 100644 --- a/docs/ansible_runner.config.rst +++ b/docs/ansible_runner.config.rst @@ -7,4 +7,5 @@ Submodules ansible_runner.config.runner module ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: ansible_runner.config.runner.BaseConfig .. autoclass:: ansible_runner.config.runner.RunnerConfig diff --git a/docs/ansible_runner.rst b/docs/ansible_runner.rst index cec321ae1..f1fe6e65a 100644 --- a/docs/ansible_runner.rst +++ b/docs/ansible_runner.rst @@ -25,6 +25,7 @@ ansible_runner.interface module .. automodule:: ansible_runner.interface :members: + :exclude-members: init_runner :undoc-members: :show-inheritance: @@ -44,14 +45,6 @@ ansible_runner.runner module :undoc-members: :show-inheritance: -ansible_runner.runner\_config module -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. automodule:: ansible_runner.runner_config - :members: - :undoc-members: - :show-inheritance: - ansible_runner.utils module ^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/conf.py b/docs/conf.py index bfcc5cbe4..0fdbbc7bf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,6 +26,10 @@ def _get_version(): nitpicky = True +nitpick_ignore = { + ('py:class', '_io.FileIO') +} + default_role = 'any' # This catches single backticks (incorrectly) used for inline code formatting project = 'ansible-runner' copyright = f'2018-{datetime.datetime.today().year}, Red Hat, Inc' diff --git a/docs/intro.rst b/docs/configuration.rst similarity index 99% rename from docs/intro.rst rename to docs/configuration.rst index 9c8a543e4..899a2ab1a 100644 --- a/docs/intro.rst +++ b/docs/configuration.rst @@ -1,7 +1,7 @@ -.. _intro: +.. _config: -Introduction to Ansible Runner -============================== +Configuring Ansible Runner +========================== **Runner** is intended to be most useful as part of automation and tooling that needs to invoke Ansible and consume its results. Most of the parameterization of the **Ansible** command line is also available on the **Runner** command line but **Runner** also diff --git a/docs/index.rst b/docs/index.rst index 328402f9d..f679e336a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,7 +15,7 @@ want to manage the complexities of the interface on their own (such as CI/CD pla for running ``ansible`` and ``ansible-playbook`` tasks and gathers the output from it. It does this by presenting a common interface that doesn't change, even as **Ansible** itself grows and evolves. -Part of what makes this tooling useful is that it can gather its inputs in a flexible way (See :ref:`intro`:). It also has a system for storing the +Part of what makes this tooling useful is that it can gather its inputs in a flexible way (See :ref:`config`). It also has a system for storing the output (stdout) and artifacts (host-level event data, fact data, etc) of the playbook run. There are 3 primary ways of interacting with **Runner** @@ -30,14 +30,18 @@ Examples of this could include: * Sending status to Ansible AWX * Sending events to an external logging service - .. toctree:: :maxdepth: 1 - :caption: Contents: + :caption: Installation, Upgrade & Configuration - intro install - community + configuration + porting_guides/porting_guide + +.. toctree:: + :maxdepth: 1 + :caption: Using Ansible Runner + external_interface standalone python_interface @@ -45,6 +49,12 @@ Examples of this could include: remote_jobs modules +.. 
toctree::
+   :maxdepth: 1
+   :caption: Getting Help
+
+   community
+
 Indices and tables
 ==================
diff --git a/docs/porting_guides/porting_guide.rst b/docs/porting_guides/porting_guide.rst
new file mode 100644
index 000000000..2cda565c4
--- /dev/null
+++ b/docs/porting_guides/porting_guide.rst
@@ -0,0 +1,12 @@
+*****************************
+Ansible Runner Porting Guides
+*****************************
+
+This section lists porting guides that can help you when upgrading to newer
+versions of ``ansible-runner``.
+
+
+.. toctree::
+   :maxdepth: 1
+
+   porting_guide_v2.5
diff --git a/docs/porting_guides/porting_guide_v2.5.rst b/docs/porting_guides/porting_guide_v2.5.rst
new file mode 100644
index 000000000..c0bf47aaf
--- /dev/null
+++ b/docs/porting_guides/porting_guide_v2.5.rst
@@ -0,0 +1,46 @@
+********************************
+Ansible Runner 2.5 Porting Guide
+********************************
+
+This section discusses the behavioral changes between ``ansible-runner`` version 2.4 and version 2.5.
+
+.. contents:: Topics
+
+Changes to the Python Interface
+===============================
+
+:ref:`Remote job execution ` could experience problems when transmitting job arguments
+to a worker node with an older version of ``ansible-runner``. If the older worker received a job
+argument that it did not understand, such as a newly added API argument, that worker would terminate
+abnormally. To address this, a two-stage plan was devised:
+
+#. Modify the streaming job arguments so that only job arguments that have non-default values are
+   streamed to the worker node. A new API job argument will not be problematic for older workers
+   unless that argument is actually used.
+#. Have the worker node fail gracefully on unrecognized job arguments.
+
+The first step of this process is implemented in this version of ``ansible-runner``. The second
+step will be completed in a future version.
+
+For this change, it was necessary to modify how the ``run()`` and ``run_async()`` functions
+of the :ref:`Python API ` are implemented. The API function arguments are now completely
+defined in the ``RunnerConfig`` object, where we have better control of the job arguments, and both
+functions now take an optional ``config`` parameter.
+
+For backward compatibility, keyword arguments to the ``run()/run_async()`` API functions are passed
+along to the ``RunnerConfig`` object initialization for you. Alternatively, you may choose to use
+the more up-to-date signature of those API functions where you pass in a manually created ``RunnerConfig``
+object. For example:
+
+.. code-block:: python
+
+    import ansible_runner
+    config = ansible_runner.RunnerConfig(private_data_dir='/tmp/demo', playbook='test.yml')
+    r = ansible_runner.interface.run(config=config)
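+
+The ``run_async()`` function accepts the same optional ``config`` parameter. A minimal sketch of
+the asynchronous form (the private data directory and playbook name are placeholders):
+
+.. code-block:: python
+
+    import ansible_runner
+
+    config = ansible_runner.RunnerConfig(private_data_dir='/tmp/demo', playbook='test.yml')
+    thread, runner = ansible_runner.interface.run_async(config=config)
+    thread.join()  # run_async() returns the background thread and the Runner object
+
+The first ``run()`` example above is identical to the more familiar usage of the API:
+
+..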
code-block:: python + + import ansible_runner + r = ansible_runner.interface.run(private_data_dir='/tmp/demo', playbook='test.yml') diff --git a/pyproject.toml b/pyproject.toml index 74840194f..ace258c41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ disable = [ "C0115", # missing-class-docstring "C0116", # missing-function-docstring "C0301", # line-too-long + "C0302", # too-many-lines "R0401", # cyclic-import "R0801", # duplicate-code "R0902", # too-many-instance-attributes diff --git a/src/ansible_runner/__main__.py b/src/ansible_runner/__main__.py index 7f4e8e386..8b04c324b 100644 --- a/src/ansible_runner/__main__.py +++ b/src/ansible_runner/__main__.py @@ -44,7 +44,9 @@ from ansible_runner import run from ansible_runner import output from ansible_runner import cleanup -from ansible_runner.utils import dump_artifact, Bunch, register_for_cleanup +from ansible_runner._internal._dump_artifacts import dump_artifact +from ansible_runner.defaults import default_process_isolation_executable +from ansible_runner.utils import Bunch, register_for_cleanup from ansible_runner.utils.capacity import get_cpu_count, get_mem_in_bytes, ensure_uuid from ansible_runner.utils.importlib_compat import importlib_metadata from ansible_runner.runner import Runner @@ -822,14 +824,11 @@ def main(sys_args=None): else: vargs['inventory'] = abs_inv - output.configure() - - # enable or disable debug mode - output.set_debug('enable' if vargs.get('debug') else 'disable') - - # set the output logfile + debug = bool(vargs.get('debug')) + logfile = '' if ('logfile' in args) and vargs.get('logfile'): - output.set_logfile(vargs.get('logfile')) + logfile = vargs.get('logfile') + output.configure(debug, logfile) output.debug('starting debug logging') @@ -886,8 +885,8 @@ def main(sys_args=None): "project_dir": vargs.get('project_dir'), "artifact_dir": vargs.get('artifact_dir'), "roles_path": [vargs.get('roles_path')] if vargs.get('roles_path') else None, - "process_isolation": vargs.get('process_isolation'), - "process_isolation_executable": vargs.get('process_isolation_executable'), + "process_isolation": bool(vargs.get('process_isolation')), + "process_isolation_executable": vargs.get('process_isolation_executable') or default_process_isolation_executable, "process_isolation_path": vargs.get('process_isolation_path'), "process_isolation_hide_paths": vargs.get('process_isolation_hide_paths'), "process_isolation_show_paths": vargs.get('process_isolation_show_paths'), diff --git a/src/ansible_runner/_internal/__init__.py b/src/ansible_runner/_internal/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/ansible_runner/_internal/_dump_artifacts.py b/src/ansible_runner/_internal/_dump_artifacts.py new file mode 100644 index 000000000..48a1e6ade --- /dev/null +++ b/src/ansible_runner/_internal/_dump_artifacts.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +import fcntl +import hashlib +import json +import os +import stat +import tempfile + +from collections.abc import MutableMapping +from typing import Any + +from ansible_runner.config.runner import RunnerConfig +from ansible_runner.utils import isinventory, isplaybook + + +def dump_artifacts(config: RunnerConfig) -> None: + """Introspect the arguments and dump objects to disk""" + private_data_dir = config.private_data_dir or "" + + if config.role: + role: dict[str, Any] = {'name': config.role} + if config.role_vars: + role['vars'] = config.role_vars + + hosts = config.host_pattern or 'all' + play: list[dict[str, Any]] = 
[{'hosts': hosts, 'roles': [role]}]
+
+        if config.role_skip_facts:
+            play[0]['gather_facts'] = False
+
+        config.playbook = play
+
+        if config.envvars is None:
+            config.envvars = {}
+
+        roles_path = config.roles_path
+        if not roles_path:
+            roles_path = os.path.join(private_data_dir, 'roles')
+        else:
+            roles_path += f":{os.path.join(private_data_dir, 'roles')}"
+
+        config.envvars['ANSIBLE_ROLES_PATH'] = roles_path
+
+    playbook = config.playbook
+    if playbook:
+        # Ensure the play is a list of dictionaries
+        if isinstance(playbook, MutableMapping):
+            playbook = [playbook]
+
+        if isplaybook(playbook):
+            path = os.path.join(private_data_dir, 'project')
+            config.playbook = dump_artifact(json.dumps(playbook), path, 'main.json')
+
+    obj = config.inventory
+    if obj and isinventory(obj):
+        path = os.path.join(private_data_dir, 'inventory')
+        if isinstance(obj, MutableMapping):
+            config.inventory = dump_artifact(json.dumps(obj), path, 'hosts.json')
+        elif isinstance(obj, str):
+            if not os.path.exists(os.path.join(path, obj)):
+                config.inventory = dump_artifact(obj, path, 'hosts')
+            elif os.path.isabs(obj):
+                config.inventory = obj
+            else:
+                config.inventory = os.path.join(path, obj)
+
+    if not config.suppress_env_files:
+        for key in ('envvars', 'extravars', 'passwords', 'settings'):
+            obj = getattr(config, key, None)
+            if obj and not os.path.exists(os.path.join(private_data_dir, 'env', key)):
+                path = os.path.join(private_data_dir, 'env')
+                dump_artifact(json.dumps(obj), path, key)
+
+        for key in ('ssh_key', 'cmdline'):
+            obj = getattr(config, key, None)
+            if obj and not os.path.exists(os.path.join(private_data_dir, 'env', key)):
+                path = os.path.join(private_data_dir, 'env')
+                dump_artifact(obj, path, key)
+
+
+def dump_artifact(obj: str,
+                  path: str,
+                  filename: str | None = None
+                  ) -> str:
+    """Write the artifact to disk at the specified path
+
+    :param str obj: The string content to be dumped to disk in the specified path.
+    :param str path: The full path to the artifacts data directory.
+    :param str filename: The name of the file to write the artifact to.
+        If the filename is not provided, one will be generated.
+
+    :return: The full path to the artifact file that was written.
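+
+    A minimal illustrative call (the path is a placeholder), mirroring how this module
+    writes ``env`` files:
+
+    .. code-block:: python
+
+        dump_artifact(json.dumps({'suppress_output_file': True}), '/tmp/demo/env', 'settings')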
+ """ + if not os.path.exists(path): + os.makedirs(path, mode=0o700) + + p_sha1 = hashlib.sha1() + p_sha1.update(obj.encode(encoding='UTF-8')) + + if filename is None: + _, fn = tempfile.mkstemp(dir=path) + else: + fn = os.path.join(path, filename) + + if os.path.exists(fn): + c_sha1 = hashlib.sha1() + with open(fn) as f: + contents = f.read() + c_sha1.update(contents.encode(encoding='UTF-8')) + + if not os.path.exists(fn) or p_sha1.hexdigest() != c_sha1.hexdigest(): + lock_fp = os.path.join(path, '.artifact_write_lock') + lock_fd = os.open(lock_fp, os.O_RDWR | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR) + fcntl.lockf(lock_fd, fcntl.LOCK_EX) + + try: + with open(fn, 'w') as f: + os.chmod(fn, stat.S_IRUSR | stat.S_IWUSR) + f.write(str(obj)) + finally: + fcntl.lockf(lock_fd, fcntl.LOCK_UN) + os.close(lock_fd) + os.remove(lock_fp) + + return fn diff --git a/src/ansible_runner/config/_base.py b/src/ansible_runner/config/_base.py index 819640df3..7ce75b759 100644 --- a/src/ansible_runner/config/_base.py +++ b/src/ansible_runner/config/_base.py @@ -29,9 +29,10 @@ import tempfile import shutil from base64 import b64encode +from dataclasses import dataclass, field from enum import Enum from uuid import uuid4 -from collections.abc import Mapping +from collections.abc import Callable, Mapping from typing import Any import pexpect @@ -61,96 +62,154 @@ class BaseExecutionMode(Enum): GENERIC_COMMANDS = 2 +# Metadata string values +class MetaValues(Enum): + # When True, value is emitted from Transmitter to send to Worker. + TRANSMIT = 'transmit' + + +@dataclass class BaseConfig: + """The base configuration object. + + This object has multiple initialization responsibilities, including: + - guaranteeing the ``private_data_dir`` directory exists + - guaranteeing that ``ident`` value is set + - setting the various work directory attributes based on ``private_data_dir`` + + :param str private_data_dir: The directory containing all runner metadata needed to invoke the runner + module. Output artifacts will also be stored here for later consumption. + :param str artifact_dir: The path to the directory where artifacts should live. This defaults to + ``artifacts`` under ``private_data_dir``. + :param bool check_job_event_data: Check if job events data is completely generated. If event data is + not completely generated and if value is set to ``True`` it will raise an `AnsibleRunnerException` exception. + If set to ``False``, it log a debug message and continue execution. Default value is ``False``. + :param dict container_auth_data: Container registry authentication data containing ``host``, ``username``, and + ``password`` entries. + :param str container_image: Container image to use when running an ansible task. + :param list container_options: List of container options to pass to execution engine. + :param list container_volume_mounts: List of bind mounts in the form 'host_dir:/container_dir`. Default to ``None``. + :param str container_workdir: The working directory within the container. + :param dict envvars: Environment variables to be used when running Ansible. Environment variables will also be + read from ``env/envvars`` in ``private_data_dir``. + :param str fact_cache: A string that will be used as the name for the subdirectory of the fact cache in + artifacts directory. This is only used for ``jsonfile`` type fact caches. + :param str fact_cache_type: A string of the type of fact cache to use. Defaults to ``jsonfile``. 
+    :param str host_cwd: The host's current working directory, to be mounted within the container (if enabled)
+        and used as the working directory within the container.
+    :param str ident: The run identifier for this invocation of Runner. Will be used to create and name
+        the artifact directory holding the results of the invocation.
+    :param bool json_mode: Store event data in place of stdout on the console and in the stdout file.
+    :param int keepalive_seconds: Used within the streaming Worker object to inject a keepalive event.
+    :param dict passwords: A dictionary containing password prompt patterns and response values used when processing
+        output from Ansible. Passwords will also be read from ``env/passwords`` in ``private_data_dir``.
+    :param bool process_isolation: Enable process isolation, using either a container engine (e.g. podman) or a
+        sandbox (e.g. bwrap).
+    :param str process_isolation_executable: Process isolation executable or container engine used to isolate
+        execution. (default: podman)
+    :param str project_dir: The path to the playbook content. Defaults to ``project`` within ``private_data_dir``.
+    :param bool quiet: Disable all output.
+    :param int rotate_artifacts: Keep at most n artifact directories. Disable with a value of ``0`` (the default).
+    :param dict settings: A dictionary containing settings values for the ``ansible-runner`` runtime environment.
+        These will also be read from ``env/settings`` in ``private_data_dir``.
+    :param str ssh_key: The ssh private key passed to ``ssh-agent`` as part of the ansible-playbook run.
+    :param bool suppress_env_files: Disable the writing of files into the ``env`` directory, which may store
+        sensitive information.
+    :param int timeout: The timeout value, in seconds, that will be passed to either the ``pexpect`` or ``subprocess``
+        invocation (based on the ``runner_mode`` selected) while executing the command. If the timeout is triggered,
+        it will force cancel the execution.
+    :param Callable event_handler: An optional callback that will be invoked any time an event is received by Runner itself.
+        Return ``True`` to keep the event.
+    :param Callable cancel_callback: An optional callback that can inform runner to cancel (returning ``True``) or not
+        (returning ``False``).
+    :param Callable finished_callback: An optional callback that will be invoked at shutdown after process cleanup.
+    :param Callable status_handler: An optional callback that will be invoked any time the status changes
+        (e.g. started, running, failed, successful, timeout).
+    :param Callable artifacts_handler: An optional callback that will be invoked at the end of the run to deal with
+        the artifacts from the run.
+    """
+
+    # This MUST be the first field we define to handle the use case where a RunnerConfig object
+    # is instantiated with private_data_dir as the first positional argument (non-keyword).
+    # No other config objects make use of positional parameters, so this should be fine.
+    #
+    # Example use case: RunnerConfig("/tmp/demo", playbook="main.yml", ...)
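+    #
+    # An equivalent keyword-only call (illustrative): RunnerConfig(private_data_dir="/tmp/demo", playbook="main.yml", ...)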
+ private_data_dir: str | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + + artifact_dir: str | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + check_job_event_data: bool = False + container_auth_data: dict[str, str] | None = None + container_image: str | None = None + container_options: list[str] | None = None + container_volume_mounts: list[str] | None = None + container_workdir: str | None = None + envvars: dict[str, Any] | None = None + fact_cache: str | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + fact_cache_type: str = 'jsonfile' + host_cwd: str | None = None + ident: str | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + json_mode: bool = False + keepalive_seconds: int | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + passwords: dict[str, str] | None = None + process_isolation: bool = False + process_isolation_executable: str = defaults.default_process_isolation_executable + project_dir: str | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + quiet: bool = False + rotate_artifacts: int = 0 + settings: dict | None = None + ssh_key: str | None = None + suppress_env_files: bool = False + timeout: int | None = None + + event_handler: Callable[[dict], None] | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + status_handler: Callable[[dict, BaseConfig], bool] | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + artifacts_handler: Callable[[str], None] | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + cancel_callback: Callable[[], bool] | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + finished_callback: Callable[[BaseConfig], None] | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) - def __init__(self, - private_data_dir: str | None = None, - host_cwd: str | None = None, - envvars: dict[str, Any] | None = None, - passwords=None, - settings=None, - project_dir: str | None = None, - artifact_dir: str | None = None, - fact_cache_type: str = 'jsonfile', - fact_cache=None, - process_isolation: bool = False, - process_isolation_executable: str | None = None, - container_image: str = "", - container_volume_mounts=None, - container_options=None, - container_workdir: str | None = None, - container_auth_data=None, - ident: str | None = None, - rotate_artifacts: int = 0, - timeout: int | None = None, - ssh_key: str | None = None, - quiet: bool = False, - json_mode: bool = False, - check_job_event_data: bool = False, - suppress_env_files: bool = False, - keepalive_seconds: int | None = None - ): + _CONTAINER_ENGINES = ('docker', 'podman') + + def __post_init__(self) -> None: # pylint: disable=W0613 - # common params - self.host_cwd = host_cwd - self.envvars = envvars - self.ssh_key_data = ssh_key self.command: list[str] = [] - - # container params - self.process_isolation = process_isolation - self.process_isolation_executable = process_isolation_executable or defaults.default_process_isolation_executable - self.container_image = container_image - self.container_volume_mounts = container_volume_mounts - self.container_workdir = container_workdir - self.container_auth_data = container_auth_data self.registry_auth_path: str self.container_name: str = "" # like other properties, not accurate until prepare is called - self.container_options = container_options - - # runner params - self.rotate_artifacts = rotate_artifacts - self.quiet = quiet - self.json_mode = json_mode - self.passwords = 
passwords - self.settings = settings - self.timeout = timeout - self.check_job_event_data = check_job_event_data - self.suppress_env_files = suppress_env_files + if self.container_image is None: + self.container_image = '' + # ignore this for now since it's worker-specific and would just trip up old runners # self.keepalive_seconds = keepalive_seconds # setup initial environment - if private_data_dir: - self.private_data_dir = os.path.abspath(private_data_dir) + if self.private_data_dir: + self.private_data_dir = os.path.abspath(self.private_data_dir) # Note that os.makedirs, exist_ok=True is dangerous. If there's a directory writable # by someone other than the user anywhere in the path to be created, an attacker can # attempt to compromise the directories via a race. - os.makedirs(self.private_data_dir, exist_ok=True, mode=0o700) + try: + os.makedirs(self.private_data_dir, exist_ok=True, mode=0o700) + except Exception as error: + raise ConfigurationError(f"Unable to create private_data_dir {self.private_data_dir}") from error else: self.private_data_dir = tempfile.mkdtemp(prefix=defaults.AUTO_CREATE_NAMING, dir=defaults.AUTO_CREATE_DIR) + register_for_cleanup(self.private_data_dir) - if artifact_dir is None: - artifact_dir = os.path.join(self.private_data_dir, 'artifacts') + if self.artifact_dir is None: + self.artifact_dir = os.path.join(self.private_data_dir, 'artifacts') else: - artifact_dir = os.path.abspath(artifact_dir) + self.artifact_dir = os.path.abspath(self.artifact_dir) - if ident is None: + if self.ident is None: self.ident = str(uuid4()) + self.ident_set_by_user = False else: - self.ident = str(ident) + self.ident_set_by_user = True + self.ident = str(self.ident) - self.artifact_dir = os.path.join(artifact_dir, self.ident) + self.artifact_dir = os.path.join(self.artifact_dir, self.ident) - if not project_dir: + if not self.project_dir: self.project_dir = os.path.join(self.private_data_dir, 'project') - else: - self.project_dir = project_dir - self.rotate_artifacts = rotate_artifacts - self.fact_cache_type = fact_cache_type - self.fact_cache = os.path.join(self.artifact_dir, fact_cache or 'fact_cache') if self.fact_cache_type == 'jsonfile' else None + self.fact_cache = os.path.join(self.artifact_dir, self.fact_cache or 'fact_cache') if self.fact_cache_type == 'jsonfile' else None self.loader = ArtifactLoader(self.private_data_dir) @@ -162,37 +221,53 @@ def __init__(self, os.makedirs(self.artifact_dir, exist_ok=True, mode=0o700) - _CONTAINER_ENGINES = ('docker', 'podman') - @property def containerized(self): return self.process_isolation and self.process_isolation_executable in self._CONTAINER_ENGINES + @property + def ssh_key_data(self): + """ Alias for backward compatibility. 
""" + return self.ssh_key + + @ssh_key_data.setter + def ssh_key_data(self, value): + self.ssh_key = value + def prepare_env(self, runner_mode: str = 'pexpect') -> None: """ Manages reading environment metadata files under ``private_data_dir`` and merging/updating with existing values so the :py:class:`ansible_runner.runner.Runner` object can read and use them easily """ + if self.ident is None: + raise ConfigurationError("ident value cannot be None") + if self.artifact_dir is None: + raise ConfigurationError("artifact_dir value cannot be None") + self.runner_mode = runner_mode try: if self.settings and isinstance(self.settings, dict): self.settings.update(self.loader.load_file('env/settings', Mapping)) # type: ignore else: - self.settings = self.loader.load_file('env/settings', Mapping) + self.settings = self.loader.load_file('env/settings', Mapping) # type: ignore except ConfigurationError: debug("Not loading settings") self.settings = {} + # Safety net + if self.settings is None: + self.settings = {} + if self.runner_mode == 'pexpect': try: if self.passwords and isinstance(self.passwords, dict): self.passwords.update(self.loader.load_file('env/passwords', Mapping)) # type: ignore else: - self.passwords = self.passwords or self.loader.load_file('env/passwords', Mapping) + self.passwords = self.passwords or self.loader.load_file('env/passwords', Mapping) # type: ignore except ConfigurationError: debug('Not loading passwords') - self.expect_passwords = {} + self.expect_passwords: dict[Any, Any] = {} try: if self.passwords: self.expect_passwords = { @@ -268,16 +343,16 @@ def prepare_env(self, runner_mode: str = 'pexpect') -> None: # Still need to pass default environment to pexpect try: - if self.ssh_key_data is None: - self.ssh_key_data = self.loader.load_file('env/ssh_key', str) # type: ignore + if self.ssh_key is None: + self.ssh_key = self.loader.load_file('env/ssh_key', str) # type: ignore except ConfigurationError: debug("Not loading ssh key") - self.ssh_key_data = None + self.ssh_key = None # write the SSH key data into a fifo read by ssh-agent - if self.ssh_key_data: + if self.ssh_key: self.ssh_key_path = os.path.join(self.artifact_dir, 'ssh_key_data') - open_fifo_write(self.ssh_key_path, self.ssh_key_data) + open_fifo_write(self.ssh_key_path, self.ssh_key) self.suppress_output_file = self.settings.get('suppress_output_file', False) self.suppress_ansible_output = self.settings.get('suppress_ansible_output', self.quiet) @@ -340,7 +415,7 @@ def prepare_env(self, runner_mode: str = 'pexpect') -> None: debug(f' {k}: {v}') def handle_command_wrap(self, execution_mode: BaseExecutionMode, cmdline_args: list[str]) -> None: - if self.ssh_key_data: + if self.ssh_key: logger.debug('ssh key data added') self.command = self.wrap_args_with_ssh_agent(self.command, self.ssh_key_path) @@ -492,6 +567,15 @@ def wrap_args_for_containerization(self, execution_mode: BaseExecutionMode, cmdline_args: list[str] ) -> list[str]: + if self.artifact_dir is None: + raise ConfigurationError("artifact_dir value cannot be None") + + if self.private_data_dir is None: + raise ConfigurationError("private_data_dir value cannot be None") + + if self.container_image is None: + raise ConfigurationError("container_image value cannot be None") + new_args = [self.process_isolation_executable] new_args.extend(['run', '--rm']) @@ -642,6 +726,9 @@ def wrap_args_with_ssh_agent(self, Given an existing command line and parameterization this will return the same command line wrapped with the necessary calls to ``ssh-agent`` """ + 
if self.ident is None:
+            raise ConfigurationError("ident value cannot be None")
+
         if self.containerized:
             artifact_dir = os.path.join("/runner/artifacts", self.ident)
             ssh_key_path = os.path.join(artifact_dir, "ssh_key_data")
diff --git a/src/ansible_runner/config/runner.py b/src/ansible_runner/config/runner.py
index 0633a23fa..e6c7c3af1 100644
--- a/src/ansible_runner/config/runner.py
+++ b/src/ansible_runner/config/runner.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 #
+from __future__ import annotations
 
 # pylint: disable=W0201
 
@@ -23,12 +24,15 @@
 import logging
 import os
 import shlex
+import shutil
 import stat
 import tempfile
-import shutil
+
+from dataclasses import dataclass, field, fields
+from typing import Any, BinaryIO
 
 from ansible_runner import output
-from ansible_runner.config._base import BaseConfig, BaseExecutionMode
+from ansible_runner.config._base import BaseConfig, BaseExecutionMode, MetaValues
 from ansible_runner.exceptions import ConfigurationError
 from ansible_runner.output import debug
 from ansible_runner.utils import register_for_cleanup
@@ -44,13 +48,13 @@ class ExecutionMode():
     RAW = 3
 
 
+@dataclass
 class RunnerConfig(BaseConfig):
-    """
-    A ``Runner`` configuration object that's meant to encapsulate the configuration used by the
+    """A ``Runner`` configuration object that's meant to encapsulate the configuration used by the
     :py:mod:`ansible_runner.runner.Runner` object to launch and manage the invocation of ``ansible``
     and ``ansible-playbook``
 
-    Typically this object is initialized for you when using the standard ``run`` interfaces in :py:mod:`ansible_runner.interface`
+    Typically, this object is initialized for you when using the standard ``run`` interfaces in :py:mod:`ansible_runner.interface`
     but can be used to construct the ``Runner`` configuration to be invoked elsewhere. It can also be overridden
     to provide different functionality to the Runner object.
 
@@ -60,52 +64,179 @@ class RunnerConfig(BaseConfig):
     >>> r = Runner(config=rc)
     >>> r.run()
 
+    This class inherits all the initialization parameters of the `BaseConfig` parent class, plus:
+
+    :param BinaryIO _input: An optional file or file-like object for use as input in a streaming pipeline.
+    :param BinaryIO _output: An optional file or file-like object for use as output in a streaming pipeline.
+    :param str binary: Path to an alternative ansible command.
+    :param str cmdline: Command line options passed to Ansible, read from ``env/cmdline`` in ``private_data_dir``.
+    :param str directory_isolation_base_path: An optional path that will be used as the base path to create a temp directory.
+        The project contents will be copied to this location, which will then be used as the working directory during
+        playbook execution.
+    :param dict extravars: Extra variables to be passed to Ansible at runtime using ``-e``. Extra vars will also be
+        read from ``env/extravars`` in ``private_data_dir``.
+    :param int forks: Control Ansible parallel concurrency.
+    :param str host_pattern: The host pattern to match when running in ad-hoc mode.
+    :param str or dict or list inventory: Overrides the inventory directory/file (supplied at ``private_data_dir/inventory``) with
+        a specific host or list of hosts. This can take the form of:
+
+          - Path to the inventory file in the ``private_data_dir/inventory`` directory or
+            an absolute path to the inventory file
+          - Native python dict supporting the YAML/json inventory structure
+          - A text INI formatted string
+          - A list of inventory sources, or an empty list to disable passing inventory
+
+    :param str limit: Matches ansible's ``--limit`` parameter to further constrain the inventory to be used.
+    :param str module: The module that will be invoked in ad-hoc mode by runner when executing Ansible.
+    :param str module_args: The module arguments that will be supplied to ad-hoc mode.
+    :param bool omit_event_data: Omits extra ansible event data from event payload (stdout and event still included).
+    :param bool only_failed_event_data: Omits extra ansible event data unless it's a failed event (stdout and event still included).
+    :param bool only_transmit_kwargs: If ``True``, the streaming Transmitter process will only send job arguments.
+    :param str or dict or list playbook: The playbook (either a list or dictionary of plays, or as a path relative to
+        ``private_data_dir/project``) that will be invoked by runner when executing Ansible.
+    :param str or list process_isolation_hide_paths: A path or list of paths on the system that should be hidden from the playbook run.
+    :param str or list process_isolation_ro_paths: A path or list of paths on the system that should be exposed to the playbook run as read-only.
+    :param str or list process_isolation_show_paths: A path or list of paths on the system that should be exposed to the playbook run.
+    :param str process_isolation_path: Path that an isolated playbook run will use for staging. (default: ``/tmp``)
+    :param str role: Name of the role to execute.
+    :param bool role_skip_facts: If ``True``, ``gather_facts`` will be set to ``False`` for execution of the named ``role``.
+    :param str or list roles_path: Directory or list of directories to assign to ``ANSIBLE_ROLES_PATH``.
+    :param dict role_vars: Variables and their values to use with the named ``role``.
+    :param str skip_tags: Value to pass to the ``--skip-tags`` option of ``ansible-playbook``.
+    :param bool suppress_ansible_output: If ``True``, Ansible output will not appear on stdout.
+    :param bool suppress_output_file: If ``True``, Ansible output will not be written to a file in the artifacts directory.
+    :param str tags: Value to pass to the ``--tags`` option of ``ansible-playbook``.
+    :param int verbosity: Control the verbosity level of ansible-playbook.
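+
+    As a sketch only (paths and values are placeholders), the ad-hoc parameters documented
+    above can be combined like this:
+
+    .. code-block:: python
+
+        rc = RunnerConfig(
+            private_data_dir='/tmp/demo',
+            host_pattern='localhost',
+            module='shell',
+            module_args='whoami',
+        )
+        rc.prepare()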
""" - def __init__(self, - private_data_dir, playbook=None, inventory=None, roles_path=None, limit=None, - module=None, module_args=None, verbosity=None, host_pattern=None, binary=None, - extravars=None, suppress_output_file=False, suppress_ansible_output=False, process_isolation_path=None, - process_isolation_hide_paths=None, process_isolation_show_paths=None, - process_isolation_ro_paths=None, tags=None, skip_tags=None, - directory_isolation_base_path=None, forks=None, cmdline=None, omit_event_data=False, - only_failed_event_data=False, **kwargs): - + _input: BinaryIO | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + _output: BinaryIO | None = field(metadata={MetaValues.TRANSMIT: False}, default=None) + + # 'binary' comes from the --binary CLI opt for an alternative ansible command path + binary: str | None = None + cmdline: str | None = None + directory_isolation_base_path: str | None = None + extravars: dict | None = None + forks: int | None = None + host_pattern: str | None = None + inventory: str | dict | list | None = None + limit: str | None = None + module: str | None = None + module_args: str | None = None + omit_event_data: bool = False + only_failed_event_data: bool = False + only_transmit_kwargs: bool = field(metadata={MetaValues.TRANSMIT: False}, default=False) + playbook: str | dict | list | None = None + process_isolation_hide_paths: str | list | None = None + process_isolation_ro_paths: str | list | None = None + process_isolation_show_paths: str | list | None = None + process_isolation_path: str | None = None + role: str = "" + role_skip_facts: bool = False + roles_path: str | None = None + role_vars: dict[str, str] | None = None + skip_tags: str | None = None + suppress_ansible_output: bool = False + suppress_output_file: bool = False + tags: str | None = None + verbosity: int | None = None + + def __post_init__(self) -> None: + # NOTE: Cannot call base class __init__() here as that causes some recursion madness. + # We can call its __post_init__(). + super().__post_init__() # TODO: Should we rename this in base class? 
self.runner_mode = "pexpect" - - super().__init__(private_data_dir, **kwargs) - - self.playbook = playbook - self.inventory = inventory - self.roles_path = roles_path - self.limit = limit - self.module = module - self.module_args = module_args - self.host_pattern = host_pattern - self.binary = binary - self.extra_vars = extravars - self.process_isolation_path = process_isolation_path self.process_isolation_path_actual = None - self.process_isolation_hide_paths = process_isolation_hide_paths - self.process_isolation_show_paths = process_isolation_show_paths - self.process_isolation_ro_paths = process_isolation_ro_paths - self.directory_isolation_path = directory_isolation_base_path - self.verbosity = verbosity - self.suppress_output_file = suppress_output_file - self.suppress_ansible_output = suppress_ansible_output - self.tags = tags - self.skip_tags = skip_tags self.execution_mode = ExecutionMode.NONE - self.forks = forks - self.cmdline_args = cmdline - - self.omit_event_data = omit_event_data - self.only_failed_event_data = only_failed_event_data @property def sandboxed(self): return self.process_isolation and self.process_isolation_executable not in self._CONTAINER_ENGINES + @property + def cmdline_args(self): + """Alias for backward compatibility.""" + return self.cmdline + + @cmdline_args.setter + def cmdline_args(self, value): + self.cmdline = value + + @property + def directory_isolation_path(self): + """Alias for backward compatibility.""" + return self.directory_isolation_base_path + + @directory_isolation_path.setter + def directory_isolation_path(self, value): + self.directory_isolation_base_path = value + + @property + def hosts(self): + """Alias for backward compatibility. + + dump_artifacts() makes reference to 'hosts' kwargs (API) value, even though it + is undocumented as an API parameter to interface.run(). We make it equivalent + to 'host_pattern' here to not break anyone. + """ + return self.host_pattern + + @hosts.setter + def hosts(self, value): + self.host_pattern = value + + @property + def extra_vars(self): + """Alias for backward compatibility.""" + return self.extravars + + @extra_vars.setter + def extra_vars(self, value): + self.extravars = value + + # Create internal aliases for '_input' and '_output' attributes since those are named like + # private attributes, yet they can be set from our public interfaces... go figure. + @property + def input(self): + return self._input + + @input.setter + def input(self, value): + self._input = value + + @property + def output(self): + return self._output + + @output.setter + def output(self, value): + self._output = value + + def streamable_attributes(self) -> dict[str, Any]: + """Get the set of streamable attributes that have a value that is different from the default. + + The field metadata indicates if the attribute is streamable from Transmit. By default, an attribute + is considered streamable (must be explicitly disabled). + + :return: A dict of attribute names and their values. + """ + retval = {} + for field_obj in fields(self): + if field_obj.metadata and not field_obj.metadata.get(MetaValues.TRANSMIT, True): + continue + current_value = getattr(self, field_obj.name) + + if field_obj.default == current_value: + continue + + # Treat an empty current value (e.g., {} or "") as the same as a default of None to prevent + # streaming unnecessary empty values. 
+ if field_obj.default is None and current_value in ({}, "", []): + continue + + retval[field_obj.name] = current_value + + return retval + def prepare(self): """ Performs basic checks and then properly invokes @@ -115,7 +246,7 @@ def prepare(self): - prepare_command It's also responsible for wrapping the command with the proper ssh agent invocation - and setting early ANSIBLE_ environment variables. + and setting early ``ANSIBLE_`` environment variables. """ # ansible_path = find_executable('ansible') # if ansible_path is None or not os.access(ansible_path, os.X_OK): @@ -131,11 +262,11 @@ def prepare(self): # we must call prepare_env() before we can reference it. self.prepare_env() - if self.sandboxed and self.directory_isolation_path is not None: - self.directory_isolation_path = tempfile.mkdtemp(prefix='runner_di_', dir=self.directory_isolation_path) + if self.sandboxed and self.directory_isolation_base_path is not None: + self.directory_isolation_base_path = tempfile.mkdtemp(prefix='runner_di_', dir=self.directory_isolation_base_path) if os.path.exists(self.project_dir): - output.debug(f"Copying directory tree from {self.project_dir} to {self.directory_isolation_path} for working directory isolation") - shutil.copytree(self.project_dir, self.directory_isolation_path, dirs_exist_ok=True, symlinks=True) + output.debug(f"Copying directory tree from {self.project_dir} to {self.directory_isolation_base_path} for working directory isolation") + shutil.copytree(self.project_dir, self.directory_isolation_base_path, dirs_exist_ok=True, symlinks=True) self.prepare_inventory() self.prepare_command() @@ -186,14 +317,14 @@ def prepare_env(self): self.process_isolation_hide_paths = self.settings.get('process_isolation_hide_paths', self.process_isolation_hide_paths) self.process_isolation_show_paths = self.settings.get('process_isolation_show_paths', self.process_isolation_show_paths) self.process_isolation_ro_paths = self.settings.get('process_isolation_ro_paths', self.process_isolation_ro_paths) - self.directory_isolation_path = self.settings.get('directory_isolation_base_path', self.directory_isolation_path) + self.directory_isolation_base_path = self.settings.get('directory_isolation_base_path', self.directory_isolation_base_path) self.directory_isolation_cleanup = bool(self.settings.get('directory_isolation_cleanup', True)) if 'AD_HOC_COMMAND_ID' in self.env or not os.path.exists(self.project_dir): self.cwd = self.private_data_dir else: - if self.directory_isolation_path is not None: - self.cwd = self.directory_isolation_path + if self.directory_isolation_base_path is not None: + self.cwd = self.directory_isolation_base_path else: self.cwd = self.project_dir @@ -240,8 +371,8 @@ def generate_ansible_command(self): exec_list = [base_command] try: - if self.cmdline_args: - cmdline_args = self.cmdline_args + if self.cmdline: + cmdline_args = self.cmdline else: cmdline_args = self.loader.load_file('env/cmdline', str, encoding=None) @@ -271,11 +402,11 @@ def generate_ansible_command(self): extravars_path = self.loader.abspath('env/extravars') exec_list.extend(['-e', f'@{extravars_path}']) - if self.extra_vars: - if isinstance(self.extra_vars, dict) and self.extra_vars: + if self.extravars: + if isinstance(self.extravars, dict) and self.extravars: extra_vars_list = [] - for k in self.extra_vars: - extra_vars_list.append(f"\"{k}\":{json.dumps(self.extra_vars[k])}") + for k in self.extravars: + extra_vars_list.append(f"\"{k}\":{json.dumps(self.extravars[k])}") exec_list.extend( [ @@ -283,8 +414,8 @@ def 
generate_ansible_command(self): f'{{{",".join(extra_vars_list)}}}' ] ) - elif self.loader.isfile(self.extra_vars): - exec_list.extend(['-e', f'@{self.loader.abspath(self.extra_vars)}']) + elif self.loader.isfile(self.extravars): + exec_list.extend(['-e', f'@{self.loader.abspath(self.extravars)}']) if self.verbosity: v = 'v' * self.verbosity @@ -385,8 +516,8 @@ def wrap_args_for_sandbox(self, args): if self.execution_mode == ExecutionMode.ANSIBLE_PLAYBOOK: # playbook runs should cwd to the SCM checkout dir - if self.directory_isolation_path is not None: - new_args.extend(['--chdir', os.path.realpath(self.directory_isolation_path)]) + if self.directory_isolation_base_path is not None: + new_args.extend(['--chdir', os.path.realpath(self.directory_isolation_base_path)]) else: new_args.extend(['--chdir', os.path.realpath(self.project_dir)]) elif self.execution_mode == ExecutionMode.ANSIBLE: @@ -398,7 +529,7 @@ def wrap_args_for_sandbox(self, args): def handle_command_wrap(self): # wrap args for ssh-agent - if self.ssh_key_data: + if self.ssh_key: debug('ssh-agent agrs added') self.command = self.wrap_args_with_ssh_agent(self.command, self.ssh_key_path) @@ -413,6 +544,6 @@ def handle_command_wrap(self): # container volume mount is handled explicitly for run API's # using 'container_volume_mounts' arguments base_execution_mode = BaseExecutionMode.NONE - self.command = self.wrap_args_for_containerization(self.command, base_execution_mode, self.cmdline_args) + self.command = self.wrap_args_for_containerization(self.command, base_execution_mode, self.cmdline) else: debug('containerization disabled') diff --git a/src/ansible_runner/interface.py b/src/ansible_runner/interface.py index 0b3dbc627..387860fd4 100644 --- a/src/ansible_runner/interface.py +++ b/src/ansible_runner/interface.py @@ -17,6 +17,8 @@ # specific language governing permissions and limitations # under the License. # +from __future__ import annotations + import os import json import sys @@ -24,6 +26,7 @@ import logging from ansible_runner import output +from ansible_runner._internal._dump_artifacts import dump_artifacts from ansible_runner.config.runner import RunnerConfig from ansible_runner.config.command import CommandConfig from ansible_runner.config.inventory import InventoryConfig @@ -32,7 +35,6 @@ from ansible_runner.runner import Runner from ansible_runner.streaming import Transmitter, Worker, Processor from ansible_runner.utils import ( - dump_artifacts, check_isolation_executable_installed, sanitize_json_response, signal_handler, @@ -41,7 +43,7 @@ logging.getLogger('ansible-runner').addHandler(logging.NullHandler()) -def init_runner(**kwargs): +def init_runner(config: RunnerConfig, streamer: str): ''' Initialize the Runner() instance @@ -51,170 +53,120 @@ def init_runner(**kwargs): See parameters given to :py:func:`ansible_runner.interface.run` ''' - # Handle logging first thing - debug = kwargs.pop('debug', None) - logfile = kwargs.pop('logfile', None) - - if not kwargs.pop("ignore_logging", True): - output.configure() - if debug in (True, False): - output.set_debug('enable' if debug is True else 'disable') - - if logfile: - output.set_logfile(logfile) - # If running via the transmit-worker-process method, we must only extract things as read-only # inside of one of these commands. That could be either transmit or worker. 
- if kwargs.get('streamer') not in ('worker', 'process'): - dump_artifacts(kwargs) + if streamer not in ('worker', 'process'): + dump_artifacts(config) - if kwargs.get('streamer'): + if streamer: # undo any full paths that were dumped by dump_artifacts above in the streamer case - private_data_dir = kwargs['private_data_dir'] + private_data_dir = config.private_data_dir or "" project_dir = os.path.join(private_data_dir, 'project') - playbook_path = kwargs.get('playbook') or '' - if os.path.isabs(playbook_path) and playbook_path.startswith(project_dir): - kwargs['playbook'] = os.path.relpath(playbook_path, project_dir) + playbook_path = config.playbook or '' + if isinstance(playbook_path, str) and os.path.isabs(playbook_path) and playbook_path.startswith(project_dir): + config.playbook = os.path.relpath(playbook_path, project_dir) - inventory_path = kwargs.get('inventory') or '' - if os.path.isabs(inventory_path) and inventory_path.startswith(private_data_dir): - kwargs['inventory'] = os.path.relpath(inventory_path, private_data_dir) + inventory_path = config.inventory or '' + if isinstance(inventory_path, str) and os.path.isabs(inventory_path) and inventory_path.startswith(private_data_dir): + config.inventory = os.path.relpath(inventory_path, private_data_dir) - roles_path = kwargs.get('envvars', {}).get('ANSIBLE_ROLES_PATH') or '' + if config.envvars is None: + config.envvars = {} + roles_path = config.envvars.get('ANSIBLE_ROLES_PATH') or '' if os.path.isabs(roles_path) and roles_path.startswith(private_data_dir): - kwargs['envvars']['ANSIBLE_ROLES_PATH'] = os.path.relpath(roles_path, private_data_dir) + config.envvars['ANSIBLE_ROLES_PATH'] = os.path.relpath(roles_path, private_data_dir) - event_callback_handler = kwargs.pop('event_handler', None) - status_callback_handler = kwargs.pop('status_handler', None) - artifacts_handler = kwargs.pop('artifacts_handler', None) - cancel_callback = kwargs.pop('cancel_callback', None) - if cancel_callback is None: + if config.cancel_callback is None: # attempt to load signal handler. 
# will return None if we are not in the main thread - cancel_callback = signal_handler() - finished_callback = kwargs.pop('finished_callback', None) + config.cancel_callback = signal_handler() - streamer = kwargs.pop('streamer', None) - if streamer: - if streamer == 'transmit': - stream_transmitter = Transmitter(**kwargs) - return stream_transmitter - - if streamer == 'worker': - stream_worker = Worker(**kwargs) - return stream_worker - - if streamer == 'process': - stream_processor = Processor(event_handler=event_callback_handler, - status_handler=status_callback_handler, - artifacts_handler=artifacts_handler, - cancel_callback=cancel_callback, - finished_callback=finished_callback, - **kwargs) - return stream_processor - - if kwargs.get("process_isolation", False): - pi_executable = kwargs.get("process_isolation_executable", "podman") + if streamer == 'transmit': + stream_transmitter = Transmitter(config) + return stream_transmitter + + if streamer == 'worker': + stream_worker = Worker(config) + return stream_worker + + if streamer == 'process': + stream_processor = Processor(config) + return stream_processor + + if config.process_isolation: + pi_executable = config.process_isolation_executable if not check_isolation_executable_installed(pi_executable): print(f'Unable to find process isolation executable: {pi_executable}') sys.exit(1) - kwargs.pop('_input', None) - kwargs.pop('_output', None) - rc = RunnerConfig(**kwargs) - rc.prepare() + config.prepare() - return Runner(rc, - event_handler=event_callback_handler, - status_handler=status_callback_handler, - artifacts_handler=artifacts_handler, - cancel_callback=cancel_callback, - finished_callback=finished_callback) + return Runner(config, + event_handler=config.event_handler, + status_handler=config.status_handler, + artifacts_handler=config.artifacts_handler, + cancel_callback=config.cancel_callback, + finished_callback=config.finished_callback) -def run(**kwargs): - ''' - Run an Ansible Runner task in the foreground and return a Runner object when complete. +def run(config: RunnerConfig | None = None, + streamer: str = "", + debug: bool = False, + logfile: str = "", + ignore_logging: bool = True, + **kwargs): + """Run an Ansible Runner task in the foreground and return a Runner object when complete. - :param str private_data_dir: The directory containing all runner metadata needed to invoke the runner - module. Output artifacts will also be stored here for later consumption. - :param str ident: The run identifier for this invocation of Runner. Will be used to create and name - the artifact directory holding the results of the invocation. - :param bool json_mode: Store event data in place of stdout on the console and in the stdout file - :param str or list playbook: The playbook (either a list or dictionary of plays, or as a path relative to - ``private_data_dir/project``) that will be invoked by runner when executing Ansible. - :param str module: The module that will be invoked in ad-hoc mode by runner when executing Ansible. - :param str module_args: The module arguments that will be supplied to ad-hoc mode. - :param str host_pattern: The host pattern to match when running in ad-hoc mode. - :param str or dict or list inventory: Overrides the inventory directory/file (supplied at ``private_data_dir/inventory``) with - a specific host or list of hosts. 
This can take the form of: - - - Path to the inventory file in the ``private_data_dir/inventory`` directory or - an absolute path to the inventory file - - Native python dict supporting the YAML/json inventory structure - - A text INI formatted string - - A list of inventory sources, or an empty list to disable passing inventory - - :param str role: Name of the role to execute. - :param str or list roles_path: Directory or list of directories to assign to ANSIBLE_ROLES_PATH - :param dict envvars: Environment variables to be used when running Ansible. Environment variables will also be - read from ``env/envvars`` in ``private_data_dir`` - :param dict extravars: Extra variables to be passed to Ansible at runtime using ``-e``. Extra vars will also be - read from ``env/extravars`` in ``private_data_dir``. - :param dict passwords: A dictionary containing password prompt patterns and response values used when processing output from - Ansible. Passwords will also be read from ``env/passwords`` in ``private_data_dir``. - :param dict settings: A dictionary containing settings values for the ``ansible-runner`` runtime environment. These will also - be read from ``env/settings`` in ``private_data_dir``. - :param str ssh_key: The ssh private key passed to ``ssh-agent`` as part of the ansible-playbook run. - :param str cmdline: Command line options passed to Ansible read from ``env/cmdline`` in ``private_data_dir`` - :param bool suppress_env_files: Disable the writing of files into the ``env`` which may store sensitive information - :param str limit: Matches ansible's ``--limit`` parameter to further constrain the inventory to be used - :param int forks: Control Ansible parallel concurrency - :param int verbosity: Control how verbose the output of ansible-playbook is - :param bool quiet: Disable all output - :param str artifact_dir: The path to the directory where artifacts should live, this defaults to 'artifacts' under the private data dir - :param str project_dir: The path to the playbook content, this defaults to 'project' within the private data dir - :param int rotate_artifacts: Keep at most n artifact directories, disable with a value of 0 which is the default - :param int timeout: The timeout value in seconds that will be passed to either ``pexpect`` of ``subprocess`` invocation - (based on ``runner_mode`` selected) while executing command. It the timeout is triggered it will force cancel the - execution. - :param str streamer: Optionally invoke ansible-runner as one of the steps in the streaming pipeline - :param io.FileIO _input: An optional file or file-like object for use as input in a streaming pipeline - :param io.FileIO _output: An optional file or file-like object for use as output in a streaming pipeline - :param Callable event_handler: An optional callback that will be invoked any time an event is received by Runner itself, return True to keep the event - :param Callable cancel_callback: An optional callback that can inform runner to cancel (returning True) or not (returning False) - :param Callable finished_callback: An optional callback that will be invoked at shutdown after process cleanup. - :param Callable status_handler: An optional callback that will be invoked any time the status changes (e.g...started, running, failed, successful, timeout) - :param Callable artifacts_handler: An optional callback that will be invoked at the end of the run to deal with the artifacts from the run. - :param bool process_isolation: Enable process isolation, using either a container engine (e.g. 
podman) or a sandbox (e.g. bwrap).
-    :param str process_isolation_executable: Process isolation executable or container engine used to isolate execution. (default: podman)
-    :param str process_isolation_path: Path that an isolated playbook run will use for staging. (default: /tmp)
-    :param str or list process_isolation_hide_paths: A path or list of paths on the system that should be hidden from the playbook run.
-    :param str or list process_isolation_show_paths: A path or list of paths on the system that should be exposed to the playbook run.
-    :param str or list process_isolation_ro_paths: A path or list of paths on the system that should be exposed to the playbook run as read-only.
-    :param str container_image: Container image to use when running an ansible task
-    :param list container_volume_mounts: List of bind mounts in the form 'host_dir:/container_dir. (default: None)
-    :param list container_options: List of container options to pass to execution engine.
-    :param str directory_isolation_base_path: An optional path will be used as the base path to create a temp directory, the project contents will be
-        copied to this location which will then be used as the working directory during playbook execution.
-    :param str fact_cache: A string that will be used as the name for the subdirectory of the fact cache in artifacts directory.
-        This is only used for 'jsonfile' type fact caches.
-    :param str fact_cache_type: A string of the type of fact cache to use. Defaults to 'jsonfile'.
-    :param bool omit_event_data: Omits extra ansible event data from event payload (stdout and event still included)
-    :param bool only_failed_event_data: Omits extra ansible event data unless it's a failed event (stdout and event still included)
-    :param bool check_job_event_data: Check if job events data is completely generated. If event data is not completely generated and if
-        value is set to 'True' it will raise 'AnsibleRunnerException' exception,
-        if set to 'False' it log a debug message and continue execution. Default value is 'False'
+
+    This function may be called in two different ways, both of which are equivalent. The first uses only
+    keyword arguments and does NOT specify a value for the ``config`` parameter.
+
+    .. code-block:: python
+
+        run(private_data_dir='/tmp/demo', playbook='test.yml')
+        run(private_data_dir='/tmp/streaming-demo', playbook='test.yml', streamer='transmit')
+
+    The second way uses a `RunnerConfig` object supplied in the ``config`` parameter for all available
+    run values. All other ``**kwargs`` keywords and their values are ignored.
+
+    .. code-block:: python
+
+        config = RunnerConfig(private_data_dir='/tmp/demo', playbook='test.yml')
+        run(config=config)
+
+        streaming_config = RunnerConfig(private_data_dir='/tmp/streaming-demo', playbook='test.yml')
+        run(config=streaming_config, streamer='transmit')
+
+    :param RunnerConfig config: A configuration object describing the run characteristics.
+    :param str streamer: Optionally invoke ansible-runner as one of the steps in the streaming pipeline.
+        Valid values are ``transmit``, ``worker``, or ``process``.
+    :param bool debug: If ``True``, debugging output from ansible-runner is enabled.
+    :param str logfile: Path to a file where logging output will be sent.
+    :param bool ignore_logging: If ``True``, ansible-runner log output will be disabled.
+    :param dict kwargs: Keyword arguments that match the parameters for a `RunnerConfig` object.
    :returns: A :py:class:`ansible_runner.runner.Runner` object, or a simple object containing ``rc`` if run remotely
-    '''
-    r = init_runner(**kwargs)
+    """
+
+    # Initialize logging
+    if not ignore_logging:
+        output.configure(debug, logfile)
+
+    if not config:
+        config = RunnerConfig(**kwargs)
+
+    r = init_runner(config=config, streamer=streamer)
     r.run()
     return r


-def run_async(**kwargs):
+def run_async(
+        config: RunnerConfig | None = None,
+        streamer: str = "",
+        debug: bool = False,
+        logfile: str = "",
+        ignore_logging: bool = True,
+        **kwargs):
     '''
     Runs an Ansible Runner task in the background which will start immediately. Returns the thread object and a Runner object.
@@ -222,7 +174,15 @@ def run_async(**kwargs):
     :returns: A tuple containing a :py:class:`threading.Thread` object and a :py:class:`ansible_runner.runner.Runner` object
     '''
-    r = init_runner(**kwargs)
+
+    # Initialize logging
+    if not ignore_logging:
+        output.configure(debug, logfile)
+
+    if not config:
+        config = RunnerConfig(**kwargs)
+
+    r = init_runner(config=config, streamer=streamer)
     runner_thread = threading.Thread(target=r.run)
     runner_thread.start()
     return runner_thread, r
diff --git a/src/ansible_runner/output.py b/src/ansible_runner/output.py
index e3e15d3e6..0d2a6d071 100644
--- a/src/ansible_runner/output.py
+++ b/src/ansible_runner/output.py
@@ -64,7 +64,7 @@ def set_traceback(value: str) -> None:
     TRACEBACK_ENABLED = value.lower() == 'enable'


-def configure() -> None:
+def configure(enable_debug: bool, logfile: str) -> None:
     '''
     Configures the logging facility
@@ -89,3 +89,7 @@
     formatter = logging.Formatter('%(message)s')
     stdout_handler.setFormatter(formatter)
     _display_logger.addHandler(stdout_handler)
+
+    set_debug('enable' if enable_debug is True else 'disable')
+    if logfile:
+        set_logfile(logfile)
diff --git a/src/ansible_runner/runner.py b/src/ansible_runner/runner.py
index 062205476..06e4ad764 100644
--- a/src/ansible_runner/runner.py
+++ b/src/ansible_runner/runner.py
@@ -45,7 +45,7 @@ def __init__(self, config, cancel_callback=None, remove_partials=True, event_han
         # default runner mode to pexpect
         self.runner_mode = self.config.runner_mode if hasattr(self.config, 'runner_mode') else 'pexpect'
-        self.directory_isolation_path = self.config.directory_isolation_path if hasattr(self.config, 'directory_isolation_path') else None
+        self.directory_isolation_path = self.config.directory_isolation_base_path if hasattr(self.config, 'directory_isolation_base_path') else None
         self.directory_isolation_cleanup = self.config.directory_isolation_cleanup if hasattr(self.config, 'directory_isolation_cleanup') else None
         self.process_isolation = self.config.process_isolation if hasattr(self.config, 'process_isolation') else None
         self.process_isolation_path_actual = self.config.process_isolation_path_actual if hasattr(self.config, 'process_isolation_path_actual') else None
diff --git a/src/ansible_runner/streaming.py b/src/ansible_runner/streaming.py
index bf205d22b..ab1d8bf40 100644
--- a/src/ansible_runner/streaming.py
+++ b/src/ansible_runner/streaming.py
@@ -5,7 +5,6 @@
 import os
 import stat
 import sys
-import tempfile
 import uuid
 import traceback
@@ -14,10 +13,10 @@
 from threading import Event, RLock, Thread

 import ansible_runner
+from ansible_runner.config.runner import RunnerConfig
 from ansible_runner.exceptions import ConfigurationError
 from ansible_runner.loader import ArtifactLoader
 import ansible_runner.plugins
-from ansible_runner.utils import register_for_cleanup
 from
ansible_runner.utils.streaming import stream_dir, unstream_dir @@ -37,16 +36,13 @@ def __init__(self, settings): class Transmitter: - def __init__(self, _output=None, **kwargs): - if _output is None: - _output = sys.stdout.buffer - self._output = _output - self.private_data_dir = os.path.abspath(kwargs.pop('private_data_dir')) - self.only_transmit_kwargs = kwargs.pop('only_transmit_kwargs', False) - if 'keepalive_seconds' in kwargs: - kwargs.pop('keepalive_seconds') # don't confuse older runners with this Worker-only arg + def __init__(self, config: RunnerConfig): + self._output = config.output if config.output else sys.stdout.buffer - self.kwargs = kwargs + self.private_data_dir = os.path.abspath(config.private_data_dir) if config.private_data_dir else "" + self.only_transmit_kwargs = config.only_transmit_kwargs + + self.kwargs = config.streamable_attributes() self.status = "unstarted" self.rc = None @@ -69,12 +65,11 @@ def run(self): class Worker: - def __init__(self, _input=None, _output=None, keepalive_seconds: float | None = None, **kwargs): - if _input is None: - _input = sys.stdin.buffer - if _output is None: - _output = sys.stdout.buffer + def __init__(self, config: RunnerConfig): + self._input = config.input if config.input else sys.stdin.buffer + self._output = config.output if config.output else sys.stdout.buffer + keepalive_seconds: float | int | None = config.keepalive_seconds if keepalive_seconds is None: # if we didn't get an explicit int value, fall back to envvar # FIXME: emit/log a warning and silently continue if this value won't parse keepalive_seconds = float(os.environ.get('ANSIBLE_RUNNER_KEEPALIVE_SECONDS', 0)) @@ -84,17 +79,10 @@ def __init__(self, _input=None, _output=None, keepalive_seconds: float | None = self._output_event = Event() self._output_lock = RLock() - self._input = _input - self._output = _output - - self.kwargs = kwargs + self.kwargs = config.streamable_attributes() self.job_kwargs = None - private_data_dir = kwargs.get('private_data_dir') - if private_data_dir is None: - private_data_dir = tempfile.mkdtemp() - register_for_cleanup(private_data_dir) - self.private_data_dir = private_data_dir + self.private_data_dir = config.private_data_dir self.status = "unstarted" self.rc = None @@ -250,43 +238,34 @@ def finished_callback(self, runner_obj): class Processor: - def __init__(self, _input=None, status_handler=None, event_handler=None, - artifacts_handler=None, cancel_callback=None, finished_callback=None, **kwargs): - if _input is None: - _input = sys.stdin.buffer - self._input = _input - - self.quiet = kwargs.get('quiet') - - private_data_dir = kwargs.get('private_data_dir') - if private_data_dir is None: - private_data_dir = tempfile.mkdtemp() - self.private_data_dir = private_data_dir + def __init__(self, config: RunnerConfig): + self._input = config.input if config.input else sys.stdin.buffer + + self.quiet = config.quiet + + self.private_data_dir: str = config.private_data_dir or '' self._loader = ArtifactLoader(self.private_data_dir) - settings = kwargs.get('settings') + settings = config.settings if settings is None: try: - settings = self._loader.load_file('env/settings', Mapping) + settings = self._loader.load_file('env/settings', Mapping) # type: ignore except ConfigurationError: settings = {} self.config = MockConfig(settings) - if kwargs.get('artifact_dir'): - self.artifact_dir = os.path.abspath(kwargs.get('artifact_dir')) - else: - project_artifacts = os.path.abspath(os.path.join(self.private_data_dir, 'artifacts')) - if ident := 
kwargs.get('ident'):
-            self.artifact_dir = os.path.join(project_artifacts, str(ident))
-        else:
-            self.artifact_dir = project_artifacts
+        self.artifact_dir = config.artifact_dir
+        if self.artifact_dir and not config.ident_set_by_user:
+            # If an ident value was not explicitly supplied, don't use a subdirectory
+            # named with the auto-generated ident value; strip it so artifacts land
+            # directly in the artifacts directory.
+            self.artifact_dir, _ = os.path.split(self.artifact_dir)

-        self.status_handler = status_handler
-        self.event_handler = event_handler
-        self.artifacts_handler = artifacts_handler
+        self.status_handler = config.status_handler
+        self.event_handler = config.event_handler
+        self.artifacts_handler = config.artifacts_handler

-        self.cancel_callback = cancel_callback  # FIXME: unused
-        self.finished_callback = finished_callback
+        self.cancel_callback = config.cancel_callback  # FIXME: unused
+        self.finished_callback = config.finished_callback

         self.status = "unstarted"
         self.rc = None
diff --git a/src/ansible_runner/utils/__init__.py b/src/ansible_runner/utils/__init__.py
index f4a661ee5..85ff60da8 100644
--- a/src/ansible_runner/utils/__init__.py
+++ b/src/ansible_runner/utils/__init__.py
@@ -6,10 +6,7 @@
 import re
 import os
 import stat
-import fcntl
 import shutil
-import hashlib
-import tempfile
 import subprocess
 import base64
 import threading
@@ -120,55 +117,6 @@ def check_isolation_executable_installed(isolation_executable: str) -> bool:
     return False


-def dump_artifact(obj: str,
-                  path: str,
-                  filename: str | None = None
-                  ) -> str:
-    '''
-    Write the artifact to disk at the specified path
-
-    :param str obj: The string object to be dumped to disk in the specified
-        path. The artifact filename will be automatically created.
-    :param str path: The full path to the artifacts data directory.
-    :param str filename: The name of file to write the artifact to.
-        If the filename is not provided, then one will be generated.
-
-    :return: The full path filename for the artifact that was generated.
- ''' - if not os.path.exists(path): - os.makedirs(path, mode=0o700) - - p_sha1 = hashlib.sha1() - p_sha1.update(obj.encode(encoding='UTF-8')) - - if filename is None: - _, fn = tempfile.mkstemp(dir=path) - else: - fn = os.path.join(path, filename) - - if os.path.exists(fn): - c_sha1 = hashlib.sha1() - with open(fn) as f: - contents = f.read() - c_sha1.update(contents.encode(encoding='UTF-8')) - - if not os.path.exists(fn) or p_sha1.hexdigest() != c_sha1.hexdigest(): - lock_fp = os.path.join(path, '.artifact_write_lock') - lock_fd = os.open(lock_fp, os.O_RDWR | os.O_CREAT, stat.S_IRUSR | stat.S_IWUSR) - fcntl.lockf(lock_fd, fcntl.LOCK_EX) - - try: - with open(fn, 'w') as f: - os.chmod(fn, stat.S_IRUSR | stat.S_IWUSR) - f.write(str(obj)) - finally: - fcntl.lockf(lock_fd, fcntl.LOCK_UN) - os.close(lock_fd) - os.remove(lock_fp) - - return fn - - def cleanup_artifact_dir(path: str, num_keep: int = 0) -> None: # 0 disables artifact dir cleanup/rotation if num_keep < 1: @@ -182,80 +130,6 @@ def cleanup_artifact_dir(path: str, num_keep: int = 0) -> None: shutil.rmtree(all_paths[f]) -def dump_artifacts(kwargs: dict) -> None: - ''' - Introspect the kwargs and dump objects to disk - ''' - private_data_dir = kwargs.get('private_data_dir') - if not private_data_dir: - private_data_dir = tempfile.mkdtemp() - kwargs['private_data_dir'] = private_data_dir - - if not os.path.exists(private_data_dir): - raise ValueError('private_data_dir path is either invalid or does not exist') - - if 'role' in kwargs: - role = {'name': kwargs.pop('role')} - if 'role_vars' in kwargs: - role['vars'] = kwargs.pop('role_vars') - - play = [{'hosts': kwargs.pop('hosts', 'all'), 'roles': [role]}] - - if kwargs.pop('role_skip_facts', False): - play[0]['gather_facts'] = False - - kwargs['playbook'] = play - - if 'envvars' not in kwargs: - kwargs['envvars'] = {} - - roles_path = kwargs.pop('roles_path', None) - if not roles_path: - roles_path = os.path.join(private_data_dir, 'roles') - else: - roles_path += f":{os.path.join(private_data_dir, 'roles')}" - - kwargs['envvars']['ANSIBLE_ROLES_PATH'] = roles_path - - playbook = kwargs.get('playbook') - if playbook: - # Ensure the play is a list of dictionaries - if isinstance(playbook, MutableMapping): - playbook = [playbook] - - if isplaybook(playbook): - path = os.path.join(private_data_dir, 'project') - kwargs['playbook'] = dump_artifact(json.dumps(playbook), path, 'main.json') - - obj = kwargs.get('inventory') - if obj and isinventory(obj): - path = os.path.join(private_data_dir, 'inventory') - if isinstance(obj, MutableMapping): - kwargs['inventory'] = dump_artifact(json.dumps(obj), path, 'hosts.json') - elif isinstance(obj, str): - if not os.path.exists(os.path.join(path, obj)): - kwargs['inventory'] = dump_artifact(obj, path, 'hosts') - elif os.path.isabs(obj): - kwargs['inventory'] = obj - else: - kwargs['inventory'] = os.path.join(path, obj) - - if not kwargs.get('suppress_env_files', False): - for key in ('envvars', 'extravars', 'passwords', 'settings'): - obj = kwargs.get(key) - if obj and not os.path.exists(os.path.join(private_data_dir, 'env', key)): - path = os.path.join(private_data_dir, 'env') - dump_artifact(json.dumps(obj), path, key) - kwargs.pop(key) - - for key in ('ssh_key', 'cmdline'): - obj = kwargs.get(key) - if obj and not os.path.exists(os.path.join(private_data_dir, 'env', key)): - path = os.path.join(private_data_dir, 'env') - dump_artifact(str(kwargs[key]), path, key) - kwargs.pop(key) - - def collect_new_events(event_path: str, old_events: dict) -> 
Iterator[tuple[dict, dict]]: ''' Collect new events for the 'events' generator property diff --git a/src/ansible_runner/utils/streaming.py b/src/ansible_runner/utils/streaming.py index 0c004c8ac..236864cc6 100644 --- a/src/ansible_runner/utils/streaming.py +++ b/src/ansible_runner/utils/streaming.py @@ -1,6 +1,5 @@ # pylint: disable=R0914 -import io import time import tempfile import zipfile @@ -9,11 +8,12 @@ import sys import stat from pathlib import Path +from typing import BinaryIO from .base64io import Base64IO -def stream_dir(source_directory: str, stream: io.FileIO) -> None: +def stream_dir(source_directory: str, stream: BinaryIO) -> None: with tempfile.NamedTemporaryFile() as tmp: with zipfile.ZipFile( tmp.name, "w", compression=zipfile.ZIP_DEFLATED, allowZip64=True, strict_timestamps=False @@ -60,7 +60,7 @@ def stream_dir(source_directory: str, stream: io.FileIO) -> None: encoded_target.write(line) -def unstream_dir(stream: io.FileIO, length: int, target_directory: str) -> None: +def unstream_dir(stream: BinaryIO, length: int, target_directory: str) -> None: # NOTE: caller needs to process exceptions with tempfile.NamedTemporaryFile() as tmp: with open(tmp.name, "wb") as target: diff --git a/test/integration/test_display_callback.py b/test/integration/test_display_callback.py index 28da9c8a2..bac60122f 100644 --- a/test/integration/test_display_callback.py +++ b/test/integration/test_display_callback.py @@ -6,6 +6,7 @@ import pytest +from ansible_runner import RunnerConfig from ansible_runner.interface import init_runner @@ -29,13 +30,15 @@ def executor(tmp_path, request): inventory = 'localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"' - r = init_runner( - private_data_dir=private_data_dir, + rc = RunnerConfig( + private_data_dir=str(private_data_dir), inventory=inventory, envvars=envvars, playbook=yaml.safe_load(playbook) ) + r = init_runner(rc, '') + return r @@ -355,12 +358,14 @@ def test_output_when_given_invalid_playbook(tmp_path): # # But no test validated it. This does that. 
private_data_dir = str(tmp_path) - ex = init_runner( + + rc = RunnerConfig( private_data_dir=private_data_dir, inventory='localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"', envvars={"ANSIBLE_DEPRECATION_WARNINGS": "False"}, playbook=os.path.join(private_data_dir, 'fake_playbook.yml') ) + ex = init_runner(rc, '') ex.run() with ex.stdout as f: @@ -392,11 +397,12 @@ def test_output_when_given_non_playbook_script(tmp_path): with open(os.path.join(private_data_dir, "env", "settings"), 'w') as settings_file: settings_file.write("pexpect_timeout: 0.2") - ex = init_runner( + rc = RunnerConfig( private_data_dir=private_data_dir, inventory='localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"', envvars={"ANSIBLE_DEPRECATION_WARNINGS": "False"} ) + ex = init_runner(rc, '') ex.run() diff --git a/test/integration/test_runner.py b/test/integration/test_runner.py index e00112b08..fb37bac55 100644 --- a/test/integration/test_runner.py +++ b/test/integration/test_runner.py @@ -271,7 +271,7 @@ def test_set_extra_vars(rc): rc.module = "debug" rc.module_args = "var=test_extra_vars" rc.host_pattern = "localhost" - rc.extra_vars = {'test_extra_vars': 'hello there'} + rc.extravars = {'test_extra_vars': 'hello there'} rc.prepare() runner = Runner(config=rc) runner.run() diff --git a/test/integration/test_transmit_worker_process.py b/test/integration/test_transmit_worker_process.py index cc70a6ac7..a61ae6601 100644 --- a/test/integration/test_transmit_worker_process.py +++ b/test/integration/test_transmit_worker_process.py @@ -9,7 +9,8 @@ import pytest -from ansible_runner import run +from ansible_runner.config.runner import RunnerConfig +from ansible_runner.exceptions import ConfigurationError from ansible_runner.streaming import Transmitter, Worker, Processor import ansible_runner.interface # AWX import pattern @@ -26,7 +27,7 @@ def status_handler(self, status_data, runner_config=None): # pylint: disable=W0 self.status_data = status_data def get_job_kwargs(self, job_type): - """For this test scenaro, the ansible-runner interface kwargs""" + """For this test scenario, the ansible-runner interface kwargs""" if job_type == 'run': job_kwargs = {'playbook': 'debug.yml'} else: @@ -72,41 +73,42 @@ def test_remote_job_interface(self, tmp_path, project_fixtures, job_type): outgoing_buffer_file = tmp_path / 'buffer_out' outgoing_buffer_file.touch() - outgoing_buffer = outgoing_buffer_file.open('b+r') - transmitter = Transmitter(_output=outgoing_buffer, private_data_dir=transmit_dir, **job_kwargs) + with outgoing_buffer_file.open('b+r') as outgoing_buffer: + config = RunnerConfig(private_data_dir=str(transmit_dir), _output=outgoing_buffer, **job_kwargs) + transmitter = Transmitter(config) - for key, value in job_kwargs.items(): - assert transmitter.kwargs.get(key, '') == value + for key, value in job_kwargs.items(): + assert transmitter.kwargs.get(key, '') == value - status, rc = transmitter.run() - assert rc in (None, 0) - assert status == 'unstarted' + status, rc = transmitter.run() + assert rc in (None, 0) + assert status == 'unstarted' - outgoing_buffer.seek(0) - sent = outgoing_buffer.read() - assert sent # should not be blank at least - assert b'zipfile' in sent + outgoing_buffer.seek(0) + sent = outgoing_buffer.read() + assert sent # should not be blank at least + assert b'zipfile' in sent - incoming_buffer_file = tmp_path / 'buffer_in' - incoming_buffer_file.touch() - incoming_buffer = incoming_buffer_file.open('b+r') + 
incoming_buffer_file = tmp_path / 'buffer_in' + incoming_buffer_file.touch() - outgoing_buffer.seek(0) + with incoming_buffer_file.open('b+r') as incoming_buffer: + outgoing_buffer.seek(0) - worker = Worker(_input=outgoing_buffer, _output=incoming_buffer, private_data_dir=worker_dir) - worker.run() + rc = RunnerConfig(private_data_dir=str(worker_dir), _input=outgoing_buffer, _output=incoming_buffer) + worker = Worker(rc) + worker.run() - outgoing_buffer.seek(0) - assert set(os.listdir(worker_dir)) == {'artifacts', 'inventory', 'project', 'env'}, outgoing_buffer.read() + outgoing_buffer.seek(0) + assert set(os.listdir(worker_dir)) == {'artifacts', 'inventory', 'project', 'env'}, outgoing_buffer.read() - incoming_buffer.seek(0) # again, be kind, rewind + incoming_buffer.seek(0) # again, be kind, rewind - processor = Processor(_input=incoming_buffer, private_data_dir=process_dir) - processor.run() + rc = RunnerConfig(private_data_dir=str(process_dir), _input=incoming_buffer) + processor = Processor(rc) + processor.run() - outgoing_buffer.close() - incoming_buffer.close() self.check_artifacts(str(process_dir), job_type) @pytest.mark.parametrize("keepalive_setting", [ @@ -141,20 +143,24 @@ def test_keepalive_setting(self, tmp_path, project_fixtures, keepalive_setting): for buffer in (outgoing_buffer, incoming_buffer): buffer.name = 'foo' - status, rc = Transmitter( - _output=outgoing_buffer, private_data_dir=project_fixtures / 'sleep', - playbook='sleep.yml', extravars={'sleep_interval': 2}, verbosity=verbosity - ).run() + config = RunnerConfig(private_data_dir=project_fixtures / 'sleep', + playbook='sleep.yml', + extravars={'sleep_interval': 2}, + verbosity=verbosity, + only_transmit_kwargs=False, + _output=outgoing_buffer, + ) + + status, rc = Transmitter(config).run() assert rc in (None, 0) assert status == 'unstarted' outgoing_buffer.seek(0) worker_start_time = time.time() - worker = Worker( - _input=outgoing_buffer, _output=incoming_buffer, private_data_dir=worker_dir, - keepalive_seconds=keepalive_setting - ) + rc = RunnerConfig(private_data_dir=str(worker_dir), keepalive_seconds=keepalive_setting, + _input=outgoing_buffer, _output=incoming_buffer) + worker = Worker(rc) worker.run() assert time.time() - worker_start_time > 2.0 # task sleeps for 2 second @@ -164,7 +170,8 @@ def test_keepalive_setting(self, tmp_path, project_fixtures, keepalive_setting): assert not worker._keepalive_thread.is_alive() # make sure it's dead incoming_buffer.seek(0) - Processor(_input=incoming_buffer, private_data_dir=process_dir).run() + rc = RunnerConfig(private_data_dir=str(process_dir), _input=incoming_buffer) + Processor(rc).run() stdout = self.get_stdout(process_dir) assert 'Sleep for a specified interval' in stdout @@ -256,6 +263,9 @@ def process_method(results_sockfile_read): break time.sleep(0.05) # additionally, AWX calls cancel_callback() + res = process_future.result() + assert res.status == 'successful' + for s in ( transmit_socket_write, transmit_socket_read, results_socket_write, results_socket_read, transmit_socket_write_file, transmit_socket_read_file, results_socket_write_file, results_socket_read_file, @@ -291,7 +301,7 @@ def test_process_isolation_executable_not_exist(self, tmp_path, mocker): **job_kwargs, ) - # valide process_isolation kwargs are passed to transmitter + # valid process_isolation kwargs are passed to transmitter assert transmitter.kwargs['process_isolation'] == job_kwargs['process_isolation'] assert transmitter.kwargs['process_isolation_executable'] == 
job_kwargs['process_isolation_executable'] @@ -329,7 +339,7 @@ def test_process_isolation_executable_not_exist(self, tmp_path, mocker): _output=incoming_buffer, private_data_dir=worker_dir, ) - assert exc.value.code == 1 + assert exc.value.code == 1 outgoing_buffer.close() incoming_buffer.close() @@ -340,8 +350,14 @@ def transmit_stream(project_fixtures, tmp_path): outgoing_buffer.touch() transmit_dir = project_fixtures / 'debug' + with outgoing_buffer.open('wb') as f: - transmitter = Transmitter(_output=f, private_data_dir=transmit_dir, playbook='debug.yml') + config = RunnerConfig(private_data_dir=str(transmit_dir), + playbook='debug.yml', + only_transmit_kwargs=False, + _output=f, + ) + transmitter = Transmitter(config) status, rc = transmitter.run() assert rc in (None, 0) @@ -358,7 +374,8 @@ def worker_stream(transmit_stream, tmp_path): # pylint: disable=W0621 worker_dir.mkdir() with transmit_stream.open('rb') as out: with ingoing_buffer.open('wb') as f: - worker = Worker(_input=out, _output=f, private_data_dir=worker_dir) + config = RunnerConfig(private_data_dir=str(worker_dir), _input=out, _output=f) + worker = Worker(config) status, rc = worker.run() assert rc in (None, 0) @@ -447,15 +464,15 @@ def test_missing_private_dir_transmit(): outgoing_buffer = io.BytesIO() # Transmit - with pytest.raises(ValueError) as excinfo: - run( + with pytest.raises(ConfigurationError) as excinfo: + ansible_runner.interface.run( streamer='transmit', _output=outgoing_buffer, private_data_dir='/foo/bar/baz', playbook='debug.yml', ) - assert "private_data_dir path is either invalid or does not exist" in str(excinfo.value) + assert "Unable to create private_data_dir /foo/bar/baz" in str(excinfo.value) def test_garbage_private_dir_worker(tmp_path): @@ -466,7 +483,7 @@ def test_garbage_private_dir_worker(tmp_path): outgoing_buffer = io.BytesIO() # Worker - run( + ansible_runner.interface.run( streamer='worker', _input=incoming_buffer, _output=outgoing_buffer, @@ -487,7 +504,7 @@ def test_unparsable_line_worker(tmp_path): outgoing_buffer = io.BytesIO() # Worker - run( + ansible_runner.interface.run( streamer='worker', _input=incoming_buffer, _output=outgoing_buffer, @@ -511,9 +528,9 @@ def status_receiver(status_data, runner_config): # pylint: disable=W0613 assert 'not-json-data with extra garbage:ffffffffff' in status_data['job_explanation'] assert len(status_data['job_explanation']) < 2000 - run( + ansible_runner.interface.run( streamer='process', _input=incoming_buffer, - private_data_dir=process_dir, + private_data_dir=str(process_dir), status_handler=status_receiver ) diff --git a/test/unit/config/test__base.py b/test/unit/config/test__base.py index 35df30e74..66569db5c 100644 --- a/test/unit/config/test__base.py +++ b/test/unit/config/test__base.py @@ -2,6 +2,7 @@ import os import re +import tempfile from functools import partial from test.utils.common import RSAKey @@ -22,6 +23,19 @@ def load_file_side_effect(path, value, *args, **kwargs): raise ConfigurationError +def test_base_config_empty_pvt_data_dir(): + """Make sure we create a pvt data dir even when not supplied""" + rc = BaseConfig() + tmpdir = tempfile.gettempdir() + assert tmpdir in rc.private_data_dir + + +def test_base_config_invalid_pvt_data_dir(): + """A ConfigurationError should be raised if we cannot create the requested pvt data dir""" + with pytest.raises(ConfigurationError, match="Unable to create private_data_dir"): + BaseConfig("/not/a/writable/path") + + def test_base_config_init_defaults(tmp_path): rc = 
BaseConfig(private_data_dir=tmp_path.as_posix()) assert rc.private_data_dir == tmp_path.as_posix() @@ -159,7 +173,7 @@ def test_prepare_env_settings(mocker): def test_prepare_env_sshkey_defaults(): rc = BaseConfig() rc.prepare_env() - assert rc.ssh_key_data is None + assert rc.ssh_key is None def test_prepare_env_sshkey(mocker): @@ -171,7 +185,7 @@ def test_prepare_env_sshkey(mocker): mocker.patch.object(rc.loader, 'load_file', side_effect=sshkey_side_effect) rc.prepare_env() - assert rc.ssh_key_data == rsa_private_key_value + assert rc.ssh_key == rsa_private_key_value def test_prepare_env_defaults(): @@ -191,7 +205,7 @@ def test_prepare_env_ansible_vars(mocker, tmp_path): artifact_dir = tmp_path.joinpath('some_artifacts') rc = BaseConfig(artifact_dir=artifact_dir.as_posix()) - rc.ssh_key_data = None + rc.ssh_key = None rc.env = {} rc.execution_mode = BaseExecutionMode.ANSIBLE_COMMANDS @@ -215,7 +229,7 @@ def test_prepare_with_ssh_key(mocker, tmp_path): rc.env = {} rc.execution_mode = BaseExecutionMode.ANSIBLE_COMMANDS rsa_key = RSAKey() - rc.ssh_key_data = rsa_key.private + rc.ssh_key = rsa_key.private rc.command = 'ansible-playbook' rc.cmdline_args = [] rc.prepare_env() diff --git a/test/unit/config/test_runner.py b/test/unit/config/test_runner.py index ea4ca97f4..949463e36 100644 --- a/test/unit/config/test_runner.py +++ b/test/unit/config/test_runner.py @@ -139,7 +139,7 @@ def test_prepare_env_extra_vars_defaults(mocker): rc = RunnerConfig('/') rc.prepare_env() - assert rc.extra_vars is None + assert rc.extravars is None def test_prepare_env_settings_defaults(mocker): @@ -169,7 +169,7 @@ def test_prepare_env_sshkey_defaults(mocker): rc = RunnerConfig('/') rc.prepare_env() - assert rc.ssh_key_data is None + assert rc.ssh_key is None def test_prepare_env_sshkey(mocker): @@ -184,7 +184,7 @@ def test_prepare_env_sshkey(mocker): mocker.patch.object(rc.loader, 'load_file', side_effect=sshkey_side_effect) rc.prepare_env() - assert rc.ssh_key_data == rsa_private_key_value + assert rc.ssh_key == rsa_private_key_value def test_prepare_env_defaults(mocker): @@ -206,7 +206,7 @@ def test_prepare_env_directory_isolation(mocker): path_exists.return_value = True rc = RunnerConfig('/') - rc.directory_isolation_path = '/tmp/foo' + rc.directory_isolation_base_path = '/tmp/foo' rc.prepare_env() assert rc.cwd == '/tmp/foo' @@ -235,11 +235,11 @@ def test_prepare_env_directory_isolation_from_settings(mocker, project_fixtures) assert os.path.exists(rc.project_dir) # `directory_isolation_path` should be used to create a new temp path underneath - assert rc.directory_isolation_path == '/tmp/runner/runner_di_XYZ' + assert rc.directory_isolation_base_path == '/tmp/runner/runner_di_XYZ' mkdtemp.assert_called_once_with(prefix='runner_di_', dir='/tmp/runner') # The project files should be copied to the isolation path. 
- copy_tree.assert_called_once_with(rc.project_dir, rc.directory_isolation_path, dirs_exist_ok=True, symlinks=True) + copy_tree.assert_called_once_with(rc.project_dir, rc.directory_isolation_base_path, dirs_exist_ok=True, symlinks=True) def test_prepare_inventory(mocker): @@ -282,7 +282,7 @@ def test_generate_ansible_command_extra_vars(mocker, extra_vars, expected): mocker.patch.object(rc.loader, 'isfile', side_effect=lambda x: True) - rc.extra_vars = extra_vars + rc.extravars = extra_vars cmd = rc.generate_ansible_command() assert cmd == expected @@ -293,15 +293,15 @@ def test_generate_ansible_command(mocker): rc = RunnerConfig(private_data_dir='/', playbook='main.yaml') rc.prepare_inventory() - rc.extra_vars = None + rc.extravars = None cmd = rc.generate_ansible_command() assert cmd == ['ansible-playbook', '-i', '/inventory', 'main.yaml'] - rc.extra_vars = {'test': 'key'} + rc.extravars = {'test': 'key'} cmd = rc.generate_ansible_command() assert cmd == ['ansible-playbook', '-i', '/inventory', '-e', '{"test":"key"}', 'main.yaml'] - rc.extra_vars = None + rc.extravars = None rc.inventory = "localhost," cmd = rc.generate_ansible_command() @@ -386,7 +386,7 @@ def test_generate_ansible_command_with_cmdline_args(cmdline, tokens, mocker): path_exists.return_value = True rc.prepare_inventory() - rc.extra_vars = {} + rc.extravars = {} cmdline_side_effect = partial(load_file_side_effect, 'env/cmdline', cmdline) mocker.patch.object(rc.loader, 'load_file', side_effect=cmdline_side_effect) @@ -421,7 +421,7 @@ def test_prepare_with_defaults(mocker): rc.prepare_env = mocker.Mock() rc.prepare_command = mocker.Mock() - rc.ssh_key_data = None + rc.ssh_key = None rc.artifact_dir = '/' rc.env = {} @@ -440,7 +440,7 @@ def test_prepare(mocker): rc = RunnerConfig('/') rc.prepare_inventory = mocker.Mock() rc.prepare_command = mocker.Mock() - rc.ssh_key_data = None + rc.ssh_key = None rc.artifact_dir = '/' rc.env = {} rc.execution_mode = ExecutionMode.ANSIBLE_PLAYBOOK @@ -470,13 +470,13 @@ def test_prepare_with_ssh_key(mocker): rc.wrap_args_with_ssh_agent = mocker.Mock() - rc.ssh_key_data = None + rc.ssh_key = None rc.artifact_dir = '/' rc.env = {} rc.execution_mode = ExecutionMode.ANSIBLE_PLAYBOOK rc.playbook = 'main.yaml' rsa_key = RSAKey() - rc.ssh_key_data = rsa_key.private + rc.ssh_key = rsa_key.private rc.command = 'ansible-playbook' mocker.patch.dict('os.environ', {'AWX_LIB_DIRECTORY': '/'}) @@ -583,7 +583,7 @@ def isfile(self, _): rc = RunnerConfig('/') rc.artifact_dir = tmp_path / 'artifacts' - rc.directory_isolation_path = tmp_path / 'dirisolation' + rc.directory_isolation_base_path = tmp_path / 'dirisolation' rc.playbook = 'main.yaml' rc.command = 'ansible-playbook' rc.process_isolation = True @@ -605,7 +605,7 @@ def isfile(self, _): '--symlink', 'usr/lib', '/lib', '--symlink', 'usr/lib64', '/lib64', '--bind', '/', '/', - '--chdir', os.path.realpath(rc.directory_isolation_path), + '--chdir', os.path.realpath(rc.directory_isolation_base_path), 'ansible-playbook', '-i', '/inventory', 'main.yaml', ] @@ -735,3 +735,27 @@ def test_containerization_settings(tmp_path, runtime, mocker): ['my_container', 'ansible-playbook', '-i', '/runner/inventory', 'main.yaml'] assert expected_command_start == rc.command + + +def test_streamable_attributes_all_defaults(): + """Test that all default values return an empty dict.""" + rc = RunnerConfig() + assert not rc.streamable_attributes() + + +def test_streamable_attributes_non_default(tmp_path): + """Test that non-default, streamable values are returned.""" + rc = 
RunnerConfig(private_data_dir=str(tmp_path), + keepalive_seconds=10, + host_pattern="hostA,", + json_mode=True, + playbook=[], + verbosity=3) + + # Don't expect private_data_dir or keepalive_seconds since they are not streamable. + # Don't expect playbook since it is an empty value. + assert rc.streamable_attributes() == { + "host_pattern": "hostA,", + "json_mode": True, + "verbosity": 3, + } diff --git a/test/unit/test_interface.py b/test/unit/test_interface.py index c8e734a29..596f19093 100644 --- a/test/unit/test_interface.py +++ b/test/unit/test_interface.py @@ -1,5 +1,6 @@ import pytest +from ansible_runner import RunnerConfig from ansible_runner.interface import init_runner @@ -7,18 +8,19 @@ def test_default_callback_set(mocker): mocker.patch('ansible_runner.interface.signal_handler', side_effect=AttributeError('Raised intentionally')) with pytest.raises(AttributeError, match='Raised intentionally'): - init_runner(ignore_logging=True) + init_runner(RunnerConfig(), "") def test_set_cancel_callback(mocker): mock_runner = mocker.patch('ansible_runner.interface.Runner', side_effect=AttributeError('Raised intentionally')) - mock_runner_config = mocker.patch('ansible_runner.interface.RunnerConfig') - mock_runner_config.prepare.return_value = None + mock_runner_config_prepare = mocker.patch('ansible_runner.interface.RunnerConfig.prepare') + mock_runner_config_prepare.return_value = None def custom_cancel_callback(): - return 'custom' + return True with pytest.raises(AttributeError, match='Raised intentionally'): - init_runner(ignore_logging=True, cancel_callback=custom_cancel_callback) + rc = RunnerConfig(cancel_callback=custom_cancel_callback) + init_runner(rc, "") assert mock_runner.call_args.kwargs['cancel_callback'] is custom_cancel_callback diff --git a/test/unit/test_streaming.py b/test/unit/test_streaming.py index c185cf0e4..dd10d0823 100644 --- a/test/unit/test_streaming.py +++ b/test/unit/test_streaming.py @@ -1,5 +1,6 @@ import os +from ansible_runner.config.runner import RunnerConfig from ansible_runner.streaming import Processor @@ -10,7 +11,8 @@ def test_artifact_dir_with_int_ident(self, tmp_path): 'private_data_dir': str(tmp_path), 'ident': 123, } - p = Processor(**kwargs) + rc = RunnerConfig(**kwargs) + p = Processor(rc) assert p.artifact_dir == os.path.join(kwargs['private_data_dir'], 'artifacts', str(kwargs['ident'])) diff --git a/test/unit/test_utils.py b/test/unit/test_utils.py index 6ad09a6cf..c45f94cf1 100644 --- a/test/unit/test_utils.py +++ b/test/unit/test_utils.py @@ -1,7 +1,7 @@ import os import stat -from ansible_runner.utils import dump_artifact +from ansible_runner._internal._dump_artifacts import dump_artifact def test_artifact_permissions(tmp_path): diff --git a/test/unit/utils/test_dump_artifacts.py b/test/unit/utils/test_dump_artifacts.py index ef5cfb400..8b8c30565 100644 --- a/test/unit/utils/test_dump_artifacts.py +++ b/test/unit/utils/test_dump_artifacts.py @@ -1,27 +1,7 @@ import pytest -from ansible_runner.utils import dump_artifacts - - -def test_dump_artifacts_private_data_dir_does_not_exists(): - data_dir = '/not/a/path' - kwargs = {'private_data_dir': data_dir} - - with pytest.raises(ValueError, match='invalid or does not exist'): - dump_artifacts(kwargs) - - assert kwargs['private_data_dir'] == data_dir - - -def test_dump_artifacts_private_data_dir_create_tempfile(mocker): - mocker.patch('ansible_runner.utils.os.path.exists', side_effect=AttributeError('Raised intentionally')) - mocker.patch('ansible_runner.utils.tempfile.mkdtemp', 
return_value='/tmp/dir') - - kwargs = {} - with pytest.raises(AttributeError, match='Raised intentionally'): - dump_artifacts(kwargs) - - assert kwargs['private_data_dir'] == '/tmp/dir' +from ansible_runner.config.runner import RunnerConfig +from ansible_runner._internal._dump_artifacts import dump_artifacts @pytest.mark.parametrize( @@ -31,20 +11,20 @@ def test_dump_artifacts_private_data_dir_create_tempfile(mocker): ) ) def test_dump_artifacts_playbook_object(mocker, playbook): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact', side_effect=AttributeError('Raised intentionally')) + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact', side_effect=AttributeError('Raised intentionally')) mocker.patch('ansible_runner.utils.isplaybook', return_value=True) playbook_string = '[{"playbook": [{"hosts": "all"}]}]' kwargs = {'private_data_dir': '/tmp', 'playbook': playbook} with pytest.raises(AttributeError, match='Raised intentionally'): - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) mock_dump_artifact.assert_called_once_with(playbook_string, '/tmp/project', 'main.json') def test_dump_artifacts_role(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') kwargs = { 'private_data_dir': '/tmp', @@ -52,14 +32,14 @@ def test_dump_artifacts_role(mocker): 'playbook': [{'playbook': [{'hosts': 'all'}]}], } - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) assert mock_dump_artifact.call_count == 2 mock_dump_artifact.assert_called_with('{"ANSIBLE_ROLES_PATH": "/tmp/roles"}', '/tmp/env', 'envvars') def test_dump_artifacts_roles_path(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') kwargs = { 'private_data_dir': '/tmp', @@ -68,14 +48,14 @@ def test_dump_artifacts_roles_path(mocker): 'playbook': [{'playbook': [{'hosts': 'all'}]}], } - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) assert mock_dump_artifact.call_count == 2 mock_dump_artifact.assert_called_with('{"ANSIBLE_ROLES_PATH": "/tmp/altrole:/tmp/roles"}', '/tmp/env', 'envvars') def test_dump_artifacts_role_vars(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact', side_effect=AttributeError('Raised intentionally')) + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact', side_effect=AttributeError('Raised intentionally')) kwargs = { 'private_data_dir': '/tmp', @@ -85,7 +65,7 @@ def test_dump_artifacts_role_vars(mocker): } with pytest.raises(AttributeError, match='Raised intentionally'): - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) mock_dump_artifact.assert_called_once_with( '[{"hosts": "all", "roles": [{"name": "test", "vars": {"name": "nginx"}}]}]', @@ -95,7 +75,7 @@ def test_dump_artifacts_role_vars(mocker): def test_dump_artifacts_role_skip_facts(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact', side_effect=AttributeError('Raised intentionally')) + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact', side_effect=AttributeError('Raised intentionally')) kwargs = { 'private_data_dir': '/tmp', @@ -105,7 +85,7 @@ def test_dump_artifacts_role_skip_facts(mocker): } with pytest.raises(AttributeError, match='Raised intentionally'): - 
dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) mock_dump_artifact.assert_called_once_with( '[{"hosts": "all", "roles": [{"name": "test"}], "gather_facts": false}]', @@ -115,21 +95,21 @@ def test_dump_artifacts_role_skip_facts(mocker): def test_dump_artifacts_inventory_string(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') inv = '[all]\nlocalhost' kwargs = {'private_data_dir': '/tmp', 'inventory': inv} - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) mock_dump_artifact.assert_called_once_with(inv, '/tmp/inventory', 'hosts') def test_dump_artifacts_inventory_path(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') inv = '/tmp' kwargs = {'private_data_dir': '/tmp', 'inventory': inv} - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) assert mock_dump_artifact.call_count == 0 assert mock_dump_artifact.called is False @@ -137,12 +117,12 @@ def test_dump_artifacts_inventory_path(mocker): def test_dump_artifacts_inventory_object(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') inv = {'foo': 'bar'} inv_string = '{"foo": "bar"}' kwargs = {'private_data_dir': '/tmp', 'inventory': inv} - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) mock_dump_artifact.assert_called_once_with(inv_string, '/tmp/inventory', 'hosts.json') @@ -152,9 +132,10 @@ def test_dump_artifacts_inventory_string_path(mocker): inv_string = 'site1' kwargs = {'private_data_dir': '/tmp', 'inventory': inv_string} - dump_artifacts(kwargs) + rc = RunnerConfig(**kwargs) + dump_artifacts(rc) - assert kwargs['inventory'] == '/tmp/inventory/site1' + assert rc.inventory == '/tmp/inventory/site1' def test_dump_artifacts_inventory_string_abs_path(mocker): @@ -162,13 +143,14 @@ def test_dump_artifacts_inventory_string_abs_path(mocker): inv_string = '/tmp/site1' kwargs = {'private_data_dir': '/tmp', 'inventory': inv_string} - dump_artifacts(kwargs) + rc = RunnerConfig(**kwargs) + dump_artifacts(rc) - assert kwargs['inventory'] == '/tmp/site1' + assert rc.inventory == '/tmp/site1' def test_dump_artifacts_passwords(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') kwargs = { 'private_data_dir': '/tmp', @@ -177,7 +159,7 @@ def test_dump_artifacts_passwords(mocker): 'ssh_key': 'asdfg1234', } - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) assert mock_dump_artifact.call_count == 3 mock_dump_artifact.assert_any_call('{"a": "b"}', '/tmp/env', 'passwords') @@ -186,7 +168,7 @@ def test_dump_artifacts_passwords(mocker): def test_dont_dump_artifacts_passwords(mocker): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') kwargs = { 'private_data_dir': '/tmp', @@ -196,7 +178,7 @@ def test_dont_dump_artifacts_passwords(mocker): 'suppress_env_files': True } - dump_artifacts(kwargs) + dump_artifacts(RunnerConfig(**kwargs)) assert mock_dump_artifact.call_count == 0 @@ -211,12 +193,12 @@ def test_dont_dump_artifacts_passwords(mocker): ) ) def 
test_dump_artifacts_extra_keys(mocker, key, value, value_str): - mock_dump_artifact = mocker.patch('ansible_runner.utils.dump_artifact') + mock_dump_artifact = mocker.patch('ansible_runner._internal._dump_artifacts.dump_artifact') kwargs = {'private_data_dir': '/tmp'} kwargs.update({key: value}) - dump_artifacts(kwargs) + rc = RunnerConfig(**kwargs) + dump_artifacts(rc) mock_dump_artifact.assert_called_once_with(value_str, '/tmp/env', key) - assert 'settings' not in kwargs