diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py index 2488fa7471..c211a5b6b1 100644 --- a/lib/ansible/executor/task_executor.py +++ b/lib/ansible/executor/task_executor.py @@ -517,6 +517,7 @@ class TaskExecutor: if self._task.async > 0: if self._task.poll > 0 and not result.get('skipped'): result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy) + #FIXME callback 'v2_runner_on_async_poll' here # ensure no log is preserved result["_ansible_no_log"] = self._play_context.no_log @@ -757,15 +758,12 @@ class TaskExecutor: Returns the correct action plugin to handle the requestion task action ''' + # let action plugin override module, fallback to 'normal' action plugin otherwise if self._task.action in self._shared_loader_obj.action_loader: - if self._task.async != 0: - raise AnsibleError("async mode is not supported with the %s module" % self._task.action) handler_name = self._task.action - elif self._task.async == 0: + else: pc_conn = self._shared_loader_obj.connection_loader.get(self._play_context.connection, class_only=True) handler_name = getattr(pc_conn, 'action_handler', 'normal') - else: - handler_name = 'async' handler = self._shared_loader_obj.action_loader.get( handler_name, diff --git a/lib/ansible/modules/utilities/logic/async_wrapper.py b/lib/ansible/modules/utilities/logic/async_wrapper.py index ae23d19a4a..a1f493c17a 100644 --- a/lib/ansible/modules/utilities/logic/async_wrapper.py +++ b/lib/ansible/modules/utilities/logic/async_wrapper.py @@ -253,7 +253,7 @@ if __name__ == '__main__': # the argsfile at the very first start of their execution anyway notice("Return async_wrapper task started.") print(json.dumps({ "started" : 1, "finished" : 0, "ansible_job_id" : jid, "results_file" : job_path, - "_suppress_tmpdir_delete": not preserve_tmp})) + "_ansible_suppress_tmpdir_delete": not preserve_tmp})) sys.stdout.flush() time.sleep(1) sys.exit(0) diff --git a/lib/ansible/modules/windows/async_wrapper.ps1 b/lib/ansible/modules/windows/async_wrapper.ps1 index a79a6d6bb1..b89edfd69c 100644 --- a/lib/ansible/modules/windows/async_wrapper.ps1 +++ b/lib/ansible/modules/windows/async_wrapper.ps1 @@ -435,7 +435,7 @@ $result = @{ finished=0; results_file=$results_path; ansible_job_id=$local_jid; - _suppress_tmpdir_delete=$true; + _ansible_suppress_tmpdir_delete=$true; ansible_async_watchdog_pid=$watchdog_pid } diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py index ec89fc51e9..39a5316654 100644 --- a/lib/ansible/playbook/task.py +++ b/lib/ansible/playbook/task.py @@ -80,7 +80,7 @@ class Task(Base, Conditional, Taggable, Become): _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False) _name = FieldAttribute(isa='string', default='') _notify = FieldAttribute(isa='list') - _poll = FieldAttribute(isa='int') + _poll = FieldAttribute(isa='int', default=10) _register = FieldAttribute(isa='string') _retries = FieldAttribute(isa='int') _until = FieldAttribute(isa='list', default=[]) diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py index ac872679a4..e6bd46995b 100644 --- a/lib/ansible/plugins/action/__init__.py +++ b/lib/ansible/plugins/action/__init__.py @@ -67,9 +67,10 @@ class ActionBase(with_metaclass(ABCMeta, object)): self._shared_loader_obj = shared_loader_obj # Backwards compat: self._display isn't really needed, just import the global display and use that. 
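Reviewer note: the task_executor hunk above drops the dedicated `async` handler and the "async mode is not supported" error, so handler selection reduces to two cases. A minimal standalone model of that resolution order follows; the plain set and class below stand in for the action loader and connection plugin class and are assumptions for illustration, not the real Ansible API.

```python
# Standalone sketch of the new get_handler() resolution order.

def resolve_handler_name(task_action, known_action_plugins, connection_cls):
    # a matching action plugin overrides the module, async or not
    if task_action in known_action_plugins:
        return task_action
    # otherwise the connection plugin may nominate a handler; the ConnectionBase
    # default added in this patch is 'normal'
    return getattr(connection_cls, 'action_handler', 'normal')


class FakeConnection:
    action_handler = 'normal'


assert resolve_handler_name('copy', {'copy', 'template'}, FakeConnection) == 'copy'
assert resolve_handler_name('ping', {'copy', 'template'}, FakeConnection) == 'normal'
```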
self._display = display - self._cleanup_remote_tmp = False + self._supports_check_mode = True + self._supports_async = False @abstractmethod def run(self, tmp=None, task_vars=None): @@ -88,14 +89,20 @@ class ActionBase(with_metaclass(ABCMeta, object)): * Module parameters. These are stored in self._task.args """ - # store the module invocation details into the results - results = {} - if self._task.async == 0: - results['invocation'] = dict( - module_name = self._task.action, - module_args = self._task.args, - ) - return results + + result = {'skipped': True} + + if self._task.async and not self._supports_async: + result['msg'] = 'async is not supported for this task.' + elif self._play_context.check_mode and not self._supports_check_mode: + result['msg'] = 'check mode is not supported for this task.' + elif self._task.async and self._play_context.check_mode: + result['msg'] = 'check mode and async cannot be used on same task.' + else: + # we run! + del result['skipped'] + + return result def _remote_file_exists(self, path): cmd = self._connection._shell.exists(path) @@ -189,27 +196,33 @@ class ActionBase(with_metaclass(ABCMeta, object)): return getattr(self, 'TRANSFERS_FILES', False) - def _late_needs_tmp_path(self, tmp, module_style): + def _is_pipelining_enabled(self, module_style, wrap_async=False): ''' - Determines if a temp path is required after some early actions have already taken place. + Determines if we are required and can do pipelining ''' - if tmp and "tmp" in tmp: - # tmp has already been created - return False - if not self._connection.has_pipelining or not self._play_context.pipelining or C.DEFAULT_KEEP_REMOTE_FILES or self._play_context.become_method == 'su': - # tmp is necessary to store the module source code - # or we want to keep the files on the target system - return True - if module_style != "new": - # even when conn has pipelining, old style modules need tmp to store arguments - return True - return False + # any of these require a true + for condition in [ + self._connection.has_pipelining, + self._play_context.pipelining, + module_style == "new", # old style modules do not support pipelining + not C.DEFAULT_KEEP_REMOTE_FILES, # user wants remote files + not wrap_async, # async does not support pipelining + self._play_context.become_method != 'su', # su does not work with pipelining, + # FIXME: we might need to make become_method exclusion a configurable list + ]: + if not condition: + return False - def _make_tmp_path(self, remote_user): + return True + + def _make_tmp_path(self, remote_user=None): ''' Create and return a temporary path on a remote box. ''' + if remote_user is None: + remote_user = self._play_context.remote_user + basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48)) use_system_tmp = False @@ -248,6 +261,8 @@ class ActionBase(with_metaclass(ABCMeta, object)): if 'stdout' in result and result['stdout'] != u'': output = output + u": %s" % result['stdout'] raise AnsibleConnectionFailure(output) + else: + self._cleanup_remote_tmp = True try: stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1) @@ -275,7 +290,12 @@ class ActionBase(with_metaclass(ABCMeta, object)): cmd = self._connection._shell.remove(tmp_path, recurse=True) # If we have gotten here we have a working ssh configuration. # If ssh breaks we could leave tmp directories out on the remote system. 
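A sketch restating `_is_pipelining_enabled` with plain booleans instead of the connection and play-context objects (an assumption made purely for readability): pipelining is only used when every condition holds, and `wrap_async` is the new reason to fall back to a remote temp file.

```python
def is_pipelining_enabled(connection_supports_it, pipelining_configured,
                          module_style, keep_remote_files, wrap_async,
                          become_method):
    # all conditions must be true; any single failure means the module is
    # written to a remote temp path instead of being fed over stdin
    return all([
        connection_supports_it,        # connection plugin has pipelining
        pipelining_configured,         # pipelining enabled in config/play
        module_style == 'new',         # old-style modules do not support it
        not keep_remote_files,         # keeping remote files needs a tmp dir
        not wrap_async,                # async_wrapper needs the module on disk
        become_method != 'su',         # su does not work with pipelining
    ])


# async forces the non-pipelined path even when everything else allows it
assert is_pipelining_enabled(True, True, 'new', False, False, 'sudo') is True
assert is_pipelining_enabled(True, True, 'new', False, True, 'sudo') is False
```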
- self._low_level_execute_command(cmd, sudoable=False) + tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False) + + tmp_rm_data = self._parse_returned_data(tmp_rm_res) + if tmp_rm_data.get('rc', 0) != 0: + display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'), + tmp_rm_res.get('stderr', 'No error string available.'))) def _transfer_file(self, local_path, remote_path): self._connection.put_file(local_path, remote_path) @@ -307,7 +327,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): return remote_path - def _fixup_perms(self, remote_path, remote_user, execute=True, recursive=True): + def _fixup_perms(self, remote_path, remote_user=None, execute=True, recursive=True): """ We need the files we upload to be readable (and sometimes executable) by the user being sudo'd to but we want to limit other people's access @@ -319,6 +339,8 @@ class ActionBase(with_metaclass(ABCMeta, object)): for custom actions (non-recursive mode only). """ + if remote_user is None: + remote_user = self._play_context.remote_user display.deprecated('_fixup_perms is deprecated. Use _fixup_perms2 instead.', version='2.4', removed=False) @@ -329,7 +351,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): return self._fixup_perms2([remote_path], remote_user, execute) - def _fixup_perms2(self, remote_paths, remote_user, execute=True): + def _fixup_perms2(self, remote_paths, remote_user=None, execute=True): """ We need the files we upload to be readable (and sometimes executable) by the user being sudo'd to but we want to limit other people's access @@ -352,6 +374,9 @@ class ActionBase(with_metaclass(ABCMeta, object)): information we only do this ansible is configured with "allow_world_readable_tmpfiles" in the ansible.cfg """ + if remote_user is None: + remote_user = self._play_context.remote_user + if self._connection._shell.SHELL_FAMILY == 'powershell': # This won't work on Powershell as-is, so we'll just completely skip until # we have a need for it, at which point we'll have to do something different. @@ -403,12 +428,11 @@ class ActionBase(with_metaclass(ABCMeta, object)): ' (rc: {0}, err: {1}). For information on working around this,' ' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], to_native(res['stderr']))) elif execute: - # Can't depend on the file being transferred with execute - # permissions. Only need user perms because no become was - # used here + # Can't depend on the file being transferred with execute permissions. 
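Several helpers touched here (`_make_tmp_path`, `_fixup_perms`, `_fixup_perms2`) make `remote_user` optional and default it to the play context's remote user, which is why the copy/template/script/patch plugins later in this diff stop passing it. Reduced to a generic pattern (class and attribute names below are illustrative only):

```python
class RemoteHelper:
    def __init__(self, default_remote_user):
        self.default_remote_user = default_remote_user

    def fixup_perms(self, remote_paths, remote_user=None, execute=True):
        # fall back to the connection's configured user when the caller
        # does not override it explicitly
        if remote_user is None:
            remote_user = self.default_remote_user
        return (tuple(remote_paths), remote_user, execute)


helper = RemoteHelper(default_remote_user='deploy')
print(helper.fixup_perms(['/tmp/ansible-tmp-1/x.py']))          # uses 'deploy'
print(helper.fixup_perms(['/tmp/ansible-tmp-1/x.py'], 'root'))  # explicit override
```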
+ # Only need user perms because no become was used here res = self._remote_chmod(remote_paths, 'u+x') if res['rc'] != 0: - raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr']))) + raise AnsibleError('Failed to set execute bit on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr']))) return remote_paths @@ -447,7 +471,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): get_checksum=True, checksum_algo='sha1', ) - mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars, tmp=tmp, delete_remote_tmp=(tmp is None)) + mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars, tmp=tmp, delete_remote_tmp=(tmp is None), wrap_async=False) if mystat.get('failed'): msg = mystat.get('module_stderr') @@ -479,7 +503,7 @@ class ActionBase(with_metaclass(ABCMeta, object)): 3 = its a directory, not a file 4 = stat module failed, likely due to not finding python ''' - x = "0" # unknown error has occurred + x = "0" # unknown error has occured try: remote_stat = self._execute_remote_stat(path, all_vars, follow=follow) if remote_stat['exists'] and remote_stat['isdir']: @@ -562,23 +586,25 @@ class ActionBase(with_metaclass(ABCMeta, object)): # let module know about filesystems that selinux treats specially module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS - def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True): + + + def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False): ''' Transfer and run a module along with its arguments. 
''' if task_vars is None: task_vars = dict() - # if a module name was not specified for this execution, use - # the action from the task + remote_module_path = None + args_file_path = None + remote_files = [] + + # if a module name was not specified for this execution, use the action from the task if module_name is None: module_name = self._task.action if module_args is None: module_args = self._task.args - # Get the connection user for permission checks - remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user - self._update_module_args(module_name, module_args, task_vars) (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars) @@ -586,23 +612,18 @@ class ActionBase(with_metaclass(ABCMeta, object)): if not shebang and module_style != 'binary': raise AnsibleError("module (%s) is missing interpreter line" % module_name) - # a remote tmp path may be necessary and not already created - remote_module_path = None - args_file_path = None - if not tmp and self._late_needs_tmp_path(tmp, module_style): - tmp = self._make_tmp_path(remote_user) + if not self._is_pipelining_enabled(module_style, wrap_async): + + # we might need remote tmp dir + if not tmp or not 'tmp' in tmp: + tmp = self._make_tmp_path() - if tmp and \ - (module_style != 'new' or \ - not self._connection.has_pipelining or \ - not self._play_context.pipelining or \ - C.DEFAULT_KEEP_REMOTE_FILES or \ - self._play_context.become_method == 'su'): remote_module_filename = self._connection._shell.get_remote_filename(module_path) remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename) - if module_style in ('old', 'non_native_want_json', 'binary'): - # we'll also need a temp file to hold our module arguments - args_file_path = self._connection._shell.join_path(tmp, 'args') + + if module_style in ('old', 'non_native_want_json', 'binary'): + # we'll also need a temp file to hold our module arguments + args_file_path = self._connection._shell.join_path(tmp, 'args') if remote_module_path or module_style != 'new': display.debug("transferring module to remote %s" % remote_module_path) @@ -623,67 +644,101 @@ class ActionBase(with_metaclass(ABCMeta, object)): environment_string = self._compute_environment_string() - remote_files = None + if tmp and remote_module_path: + remote_files = [tmp, remote_module_path] if args_file_path: - remote_files = tmp, remote_module_path, args_file_path - elif remote_module_path: - remote_files = tmp, remote_module_path - - # Fix permissions of the tmp path and tmp files. This should be - # called after all files have been transferred. 
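The rewritten `_execute_module` only creates a remote temp directory when pipelining is not possible, and only writes a separate args file for module styles that cannot embed their arguments ('old', 'non_native_want_json', 'binary'). A standalone sketch of that decision, with plain values standing in for the shell and connection objects (an assumption, including the placeholder tmp path):

```python
import posixpath

def plan_remote_files(pipelining_enabled, module_style, tmp, module_filename):
    """Return (tmp, remote_module_path, args_file_path) for a single module run."""
    remote_module_path = None
    args_file_path = None

    if not pipelining_enabled:
        # we might need a remote tmp dir; reuse one we were handed if it
        # already looks like an ansible tmp path
        if not tmp or 'tmp' not in tmp:
            tmp = '/home/user/.ansible/tmp/ansible-tmp-1234'  # stand-in for _make_tmp_path()
        remote_module_path = posixpath.join(tmp, module_filename)

        if module_style in ('old', 'non_native_want_json', 'binary'):
            # these styles read their parameters from a separate file
            args_file_path = posixpath.join(tmp, 'args')

    return tmp, remote_module_path, args_file_path


print(plan_remote_files(True, 'new', None, 'ping.py'))   # (None, None, None) - pipelined
print(plan_remote_files(False, 'old', None, 'ping.py'))  # tmp dir + module + args file
```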
- if remote_files: - self._fixup_perms2(remote_files, remote_user) - - cmd = "" - in_data = None - - if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES and module_style == 'new': - in_data = module_data - else: - if remote_module_path: - cmd = remote_module_path - - rm_tmp = None - if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: - if not self._play_context.become or self._play_context.become_user == 'root': - # not sudoing or sudoing to root, so can cleanup files in the same step - rm_tmp = tmp - - cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp) - cmd = cmd.strip() + remote_files.append(args_file_path) sudoable = True - if module_name == "accelerate": - # always run the accelerate module as the user - # specified in the play, not the sudo_user - sudoable = False + in_data = None + cmd = "" + if wrap_async: + # configure, upload, and chmod the async_wrapper module + (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars) + async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path) + remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename) + self._transfer_data(remote_async_module_path, async_module_data) + remote_files.append(remote_async_module_path) + + async_limit = self._task.async + async_jid = str(random.randint(0, 999999999999)) + + # call the interpreter for async_wrapper directly + # this permits use of a script for an interpreter on non-Linux platforms + # TODO: re-implement async_wrapper as a regular module to avoid this special case + interpreter = shebang.replace('#!', '').strip() + async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path] + + if environment_string: + async_cmd.insert(0, environment_string) + + if args_file_path: + async_cmd.append(args_file_path) + else: + # maintain a fixed number of positional parameters for async_wrapper + async_cmd.append('_') + + if not self._should_remove_tmp_path(tmp): + async_cmd.append("-preserve_tmp") + + cmd= " ".join(to_text(x) for x in async_cmd) + + else: + + if self._is_pipelining_enabled(module_style): + in_data = module_data + else: + cmd = remote_module_path + + rm_tmp = None + + if self._should_remove_tmp_path(tmp) and not persist_files and delete_remote_tmp: + if not self._play_context.become or self._play_context.become_user == 'root': + # not sudoing or sudoing to root, so can cleanup files in the same step + rm_tmp = tmp + + cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp).strip() + + if module_name == "accelerate": + # always run the accelerate module as the user + # specified in the play, not the sudo_user + sudoable = False + + # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred. 
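For the async path, `_execute_module` now builds the `async_wrapper` invocation itself instead of delegating to the removed `async` action plugin. A simplified reconstruction of that command assembly (string inputs only; the real code derives these from the connection shell and the configure-module step):

```python
def build_async_cmd(interpreter, wrapper_path, jid, async_limit, module_path,
                    environment_string='', args_file_path=None, preserve_tmp=False):
    # positional arguments expected by async_wrapper: jid, time limit, module,
    # then either the args file or a '_' placeholder to keep the arity fixed
    cmd = [interpreter, wrapper_path, str(jid), str(async_limit), module_path]
    if environment_string:
        cmd.insert(0, environment_string)
    cmd.append(args_file_path if args_file_path else '_')
    if preserve_tmp:
        # tells async_wrapper not to delete the remote tmp dir when it finishes
        cmd.append('-preserve_tmp')
    return ' '.join(cmd)


print(build_async_cmd('/usr/bin/python', '/tmp/at/async_wrapper.py',
                      123456789, 300, '/tmp/at/ping.py'))
```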
+ if remote_files: + # remove none/empty + remote_files = [ x for x in remote_files if x] + self._fixup_perms2(remote_files, self._play_context.remote_user) + + # actually execute res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data) - if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp: - if self._play_context.become and self._play_context.become_user != 'root': - # not sudoing to root, so maybe can't delete files as that other user - # have to clean up temp files as original user in a second step - tmp_rm_cmd = self._connection._shell.remove(tmp, recurse=True) - tmp_rm_res = self._low_level_execute_command(tmp_rm_cmd, sudoable=False) - tmp_rm_data = self._parse_returned_data(tmp_rm_res) - if tmp_rm_data.get('rc', 0) != 0: - display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'), - tmp_rm_res.get('stderr', 'No error string available.'))) - - # parse the main result + # parse the main result, also cleans up internal keys data = self._parse_returned_data(res) - # pre-split stdout into lines, if stdout is in the data and there - # isn't already a stdout_lines value there + #NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE + # get internal info before cleaning + tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async) + + # remove internal keys + self._remove_internal_keys(data) + data['_ansible_parsed'] = True + + # cleanup tmp? + if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete: + self._remove_tmp_path(tmp) + + #FIXME: for backwards compat, figure out if still makes sense + if wrap_async: + data['changed'] = True + + # pre-split stdout/stderr into lines if needed if 'stdout' in data and 'stdout_lines' not in data: data['stdout_lines'] = data.get('stdout', u'').splitlines() - - # remove bad/empty internal keys - for key in ['warnings', 'deprecations']: - if key in data and not data[key]: - del data[key] + if 'stderr' in data and 'stderr_lines' not in data: + data['stderr_lines'] = data.get('stderr', u'').splitlines() display.debug("done with _execute_module (%s, %s)" % (module_name, module_args)) return data @@ -694,6 +749,12 @@ class ActionBase(with_metaclass(ABCMeta, object)): display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key])) del data[key] + # remove bad/empty internal keys + for key in ['warnings', 'deprecations']: + if key in data and not data[key]: + del data[key] + + def _clean_returned_data(self, data): remove_keys = set() fact_keys = set(data.keys()) @@ -737,8 +798,6 @@ class ActionBase(with_metaclass(ABCMeta, object)): display.warning(w) data = json.loads(filtered_output) - self._remove_internal_keys(data) - data['_ansible_parsed'] = True if 'ansible_facts' in data and isinstance(data['ansible_facts'], dict): self._clean_returned_data(data['ansible_facts']) diff --git a/lib/ansible/plugins/action/add_host.py b/lib/ansible/plugins/action/add_host.py index 2b720f3d97..b253c22865 100644 --- a/lib/ansible/plugins/action/add_host.py +++ b/lib/ansible/plugins/action/add_host.py @@ -41,14 +41,12 @@ class ActionModule(ActionBase): TRANSFERS_FILES = False def run(self, tmp=None, task_vars=None): - if task_vars is None: - task_vars = dict() + + self._supports_check_mode = False result = super(ActionModule, self).run(tmp, task_vars) - if self._play_context.check_mode: - result['skipped'] = True - result['msg'] = 
'check mode not supported for this module' + if result.get('skipped', False): return result # Parse out any hostname:port patterns diff --git a/lib/ansible/plugins/action/assemble.py b/lib/ansible/plugins/action/assemble.py index 979d37f3d4..1121bb937f 100644 --- a/lib/ansible/plugins/action/assemble.py +++ b/lib/ansible/plugins/action/assemble.py @@ -79,16 +79,17 @@ class ActionModule(ActionBase): return temp_path def run(self, tmp=None, task_vars=None): - if task_vars is None: - task_vars = dict() + + self._supports_check_mode = False result = super(ActionModule, self).run(tmp, task_vars) - if self._play_context.check_mode: - result['skipped'] = True - result['msg'] = "skipped, this module does not support check_mode." + if result.get('skipped', False): return result + if task_vars is None: + task_vars = dict() + src = self._task.args.get('src', None) dest = self._task.args.get('dest', None) delimiter = self._task.args.get('delimiter', None) @@ -102,7 +103,6 @@ class ActionModule(ActionBase): result['msg'] = "src and dest are required" return result - remote_user = self._play_context.remote_user if boolean(remote_src): result.update(self._execute_module(tmp=tmp, task_vars=task_vars)) return result @@ -115,8 +115,7 @@ class ActionModule(ActionBase): return result if not tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() if not os.path.isdir(src): result['failed'] = True @@ -160,7 +159,7 @@ class ActionModule(ActionBase): xfered = self._transfer_file(path, remote_path) # fix file permissions when the copy is done as a different user - self._fixup_perms2((tmp, remote_path), remote_user) + self._fixup_perms2((tmp, remote_path)) new_module_args.update( dict( src=xfered,)) diff --git a/lib/ansible/plugins/action/async.py b/lib/ansible/plugins/action/async.py deleted file mode 100644 index e7c5661852..0000000000 --- a/lib/ansible/plugins/action/async.py +++ /dev/null @@ -1,129 +0,0 @@ -# (c) 2012-2014, Michael DeHaan -# -# This file is part of Ansible -# -# Ansible is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# Ansible is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with Ansible. If not, see . 
-from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json -import random - -from ansible import constants as C -from ansible.compat.six import iteritems -from ansible.compat.six.moves import shlex_quote -from ansible.module_utils._text import to_text -from ansible.plugins.action import ActionBase - - -class ActionModule(ActionBase): - - def run(self, tmp=None, task_vars=None): - ''' transfer the given module name, plus the async module, then run it ''' - if task_vars is None: - task_vars = dict() - - result = super(ActionModule, self).run(tmp, task_vars) - - if self._play_context.check_mode: - result['skipped'] = True - result['msg'] = 'check mode not supported for this module' - return result - - remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user - if not tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp=True - - module_name = self._task.action - - env_string = self._compute_environment_string() - - module_args = self._task.args.copy() - if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG: - module_args['_ansible_no_log'] = True - - # configure, upload, and chmod the target module - (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars) - remote_module_filename = self._connection._shell.get_remote_filename(module_path) - remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename) - if module_style == 'binary': - self._transfer_file(module_path, remote_module_path) - else: - self._transfer_data(remote_module_path, module_data) - - # configure, upload, and chmod the async_wrapper module - (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars) - async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path) - remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename) - self._transfer_data(remote_async_module_path, async_module_data) - - argsfile = None - if module_style in ('non_native_want_json', 'binary'): - argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), json.dumps(module_args)) - elif module_style == 'old': - args_data = "" - for k, v in iteritems(module_args): - args_data += '%s="%s" ' % (k, shlex_quote(to_text(v))) - argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), args_data) - - remote_paths = tmp, remote_module_path, remote_async_module_path - - # argsfile doesn't need to be executable, but this saves an extra call to the remote host - if argsfile: - remote_paths += argsfile, - - self._fixup_perms2(remote_paths, remote_user, execute=True) - - async_limit = self._task.async - async_jid = str(random.randint(0, 999999999999)) - - # call the interpreter for async_wrapper directly - # this permits use of a script for an interpreter on non-Linux platforms - # TODO: re-implement async_wrapper as a regular module to avoid this special case - interpreter = shebang.replace('#!', '').strip() - async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path] - - if env_string: - async_cmd.insert(0, env_string) - - if argsfile: - async_cmd.append(argsfile) - else: - # maintain a fixed number of positional parameters for async_wrapper - async_cmd.append('_') - - if not self._should_remove_tmp_path(tmp): - 
async_cmd.append("-preserve_tmp") - - async_cmd = " ".join(to_text(x) for x in async_cmd) - result.update(self._low_level_execute_command(cmd=async_cmd)) - - result['changed'] = True - - # the async_wrapper module returns dumped JSON via its stdout - # response, so we (attempt to) parse it here - parsed_result = self._parse_returned_data(result) - - # Delete tmpdir from controller unless async_wrapper says something else will do it. - # Windows cannot request deletion of files/directories that are in use, so the async - # supervisory process has to be responsible for it. - if parsed_result.get("_suppress_tmpdir_delete", False) != True: - self._remove_tmp_path(tmp) - - # just return the original result - if 'skipped' in result and result['skipped'] or 'failed' in result and result['failed']: - return result - - return parsed_result diff --git a/lib/ansible/plugins/action/copy.py b/lib/ansible/plugins/action/copy.py index 8633693e81..dd4d337071 100644 --- a/lib/ansible/plugins/action/copy.py +++ b/lib/ansible/plugins/action/copy.py @@ -40,6 +40,9 @@ class ActionModule(ActionBase): result = super(ActionModule, self).run(tmp, task_vars) + if result.get('skipped'): + return result + source = self._task.args.get('src', None) content = self._task.args.get('content', None) dest = self._task.args.get('dest', None) @@ -48,17 +51,17 @@ class ActionModule(ActionBase): remote_src = boolean(self._task.args.get('remote_src', False)) follow = boolean(self._task.args.get('follow', False)) + result['failed'] = True if (source is None and content is None) or dest is None: - result['failed'] = True result['msg'] = "src (or content) and dest are required" - return result elif source is not None and content is not None: - result['failed'] = True result['msg'] = "src and content are mutually exclusive" - return result elif content is not None and dest is not None and dest.endswith("/"): - result['failed'] = True result['msg'] = "dest must be a file if content is defined" + else: + del result['failed'] + + if result.get('failed'): return result # Check if the source ends with a "/" @@ -87,7 +90,7 @@ class ActionModule(ActionBase): # if we have first_available_file in our vars # look up the files and use the first one we find as src elif remote_src: - result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars)) + result.update(self._execute_module(task_vars=task_vars)) return result else: # find in expected paths try: @@ -139,11 +142,9 @@ class ActionModule(ActionBase): delete_remote_tmp = (len(source_files) == 1) # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late. - remote_user = self._play_context.remote_user if not delete_remote_tmp: if tmp is None or "-tmp-" not in tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() # expand any user home dir specifier dest = self._remote_expand_user(dest) @@ -209,8 +210,7 @@ class ActionModule(ActionBase): # If this is recursive we already have a tmp path. 
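The per-plugin check-mode and async guards (seen in add_host, assemble, copy, and the deleted async plugin above) are replaced by the capability flags plus the shared gate in `ActionBase.run()`: plugins set `_supports_check_mode` / `_supports_async` and bail out early when the base class returns a skipped result. A compact standalone model of that gate (class names are illustrative):

```python
class BaseAction:
    _supports_check_mode = True
    _supports_async = False

    def run(self, async_seconds=0, check_mode=False):
        result = {'skipped': True}
        if async_seconds and not self._supports_async:
            result['msg'] = 'async is not supported for this task.'
        elif check_mode and not self._supports_check_mode:
            result['msg'] = 'check mode is not supported for this task.'
        elif async_seconds and check_mode:
            result['msg'] = 'check mode and async cannot be used on same task.'
        else:
            del result['skipped']   # we run!
        return result


class AddHostLike(BaseAction):
    _supports_check_mode = False

    def run(self, async_seconds=0, check_mode=False):
        result = super(AddHostLike, self).run(async_seconds, check_mode)
        if result.get('skipped', False):
            return result            # base class already explained why
        result['changed'] = True     # ... real work would happen here
        return result


print(AddHostLike().run(check_mode=True))   # skipped, with a message
print(AddHostLike().run())                  # {'changed': True}
```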
if delete_remote_tmp: if tmp is None or "-tmp-" not in tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() if self._play_context.diff and not raw: diffs.append(self._get_diff_data(dest_file, source_full, task_vars)) @@ -237,7 +237,7 @@ class ActionModule(ActionBase): # fix file permissions when the copy is done as a different user if remote_path: - self._fixup_perms2((tmp, remote_path), remote_user) + self._fixup_perms2((tmp, remote_path)) if raw: # Continue to next iteration if raw is defined. diff --git a/lib/ansible/plugins/action/network.py b/lib/ansible/plugins/action/network.py index 51e532bc19..c0bf8e0b55 100644 --- a/lib/ansible/plugins/action/network.py +++ b/lib/ansible/plugins/action/network.py @@ -29,10 +29,10 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): result = super(ActionModule, self).run(tmp, task_vars) - del result['invocation']['module_args'] + if result.get('invocation', {}).get('module_args'): + del result['invocation']['module_args'] - module_name = self._task.action - self._update_module_args(module_name, self._task.args, task_vars) + self._update_module_args(self._task.action, self._task.args, task_vars) try: _modify_module(self._task.args, self._connection) diff --git a/lib/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py index f115408cc7..80ea2f4e93 100644 --- a/lib/ansible/plugins/action/normal.py +++ b/lib/ansible/plugins/action/normal.py @@ -24,16 +24,26 @@ from ansible.utils.vars import merge_hash class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): - if task_vars is None: - task_vars = dict() + + # individual modules might disagree but as the generic the action plugin, pass at this point. + self._supports_check_mode = True + self._supports_async = True results = super(ActionModule, self).run(tmp, task_vars) - # remove as modules might hide due to nolog - del results['invocation']['module_args'] - results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars)) - # hack to keep --verbose from showing all the setup module results - if self._task.action == 'setup': - results['_ansible_verbose_override'] = True + if not results.get('skipped'): + + if results.get('invocation', {}).get('module_args'): + # avoid passing to modules in case of no_log + # should not be set anymore but here for backwards compatibility + del results['invocation']['module_args'] + + # do work! 
+ results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=self._task.async)) + + # hack to keep --verbose from showing all the setup module results + # moved from setup module as now we filter out all _ansible_ from results + if self._task.action == 'setup': + results['_ansible_verbose_override'] = True return results diff --git a/lib/ansible/plugins/action/package.py b/lib/ansible/plugins/action/package.py index 45fb85abd2..d3062866d4 100644 --- a/lib/ansible/plugins/action/package.py +++ b/lib/ansible/plugins/action/package.py @@ -32,11 +32,15 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): ''' handler for package operations ''' - if task_vars is None: - task_vars = dict() + + self._supports_check_mode = True + self._supports_async = True result = super(ActionModule, self).run(tmp, task_vars) + if result.get('skipped', False): + return result + module = self._task.args.get('use', 'auto') if module == 'auto': @@ -59,17 +63,16 @@ class ActionModule(ActionBase): if module not in self._shared_loader_obj.module_loader: result['failed'] = True result['msg'] = 'Could not find a module for %s.' % module - return result + else: + # run the 'package' module + new_module_args = self._task.args.copy() + if 'use' in new_module_args: + del new_module_args['use'] - # run the 'package' module - new_module_args = self._task.args.copy() - if 'use' in new_module_args: - del new_module_args['use'] - - display.vvvv("Running %s" % module) - result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)) - return result + display.vvvv("Running %s" % module) + result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async)) else: result['failed'] = True result['msg'] = 'Could not detect which package manager to use. Try gathering facts or setting the "use" option.' 
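The package action plugin (and, below, service) keeps its `use: auto` dispatch but now funnels everything through a single return point and forwards async to `_execute_module`. Stripped of the Ansible plumbing, the detection is essentially a fact lookup with a final error; the function below is a hypothetical reduction, not the plugin's actual code path:

```python
def pick_package_module(task_args, host_facts):
    """Hypothetical reduction of the 'use: auto' dispatch in the package action."""
    module = task_args.get('use', 'auto')
    if module == 'auto':
        # fall back to the fact gathered by the setup module
        module = host_facts.get('ansible_pkg_mgr', 'auto')
    if module == 'auto':
        raise RuntimeError('Could not detect which package manager to use. '
                           'Try gathering facts or setting the "use" option.')
    # the real plugin strips 'use' before handing the args to the backend module
    backend_args = {k: v for k, v in task_args.items() if k != 'use'}
    return module, backend_args


print(pick_package_module({'name': 'htop', 'state': 'present'},
                          {'ansible_pkg_mgr': 'apt'}))   # ('apt', {...})
```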
- return result + + return result diff --git a/lib/ansible/plugins/action/patch.py b/lib/ansible/plugins/action/patch.py index 4c5319ab1e..0e03c04c23 100644 --- a/lib/ansible/plugins/action/patch.py +++ b/lib/ansible/plugins/action/patch.py @@ -36,7 +36,6 @@ class ActionModule(ActionBase): src = self._task.args.get('src', None) remote_src = boolean(self._task.args.get('remote_src', 'no')) - remote_user = self._play_context.remote_user if src is None: result['failed'] = True @@ -57,13 +56,12 @@ class ActionModule(ActionBase): # create the remote tmp dir if needed, and put the source file there if tmp is None or "-tmp-" not in tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src)) self._transfer_file(src, tmp_src) - self._fixup_perms2((tmp, tmp_src), remote_user) + self._fixup_perms2((tmp, tmp_src) ) new_module_args = self._task.args.copy() new_module_args.update( diff --git a/lib/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py index 3b749b1e88..740ea5b92a 100644 --- a/lib/ansible/plugins/action/script.py +++ b/lib/ansible/plugins/action/script.py @@ -39,10 +39,8 @@ class ActionModule(ActionBase): result['msg'] = 'check mode not supported for this module' return result - remote_user = self._play_context.remote_user if not tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() creates = self._task.args.get('creates') if creates: @@ -80,7 +78,7 @@ class ActionModule(ActionBase): self._transfer_file(source, tmp_src) # set file permissions, more permissive when the copy is done as a different user - self._fixup_perms2((tmp, tmp_src), remote_user, execute=True) + self._fixup_perms2((tmp, tmp_src), execute=True) # add preparation steps to one ssh roundtrip executing the script env_string = self._compute_environment_string() diff --git a/lib/ansible/plugins/action/service.py b/lib/ansible/plugins/action/service.py index 33ec6de35f..e6e86e21c8 100644 --- a/lib/ansible/plugins/action/service.py +++ b/lib/ansible/plugins/action/service.py @@ -31,11 +31,15 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): ''' handler for package operations ''' - if task_vars is None: - task_vars = dict() + + self._supports_check_mode = True + self._supports_async = True result = super(ActionModule, self).run(tmp, task_vars) + if result.get('skipped', False): + return result + module = self._task.args.get('use', 'auto').lower() if module == 'auto': @@ -73,7 +77,7 @@ class ActionModule(ActionBase): self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module)) self._display.vvvv("Running %s" % module) - result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars)) + result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async)) else: result['failed'] = True result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.' 
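Reviewer note: with the standalone async plugin gone, action plugins that allow async (normal, package, and the service hunk above) simply pass `wrap_async=self._task.async` down to `_execute_module`, which also decides whether the controller still deletes the remote tmp dir based on the renamed internal key. A small standalone illustration of that decision, using an invented job id:

```python
import json

# Shape of the immediate response async_wrapper prints before detaching
# (the values here are invented for illustration):
initial = json.dumps({
    "started": 1,
    "finished": 0,
    "ansible_job_id": "123456789012.3104",
    "results_file": "/home/user/.ansible_async/123456789012.3104",
    # renamed in this patch from _suppress_tmpdir_delete; '_ansible_'-prefixed
    # keys are treated as internal and stripped before the result reaches users
    "_ansible_suppress_tmpdir_delete": True,
})

data = json.loads(initial)
wrap_async = True  # this task asked for async
# mirrors the controller-side check in _execute_module: only remove the remote
# tmp dir for async when the wrapper did not claim responsibility for it
tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False)) and wrap_async
print(tmpdir_delete)  # False: the async watchdog on the target cleans up instead
```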
diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py index ea47d67f37..2bedf66c19 100644 --- a/lib/ansible/plugins/action/template.py +++ b/lib/ansible/plugins/action/template.py @@ -52,6 +52,7 @@ class ActionModule(ActionBase): def run(self, tmp=None, task_vars=None): ''' handler for template operations ''' + if task_vars is None: task_vars = dict() @@ -145,10 +146,8 @@ class ActionModule(ActionBase): result['msg'] = type(e).__name__ + ": " + str(e) return result - remote_user = self._play_context.remote_user if not tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() local_checksum = checksum_s(resultant) remote_checksum = self.get_checksum(dest, task_vars, not directory_prepended, source=source, tmp=tmp) @@ -171,7 +170,7 @@ class ActionModule(ActionBase): xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant) # fix file permissions when the copy is done as a different user - self._fixup_perms2((tmp, xfered), remote_user) + self._fixup_perms2((tmp, xfered)) # run the copy module new_module_args.update( diff --git a/lib/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py index 8b6aaf94b1..001f63cc56 100644 --- a/lib/ansible/plugins/action/unarchive.py +++ b/lib/ansible/plugins/action/unarchive.py @@ -59,10 +59,8 @@ class ActionModule(ActionBase): result['msg'] = "src (or content) and dest are required" return result - remote_user = self._play_context.remote_user if not tmp: - tmp = self._make_tmp_path(remote_user) - self._cleanup_remote_tmp = True + tmp = self._make_tmp_path() if creates: # do not run the command if the line contains creates=filename @@ -110,7 +108,7 @@ class ActionModule(ActionBase): if not remote_src: # fix file permissions when the copy is done as a different user - self._fixup_perms2((tmp, tmp_src), remote_user) + self._fixup_perms2((tmp, tmp_src)) # Build temporary module_args. new_module_args = self._task.args.copy() new_module_args.update( diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py index 77b8dcd8fc..439bb70cde 100644 --- a/lib/ansible/plugins/connection/__init__.py +++ b/lib/ansible/plugins/connection/__init__.py @@ -67,6 +67,7 @@ class ConnectionBase(with_metaclass(ABCMeta, object)): # language means any language. 
module_implementation_preferences = ('',) allow_executable = True + action_handler = 'normal' def __init__(self, play_context, new_stdin, *args, **kwargs): # All these hasattrs allow subclasses to override these parameters diff --git a/test/integration/targets/win_async_wrapper/tasks/main.yml b/test/integration/targets/win_async_wrapper/tasks/main.yml index 21b3a42284..c4d5b05e20 100644 --- a/test/integration/targets/win_async_wrapper/tasks/main.yml +++ b/test/integration/targets/win_async_wrapper/tasks/main.yml @@ -12,7 +12,6 @@ - asyncresult.started == 1 - asyncresult.finished == 0 - asyncresult.results_file is search('\.ansible_async.+\d+\.\d+') - - asyncresult._suppress_tmpdir_delete == true - name: async poll immediate success async_test: diff --git a/test/sanity/pep8/legacy-files.txt b/test/sanity/pep8/legacy-files.txt index a592c42fb5..fffcc4fbec 100644 --- a/test/sanity/pep8/legacy-files.txt +++ b/test/sanity/pep8/legacy-files.txt @@ -256,7 +256,6 @@ lib/ansible/playbook/attribute.py lib/ansible/playbook/block.py lib/ansible/playbook/role/__init__.py lib/ansible/playbook/role/metadata.py -lib/ansible/plugins/action/async.py lib/ansible/plugins/action/set_fact.py lib/ansible/plugins/action/set_stats.py lib/ansible/plugins/action/synchronize.py diff --git a/test/sanity/pep8/skip.txt b/test/sanity/pep8/skip.txt index e69de29bb2..733f239e06 100644 --- a/test/sanity/pep8/skip.txt +++ b/test/sanity/pep8/skip.txt @@ -0,0 +1 @@ +lib/ansible/plugins/action/__init__.py diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py index 1d3a830778..293308d2a9 100644 --- a/test/units/plugins/action/test_action.py +++ b/test/units/plugins/action/test_action.py @@ -89,7 +89,7 @@ class TestActionBase(unittest.TestCase): mock_task.async = 0 action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None) results = action_base.run() - self.assertEqual(results, dict(invocation=dict(module_name='foo', module_args=dict(a=1, b=2, c=3)))) + self.assertEqual(results, {}) def test_action_base__configure_module(self): fake_loader = DictDataLoader({ @@ -230,60 +230,6 @@ class TestActionBase(unittest.TestCase): action_base.TRANSFERS_FILES = True self.assertTrue(action_base._early_needs_tmp_path()) - def test_action_base__late_needs_tmp_path(self): - # create our fake task - mock_task = MagicMock() - - # create a mock connection, so we don't actually try and connect to things - mock_connection = MagicMock() - - # we're using a real play context here - play_context = PlayContext() - - # our test class - action_base = DerivedActionBase( - task=mock_task, - connection=mock_connection, - play_context=play_context, - loader=None, - templar=None, - shared_loader_obj=None, - ) - - # assert no temp path is required because tmp is set - self.assertFalse(action_base._late_needs_tmp_path("/tmp/foo", "new")) - - # assert no temp path is required when using a new-style module - # with pipelining supported and enabled with no become method - mock_connection.has_pipelining = True - play_context.pipelining = True - play_context.become_method = None - self.assertFalse(action_base._late_needs_tmp_path(None, "new")) - - # assert a temp path is required for each of the following: - # the module style is not 'new' - mock_connection.has_pipelining = True - play_context.pipelining = True - play_context.become_method = None - self.assertTrue(action_base._late_needs_tmp_path(None, "old")) - # connection plugin does not support pipelining - mock_connection.has_pipelining = 
False - play_context.pipelining = True - play_context.become_method = None - self.assertTrue(action_base._late_needs_tmp_path(None, "new")) - # pipelining is disabled via the play context settings - mock_connection.has_pipelining = True - play_context.pipelining = False - play_context.become_method = None - self.assertTrue(action_base._late_needs_tmp_path(None, "new")) - # keep remote files is enabled - # FIXME: implement - # the become method is 'su' - mock_connection.has_pipelining = True - play_context.pipelining = True - play_context.become_method = 'su' - self.assertTrue(action_base._late_needs_tmp_path(None, "new")) - def test_action_base__make_tmp_path(self): # create our fake task mock_task = MagicMock() @@ -483,7 +429,7 @@ class TestActionBase(unittest.TestCase): # fake a lot of methods as we test those elsewhere action_base._configure_module = MagicMock() action_base._supports_check_mode = MagicMock() - action_base._late_needs_tmp_path = MagicMock() + action_base._is_pipelining_enabled = MagicMock() action_base._make_tmp_path = MagicMock() action_base._transfer_data = MagicMock() action_base._compute_environment_string = MagicMock() @@ -491,9 +437,10 @@ class TestActionBase(unittest.TestCase): action_base._fixup_perms2 = MagicMock() action_base._configure_module.return_value = ('new', '#!/usr/bin/python', 'this is the module data', 'path') - action_base._late_needs_tmp_path.return_value = False + action_base._is_pipelining_enabled.return_value = False action_base._compute_environment_string.return_value = '' - action_base._connection.has_pipelining = True + action_base._connection.has_pipelining = False + action_base._make_tmp_path.return_value = '/the/tmp/path' action_base._low_level_execute_command.return_value = dict(stdout='{"rc": 0, "stdout": "ok"}') self.assertEqual(action_base._execute_module(module_name=None, module_args=None), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok'])) self.assertEqual(action_base._execute_module(module_name='foo', @@ -503,7 +450,7 @@ class TestActionBase(unittest.TestCase): # test with needing/removing a remote tmp path action_base._configure_module.return_value = ('old', '#!/usr/bin/python', 'this is the module data', 'path') - action_base._late_needs_tmp_path.return_value = True + action_base._is_pipelining_enabled.return_value = False action_base._make_tmp_path.return_value = '/the/tmp/path' self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok'])) @@ -516,7 +463,8 @@ class TestActionBase(unittest.TestCase): # test an invalid shebang return action_base._configure_module.return_value = ('new', '', 'this is the module data', 'path') - action_base._late_needs_tmp_path.return_value = False + action_base._is_pipelining_enabled.return_value = False + action_base._make_tmp_path.return_value = '/the/tmp/path' self.assertRaises(AnsibleError, action_base._execute_module) # test with check mode enabled, once with support for check
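The removed `_late_needs_tmp_path` tests leave the new `_is_pipelining_enabled` helper without direct coverage in this diff. A hypothetical follow-up test along these lines, written in the same MagicMock/PlayContext style as the surrounding test_action.py but not part of this patch, would exercise the equivalent cases (it assumes the default configuration, i.e. keep-remote-files disabled):

```python
    def test_action_base__is_pipelining_enabled(self):
        # hypothetical follow-up test mirroring the removed _late_needs_tmp_path cases
        mock_task = MagicMock()
        mock_connection = MagicMock()
        play_context = PlayContext()

        action_base = DerivedActionBase(
            task=mock_task,
            connection=mock_connection,
            play_context=play_context,
            loader=None,
            templar=None,
            shared_loader_obj=None,
        )

        # pipelining is used for new-style modules when both sides allow it
        mock_connection.has_pipelining = True
        play_context.pipelining = True
        play_context.become_method = None
        self.assertTrue(action_base._is_pipelining_enabled("new"))

        # old-style modules never pipeline
        self.assertFalse(action_base._is_pipelining_enabled("old"))

        # a connection without pipelining support disables it
        mock_connection.has_pipelining = False
        self.assertFalse(action_base._is_pipelining_enabled("new"))
        mock_connection.has_pipelining = True

        # pipelining disabled in the play context disables it
        play_context.pipelining = False
        self.assertFalse(action_base._is_pipelining_enabled("new"))
        play_context.pipelining = True

        # su and async each force the non-pipelined path
        play_context.become_method = 'su'
        self.assertFalse(action_base._is_pipelining_enabled("new"))
        play_context.become_method = 'sudo'
        self.assertFalse(action_base._is_pipelining_enabled("new", wrap_async=True))
```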