refactoring async
- centralized skipping (also fixed module name broken by previous refactor)
- let action modules handle async processing
- moved async into the base action class's module exec
- action plugins can now run the final action as async
- actually skip copy if the base class skips
- fixed 'normal' for the new paths
- ensure the internal stat call is never async
- default poll to 10 as per the docs
- added hint for a callback fix on poll
- restructured late tmp creation, now a pipelining query
- moved the action handler to the connection, as networking does
- fixed the network assumption that invocation is always passed
- centralized key cleanup, normalized the internal var
- _suppress_tmpdir_delete is now in _ansible_xxx and gets removed from results
- delay internal key removal until after we use them
- nicer tmp removal, using existing methods
- moved the cleanup tmp flag to the make-tmp-path function
parent 22db51f15c
commit c86a17b7a0

22 changed files with 254 additions and 371 deletions
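The commit message above boils down to a new division of labor: an action plugin declares what it supports, the base ActionBase.run() decides centrally whether to skip, and _execute_module() wraps the target module with async_wrapper when asked. A minimal sketch of the resulting plugin pattern, condensed from the 'normal' action plugin changes further down in this diff (simplified; error handling and argument plumbing omitted):

class ActionModule(ActionBase):

    def run(self, tmp=None, task_vars=None):
        # opt in before calling the base class; base run() now handles the
        # async/check-mode skip logic in one place
        self._supports_check_mode = True
        self._supports_async = True

        result = super(ActionModule, self).run(tmp, task_vars)
        if result.get('skipped'):
            return result

        # the base class wraps the module with async_wrapper when wrap_async is truthy
        result.update(self._execute_module(tmp=tmp, task_vars=task_vars,
                                           wrap_async=self._task.async))
        return result

Plugins that cannot support async or check mode simply leave the corresponding _supports_* flag at its default and receive a skipped result back from super().run().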
@@ -517,6 +517,7 @@ class TaskExecutor:
             if self._task.async > 0:
                 if self._task.poll > 0 and not result.get('skipped'):
                     result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy)
+                    #FIXME callback 'v2_runner_on_async_poll' here

             # ensure no log is preserved
             result["_ansible_no_log"] = self._play_context.no_log
@@ -757,15 +758,12 @@ class TaskExecutor:
         Returns the correct action plugin to handle the requestion task action
         '''

+        # let action plugin override module, fallback to 'normal' action plugin otherwise
         if self._task.action in self._shared_loader_obj.action_loader:
-            if self._task.async != 0:
-                raise AnsibleError("async mode is not supported with the %s module" % self._task.action)
             handler_name = self._task.action
-        elif self._task.async == 0:
+        else:
             pc_conn = self._shared_loader_obj.connection_loader.get(self._play_context.connection, class_only=True)
             handler_name = getattr(pc_conn, 'action_handler', 'normal')
-        else:
-            handler_name = 'async'

         handler = self._shared_loader_obj.action_loader.get(
             handler_name,
@@ -253,7 +253,7 @@ if __name__ == '__main__':
         # the argsfile at the very first start of their execution anyway
         notice("Return async_wrapper task started.")
         print(json.dumps({ "started" : 1, "finished" : 0, "ansible_job_id" : jid, "results_file" : job_path,
-                           "_suppress_tmpdir_delete": not preserve_tmp}))
+                           "_ansible_suppress_tmpdir_delete": not preserve_tmp}))
         sys.stdout.flush()
         time.sleep(1)
         sys.exit(0)
@@ -435,7 +435,7 @@ $result = @{
     finished=0;
     results_file=$results_path;
     ansible_job_id=$local_jid;
-    _suppress_tmpdir_delete=$true;
+    _ansible_suppress_tmpdir_delete=$true;
     ansible_async_watchdog_pid=$watchdog_pid
 }

@@ -80,7 +80,7 @@ class Task(Base, Conditional, Taggable, Become):
     _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
     _name = FieldAttribute(isa='string', default='')
     _notify = FieldAttribute(isa='list')
-    _poll = FieldAttribute(isa='int')
+    _poll = FieldAttribute(isa='int', default=10)
     _register = FieldAttribute(isa='string')
     _retries = FieldAttribute(isa='int')
     _until = FieldAttribute(isa='list', default=[])
@@ -67,9 +67,10 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         self._shared_loader_obj = shared_loader_obj
         # Backwards compat: self._display isn't really needed, just import the global display and use that.
         self._display = display

         self._cleanup_remote_tmp = False
         self._supports_check_mode = True
+        self._supports_async = False

     @abstractmethod
     def run(self, tmp=None, task_vars=None):
@@ -88,14 +89,20 @@ class ActionBase(with_metaclass(ABCMeta, object)):

         * Module parameters. These are stored in self._task.args
         """
-        # store the module invocation details into the results
-        results = {}
-        if self._task.async == 0:
-            results['invocation'] = dict(
-                module_name = self._task.action,
-                module_args = self._task.args,
-            )
-        return results
+
+        result = {'skipped': True}
+
+        if self._task.async and not self._supports_async:
+            result['msg'] = 'async is not supported for this task.'
+        elif self._play_context.check_mode and not self._supports_check_mode:
+            result['msg'] = 'check mode is not supported for this task.'
+        elif self._task.async and self._play_context.check_mode:
+            result['msg'] = 'check mode and async cannot be used on same task.'
+        else:
+            # we run!
+            del result['skipped']
+
+        return result

     def _remote_file_exists(self, path):
         cmd = self._connection._shell.exists(path)
@@ -189,27 +196,33 @@ class ActionBase(with_metaclass(ABCMeta, object)):

         return getattr(self, 'TRANSFERS_FILES', False)

-    def _late_needs_tmp_path(self, tmp, module_style):
+    def _is_pipelining_enabled(self, module_style, wrap_async=False):
         '''
-        Determines if a temp path is required after some early actions have already taken place.
+        Determines if we are required and can do pipelining
         '''
-        if tmp and "tmp" in tmp:
-            # tmp has already been created
-            return False
-        if not self._connection.has_pipelining or not self._play_context.pipelining or C.DEFAULT_KEEP_REMOTE_FILES or self._play_context.become_method == 'su':
-            # tmp is necessary to store the module source code
-            # or we want to keep the files on the target system
-            return True
-        if module_style != "new":
-            # even when conn has pipelining, old style modules need tmp to store arguments
-            return True
-        return False
+        # any of these require a true
+        for condition in [
+            self._connection.has_pipelining,
+            self._play_context.pipelining,
+            module_style == "new",                     # old style modules do not support pipelining
+            not C.DEFAULT_KEEP_REMOTE_FILES,           # user wants remote files
+            not wrap_async,                            # async does not support pipelining
+            self._play_context.become_method != 'su',  # su does not work with pipelining,
+            # FIXME: we might need to make become_method exclusion a configurable list
+        ]:
+            if not condition:
+                return False
+
+        return True

-    def _make_tmp_path(self, remote_user):
+    def _make_tmp_path(self, remote_user=None):
         '''
         Create and return a temporary path on a remote box.
         '''

+        if remote_user is None:
+            remote_user = self._play_context.remote_user
+
         basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
         use_system_tmp = False

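The loop in _is_pipelining_enabled() above is just an "all of these must hold" check. An equivalent single-expression sketch of the same gate (not code from the diff; C is ansible.constants as imported by the surrounding module):

def _is_pipelining_enabled(self, module_style, wrap_async=False):
    # pipelining only when the connection and config allow it, the module is
    # new-style, remote files are not being kept, the call is not wrapped in
    # async_wrapper, and the become method is not 'su'
    return (self._connection.has_pipelining and
            self._play_context.pipelining and
            module_style == "new" and
            not C.DEFAULT_KEEP_REMOTE_FILES and
            not wrap_async and
            self._play_context.become_method != 'su')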
@@ -248,6 +261,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             if 'stdout' in result and result['stdout'] != u'':
                 output = output + u": %s" % result['stdout']
             raise AnsibleConnectionFailure(output)
+        else:
+            self._cleanup_remote_tmp = True

         try:
             stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
@@ -275,7 +290,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             cmd = self._connection._shell.remove(tmp_path, recurse=True)
             # If we have gotten here we have a working ssh configuration.
             # If ssh breaks we could leave tmp directories out on the remote system.
-            self._low_level_execute_command(cmd, sudoable=False)
+            tmp_rm_res = self._low_level_execute_command(cmd, sudoable=False)
+
+            tmp_rm_data = self._parse_returned_data(tmp_rm_res)
+            if tmp_rm_data.get('rc', 0) != 0:
+                display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'),
+                    tmp_rm_res.get('stderr', 'No error string available.')))

     def _transfer_file(self, local_path, remote_path):
         self._connection.put_file(local_path, remote_path)
@@ -307,7 +327,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):

         return remote_path

-    def _fixup_perms(self, remote_path, remote_user, execute=True, recursive=True):
+    def _fixup_perms(self, remote_path, remote_user=None, execute=True, recursive=True):
         """
         We need the files we upload to be readable (and sometimes executable)
         by the user being sudo'd to but we want to limit other people's access
@@ -319,6 +339,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         for custom actions (non-recursive mode only).

         """
+        if remote_user is None:
+            remote_user = self._play_context.remote_user

         display.deprecated('_fixup_perms is deprecated. Use _fixup_perms2 instead.', version='2.4', removed=False)

@@ -329,7 +351,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):

         return self._fixup_perms2([remote_path], remote_user, execute)

-    def _fixup_perms2(self, remote_paths, remote_user, execute=True):
+    def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
         """
         We need the files we upload to be readable (and sometimes executable)
         by the user being sudo'd to but we want to limit other people's access
@@ -352,6 +374,9 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         information we only do this ansible is configured with
         "allow_world_readable_tmpfiles" in the ansible.cfg
         """
+        if remote_user is None:
+            remote_user = self._play_context.remote_user
+
         if self._connection._shell.SHELL_FAMILY == 'powershell':
             # This won't work on Powershell as-is, so we'll just completely skip until
             # we have a need for it, at which point we'll have to do something different.
@@ -403,12 +428,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
                                       ' (rc: {0}, err: {1}). For information on working around this,'
                                       ' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], to_native(res['stderr'])))
             elif execute:
-                # Can't depend on the file being transferred with execute
-                # permissions. Only need user perms because no become was
-                # used here
+                # Can't depend on the file being transferred with execute permissions.
+                # Only need user perms because no become was used here
                 res = self._remote_chmod(remote_paths, 'u+x')
                 if res['rc'] != 0:
-                    raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
+                    raise AnsibleError('Failed to set execute bit on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))

         return remote_paths

@@ -447,7 +471,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             get_checksum=True,
             checksum_algo='sha1',
         )
-        mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars, tmp=tmp, delete_remote_tmp=(tmp is None))
+        mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars, tmp=tmp, delete_remote_tmp=(tmp is None), wrap_async=False)

         if mystat.get('failed'):
             msg = mystat.get('module_stderr')
@@ -479,7 +503,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         3 = its a directory, not a file
         4 = stat module failed, likely due to not finding python
         '''
-        x = "0"  # unknown error has occurred
+        x = "0"  # unknown error has occured
         try:
             remote_stat = self._execute_remote_stat(path, all_vars, follow=follow)
             if remote_stat['exists'] and remote_stat['isdir']:
@@ -562,23 +586,25 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         # let module know about filesystems that selinux treats specially
         module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS

-    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True):
+    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False):
         '''
         Transfer and run a module along with its arguments.
         '''
         if task_vars is None:
             task_vars = dict()

-        # if a module name was not specified for this execution, use
-        # the action from the task
+        remote_module_path = None
+        args_file_path = None
+        remote_files = []
+
+        # if a module name was not specified for this execution, use the action from the task
         if module_name is None:
             module_name = self._task.action
         if module_args is None:
             module_args = self._task.args

-        # Get the connection user for permission checks
-        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
-
         self._update_module_args(module_name, module_args, task_vars)

         (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
@@ -586,23 +612,18 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         if not shebang and module_style != 'binary':
             raise AnsibleError("module (%s) is missing interpreter line" % module_name)

-        # a remote tmp path may be necessary and not already created
-        remote_module_path = None
-        args_file_path = None
-        if not tmp and self._late_needs_tmp_path(tmp, module_style):
-            tmp = self._make_tmp_path(remote_user)
+        if not self._is_pipelining_enabled(module_style, wrap_async):

-        if tmp and \
-          (module_style != 'new' or \
-          not self._connection.has_pipelining or \
-          not self._play_context.pipelining or \
-          C.DEFAULT_KEEP_REMOTE_FILES or \
-          self._play_context.become_method == 'su'):
+            # we might need remote tmp dir
+            if not tmp or not 'tmp' in tmp:
+                tmp = self._make_tmp_path()
+
             remote_module_filename = self._connection._shell.get_remote_filename(module_path)
             remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
-            if module_style in ('old', 'non_native_want_json', 'binary'):
-                # we'll also need a temp file to hold our module arguments
-                args_file_path = self._connection._shell.join_path(tmp, 'args')
+
+            if module_style in ('old', 'non_native_want_json', 'binary'):
+                # we'll also need a temp file to hold our module arguments
+                args_file_path = self._connection._shell.join_path(tmp, 'args')

         if remote_module_path or module_style != 'new':
             display.debug("transferring module to remote %s" % remote_module_path)
@@ -623,67 +644,101 @@ class ActionBase(with_metaclass(ABCMeta, object)):

         environment_string = self._compute_environment_string()

-        remote_files = None
+        if tmp and remote_module_path:
+            remote_files = [tmp, remote_module_path]

-        if args_file_path:
-            remote_files = tmp, remote_module_path, args_file_path
-        elif remote_module_path:
-            remote_files = tmp, remote_module_path
-
-        # Fix permissions of the tmp path and tmp files. This should be
-        # called after all files have been transferred.
-        if remote_files:
-            self._fixup_perms2(remote_files, remote_user)
-
-        cmd = ""
-        in_data = None
-
-        if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES and module_style == 'new':
-            in_data = module_data
-        else:
-            if remote_module_path:
-                cmd = remote_module_path
-
-        rm_tmp = None
-        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
-            if not self._play_context.become or self._play_context.become_user == 'root':
-                # not sudoing or sudoing to root, so can cleanup files in the same step
-                rm_tmp = tmp
-
-        cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp)
-        cmd = cmd.strip()
+            if args_file_path:
+                remote_files.append(args_file_path)

         sudoable = True
-        if module_name == "accelerate":
-            # always run the accelerate module as the user
-            # specified in the play, not the sudo_user
-            sudoable = False
+        in_data = None
+        cmd = ""
+
+        if wrap_async:
+            # configure, upload, and chmod the async_wrapper module
+            (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
+            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
+            remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
+            self._transfer_data(remote_async_module_path, async_module_data)
+            remote_files.append(remote_async_module_path)
+
+            async_limit = self._task.async
+            async_jid = str(random.randint(0, 999999999999))
+
+            # call the interpreter for async_wrapper directly
+            # this permits use of a script for an interpreter on non-Linux platforms
+            # TODO: re-implement async_wrapper as a regular module to avoid this special case
+            interpreter = shebang.replace('#!', '').strip()
+            async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
+
+            if environment_string:
+                async_cmd.insert(0, environment_string)
+
+            if args_file_path:
+                async_cmd.append(args_file_path)
+            else:
+                # maintain a fixed number of positional parameters for async_wrapper
+                async_cmd.append('_')
+
+            if not self._should_remove_tmp_path(tmp):
+                async_cmd.append("-preserve_tmp")
+
+            cmd= " ".join(to_text(x) for x in async_cmd)
+
+        else:
+
+            if self._is_pipelining_enabled(module_style):
+                in_data = module_data
+            else:
+                cmd = remote_module_path
+
+            rm_tmp = None
+
+            if self._should_remove_tmp_path(tmp) and not persist_files and delete_remote_tmp:
+                if not self._play_context.become or self._play_context.become_user == 'root':
+                    # not sudoing or sudoing to root, so can cleanup files in the same step
+                    rm_tmp = tmp
+
+            cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp).strip()
+
+            if module_name == "accelerate":
+                # always run the accelerate module as the user
+                # specified in the play, not the sudo_user
+                sudoable = False
+
+        # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
+        if remote_files:
+            # remove none/empty
+            remote_files = [ x for x in remote_files if x]
+            self._fixup_perms2(remote_files, self._play_context.remote_user)

+        # actually execute
         res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

-        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
-            if self._play_context.become and self._play_context.become_user != 'root':
-                # not sudoing to root, so maybe can't delete files as that other user
-                # have to clean up temp files as original user in a second step
-                tmp_rm_cmd = self._connection._shell.remove(tmp, recurse=True)
-                tmp_rm_res = self._low_level_execute_command(tmp_rm_cmd, sudoable=False)
-                tmp_rm_data = self._parse_returned_data(tmp_rm_res)
-                if tmp_rm_data.get('rc', 0) != 0:
-                    display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'),
-                        tmp_rm_res.get('stderr', 'No error string available.')))
-
-        # parse the main result
+        # parse the main result, also cleans up internal keys
         data = self._parse_returned_data(res)

-        # pre-split stdout into lines, if stdout is in the data and there
-        # isn't already a stdout_lines value there
+        #NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
+        # get internal info before cleaning
+        tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)
+
+        # remove internal keys
+        self._remove_internal_keys(data)
+        data['_ansible_parsed'] = True
+
+        # cleanup tmp?
+        if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
+            self._remove_tmp_path(tmp)
+
+        #FIXME: for backwards compat, figure out if still makes sense
+        if wrap_async:
+            data['changed'] = True
+
+        # pre-split stdout/stderr into lines if needed
         if 'stdout' in data and 'stdout_lines' not in data:
             data['stdout_lines'] = data.get('stdout', u'').splitlines()
-
-        # remove bad/empty internal keys
-        for key in ['warnings', 'deprecations']:
-            if key in data and not data[key]:
-                del data[key]
+        if 'stderr' in data and 'stderr_lines' not in data:
+            data['stderr_lines'] = data.get('stderr', u'').splitlines()

         display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
         return data
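For reference, the wrap_async branch above ends up handing _low_level_execute_command() a single command line of the form "[env] <interpreter> <async_wrapper> <jid> <time_limit> <module> <args_file or _> [-preserve_tmp]". A small illustration with hypothetical values (the paths and job id below are made up, not taken from the diff):

# illustration only: hypothetical stand-ins for the variables assembled above
interpreter = '/usr/bin/python'
remote_async_module_path = '/home/deploy/.ansible/tmp/ansible-tmp-1/async_wrapper.py'
remote_module_path = '/home/deploy/.ansible/tmp/ansible-tmp-1/command.py'
async_cmd = [interpreter, remote_async_module_path, '123456789012', 300, remote_module_path, '_']
cmd = " ".join(str(x) for x in async_cmd)
# cmd is now the async_wrapper invocation that gets executed on the target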
@@ -694,6 +749,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
                 display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
                 del data[key]

+        # remove bad/empty internal keys
+        for key in ['warnings', 'deprecations']:
+            if key in data and not data[key]:
+                del data[key]
+
+
     def _clean_returned_data(self, data):
         remove_keys = set()
         fact_keys = set(data.keys())
@@ -737,8 +798,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
                 display.warning(w)

             data = json.loads(filtered_output)
-            self._remove_internal_keys(data)
-            data['_ansible_parsed'] = True

             if 'ansible_facts' in data and isinstance(data['ansible_facts'], dict):
                 self._clean_returned_data(data['ansible_facts'])
@@ -41,14 +41,12 @@ class ActionModule(ActionBase):
     TRANSFERS_FILES = False

     def run(self, tmp=None, task_vars=None):
-        if task_vars is None:
-            task_vars = dict()
+
+        self._supports_check_mode = False

         result = super(ActionModule, self).run(tmp, task_vars)

-        if self._play_context.check_mode:
-            result['skipped'] = True
-            result['msg'] = 'check mode not supported for this module'
+        if result.get('skipped', False):
             return result

         # Parse out any hostname:port patterns
@@ -79,16 +79,17 @@ class ActionModule(ActionBase):
         return temp_path

     def run(self, tmp=None, task_vars=None):
-        if task_vars is None:
-            task_vars = dict()
+
+        self._supports_check_mode = False

         result = super(ActionModule, self).run(tmp, task_vars)

-        if self._play_context.check_mode:
-            result['skipped'] = True
-            result['msg'] = "skipped, this module does not support check_mode."
+        if result.get('skipped', False):
             return result

+        if task_vars is None:
+            task_vars = dict()
+
         src = self._task.args.get('src', None)
         dest = self._task.args.get('dest', None)
         delimiter = self._task.args.get('delimiter', None)
@@ -102,7 +103,6 @@ class ActionModule(ActionBase):
             result['msg'] = "src and dest are required"
             return result

-        remote_user = self._play_context.remote_user
         if boolean(remote_src):
             result.update(self._execute_module(tmp=tmp, task_vars=task_vars))
             return result
@@ -115,8 +115,7 @@ class ActionModule(ActionBase):
             return result

         if not tmp:
-            tmp = self._make_tmp_path(remote_user)
-            self._cleanup_remote_tmp = True
+            tmp = self._make_tmp_path()

         if not os.path.isdir(src):
             result['failed'] = True
@@ -160,7 +159,7 @@ class ActionModule(ActionBase):
             xfered = self._transfer_file(path, remote_path)

             # fix file permissions when the copy is done as a different user
-            self._fixup_perms2((tmp, remote_path), remote_user)
+            self._fixup_perms2((tmp, remote_path))

             new_module_args.update( dict( src=xfered,))

@@ -1,129 +0,0 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import json
-import random
-
-from ansible import constants as C
-from ansible.compat.six import iteritems
-from ansible.compat.six.moves import shlex_quote
-from ansible.module_utils._text import to_text
-from ansible.plugins.action import ActionBase
-
-
-class ActionModule(ActionBase):
-
-    def run(self, tmp=None, task_vars=None):
-        ''' transfer the given module name, plus the async module, then run it '''
-        if task_vars is None:
-            task_vars = dict()
-
-        result = super(ActionModule, self).run(tmp, task_vars)
-
-        if self._play_context.check_mode:
-            result['skipped'] = True
-            result['msg'] = 'check mode not supported for this module'
-            return result
-
-        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
-        if not tmp:
-            tmp = self._make_tmp_path(remote_user)
-            self._cleanup_remote_tmp=True
-
-        module_name = self._task.action
-
-        env_string = self._compute_environment_string()
-
-        module_args = self._task.args.copy()
-        if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
-            module_args['_ansible_no_log'] = True
-
-        # configure, upload, and chmod the target module
-        (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
-        remote_module_filename = self._connection._shell.get_remote_filename(module_path)
-        remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
-        if module_style == 'binary':
-            self._transfer_file(module_path, remote_module_path)
-        else:
-            self._transfer_data(remote_module_path, module_data)
-
-        # configure, upload, and chmod the async_wrapper module
-        (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
-        async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
-        remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
-        self._transfer_data(remote_async_module_path, async_module_data)
-
-        argsfile = None
-        if module_style in ('non_native_want_json', 'binary'):
-            argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), json.dumps(module_args))
-        elif module_style == 'old':
-            args_data = ""
-            for k, v in iteritems(module_args):
-                args_data += '%s="%s" ' % (k, shlex_quote(to_text(v)))
-            argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), args_data)
-
-        remote_paths = tmp, remote_module_path, remote_async_module_path
-
-        # argsfile doesn't need to be executable, but this saves an extra call to the remote host
-        if argsfile:
-            remote_paths += argsfile,
-
-        self._fixup_perms2(remote_paths, remote_user, execute=True)
-
-        async_limit = self._task.async
-        async_jid = str(random.randint(0, 999999999999))
-
-        # call the interpreter for async_wrapper directly
-        # this permits use of a script for an interpreter on non-Linux platforms
-        # TODO: re-implement async_wrapper as a regular module to avoid this special case
-        interpreter = shebang.replace('#!', '').strip()
-        async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
-
-        if env_string:
-            async_cmd.insert(0, env_string)
-
-        if argsfile:
-            async_cmd.append(argsfile)
-        else:
-            # maintain a fixed number of positional parameters for async_wrapper
-            async_cmd.append('_')
-
-        if not self._should_remove_tmp_path(tmp):
-            async_cmd.append("-preserve_tmp")
-
-        async_cmd = " ".join(to_text(x) for x in async_cmd)
-        result.update(self._low_level_execute_command(cmd=async_cmd))
-
-        result['changed'] = True
-
-        # the async_wrapper module returns dumped JSON via its stdout
-        # response, so we (attempt to) parse it here
-        parsed_result = self._parse_returned_data(result)
-
-        # Delete tmpdir from controller unless async_wrapper says something else will do it.
-        # Windows cannot request deletion of files/directories that are in use, so the async
-        # supervisory process has to be responsible for it.
-        if parsed_result.get("_suppress_tmpdir_delete", False) != True:
-            self._remove_tmp_path(tmp)
-
-        # just return the original result
-        if 'skipped' in result and result['skipped'] or 'failed' in result and result['failed']:
-            return result
-
-        return parsed_result
@@ -40,6 +40,9 @@ class ActionModule(ActionBase):

         result = super(ActionModule, self).run(tmp, task_vars)

+        if result.get('skipped'):
+            return result
+
         source = self._task.args.get('src', None)
         content = self._task.args.get('content', None)
         dest = self._task.args.get('dest', None)
@@ -48,17 +51,17 @@ class ActionModule(ActionBase):
         remote_src = boolean(self._task.args.get('remote_src', False))
         follow = boolean(self._task.args.get('follow', False))

+        result['failed'] = True
         if (source is None and content is None) or dest is None:
-            result['failed'] = True
             result['msg'] = "src (or content) and dest are required"
-            return result
         elif source is not None and content is not None:
-            result['failed'] = True
             result['msg'] = "src and content are mutually exclusive"
-            return result
         elif content is not None and dest is not None and dest.endswith("/"):
-            result['failed'] = True
             result['msg'] = "dest must be a file if content is defined"
+        else:
+            del result['failed']
+
+        if result.get('failed'):
             return result

         # Check if the source ends with a "/"
@@ -87,7 +90,7 @@ class ActionModule(ActionBase):
         # if we have first_available_file in our vars
         # look up the files and use the first one we find as src
         elif remote_src:
-            result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars))
+            result.update(self._execute_module(task_vars=task_vars))
             return result
         else: # find in expected paths
             try:
@@ -139,11 +142,9 @@ class ActionModule(ActionBase):
         delete_remote_tmp = (len(source_files) == 1)

         # If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
-        remote_user = self._play_context.remote_user
         if not delete_remote_tmp:
             if tmp is None or "-tmp-" not in tmp:
-                tmp = self._make_tmp_path(remote_user)
-                self._cleanup_remote_tmp = True
+                tmp = self._make_tmp_path()

         # expand any user home dir specifier
         dest = self._remote_expand_user(dest)
@@ -209,8 +210,7 @@ class ActionModule(ActionBase):
             # If this is recursive we already have a tmp path.
             if delete_remote_tmp:
                 if tmp is None or "-tmp-" not in tmp:
-                    tmp = self._make_tmp_path(remote_user)
-                    self._cleanup_remote_tmp = True
+                    tmp = self._make_tmp_path()

             if self._play_context.diff and not raw:
                 diffs.append(self._get_diff_data(dest_file, source_full, task_vars))
@@ -237,7 +237,7 @@ class ActionModule(ActionBase):

             # fix file permissions when the copy is done as a different user
             if remote_path:
-                self._fixup_perms2((tmp, remote_path), remote_user)
+                self._fixup_perms2((tmp, remote_path))

             if raw:
                 # Continue to next iteration if raw is defined.
@@ -29,10 +29,10 @@ class ActionModule(ActionBase):

     def run(self, tmp=None, task_vars=None):
         result = super(ActionModule, self).run(tmp, task_vars)
-        del result['invocation']['module_args']
+        if result.get('invocation', {}).get('module_args'):
+            del result['invocation']['module_args']

-        module_name = self._task.action
-        self._update_module_args(module_name, self._task.args, task_vars)
+        self._update_module_args(self._task.action, self._task.args, task_vars)

         try:
             _modify_module(self._task.args, self._connection)
@@ -24,16 +24,26 @@ from ansible.utils.vars import merge_hash
 class ActionModule(ActionBase):

     def run(self, tmp=None, task_vars=None):
-        if task_vars is None:
-            task_vars = dict()
+
+        # individual modules might disagree but as the generic the action plugin, pass at this point.
+        self._supports_check_mode = True
+        self._supports_async = True

         results = super(ActionModule, self).run(tmp, task_vars)
-        # remove as modules might hide due to nolog
-        del results['invocation']['module_args']
-        results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars))

-        # hack to keep --verbose from showing all the setup module results
-        if self._task.action == 'setup':
-            results['_ansible_verbose_override'] = True
+        if not results.get('skipped'):
+
+            if results.get('invocation', {}).get('module_args'):
+                # avoid passing to modules in case of no_log
+                # should not be set anymore but here for backwards compatibility
+                del results['invocation']['module_args']
+
+            # do work!
+            results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=self._task.async))
+
+            # hack to keep --verbose from showing all the setup module results
+            # moved from setup module as now we filter out all _ansible_ from results
+            if self._task.action == 'setup':
+                results['_ansible_verbose_override'] = True

         return results
@@ -32,11 +32,15 @@ class ActionModule(ActionBase):

     def run(self, tmp=None, task_vars=None):
         ''' handler for package operations '''
-        if task_vars is None:
-            task_vars = dict()
+
+        self._supports_check_mode = True
+        self._supports_async = True

         result = super(ActionModule, self).run(tmp, task_vars)

+        if result.get('skipped', False):
+            return result
+
         module = self._task.args.get('use', 'auto')

         if module == 'auto':
@@ -59,17 +63,16 @@ class ActionModule(ActionBase):
             if module not in self._shared_loader_obj.module_loader:
                 result['failed'] = True
                 result['msg'] = 'Could not find a module for %s.' % module
-                return result
+            else:
+                # run the 'package' module
+                new_module_args = self._task.args.copy()
+                if 'use' in new_module_args:
+                    del new_module_args['use']

-            # run the 'package' module
-            new_module_args = self._task.args.copy()
-            if 'use' in new_module_args:
-                del new_module_args['use']
-
-            display.vvvv("Running %s" % module)
-            result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars))
-
-            return result
+                display.vvvv("Running %s" % module)
+                result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async))
         else:
             result['failed'] = True
             result['msg'] = 'Could not detect which package manager to use. Try gathering facts or setting the "use" option.'
-            return result
+
+        return result
@@ -36,7 +36,6 @@ class ActionModule(ActionBase):

         src = self._task.args.get('src', None)
         remote_src = boolean(self._task.args.get('remote_src', 'no'))
-        remote_user = self._play_context.remote_user

         if src is None:
             result['failed'] = True
@@ -57,13 +56,12 @@ class ActionModule(ActionBase):

         # create the remote tmp dir if needed, and put the source file there
         if tmp is None or "-tmp-" not in tmp:
-            tmp = self._make_tmp_path(remote_user)
-            self._cleanup_remote_tmp = True
+            tmp = self._make_tmp_path()

         tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
         self._transfer_file(src, tmp_src)

-        self._fixup_perms2((tmp, tmp_src), remote_user)
+        self._fixup_perms2((tmp, tmp_src) )

         new_module_args = self._task.args.copy()
         new_module_args.update(
@@ -39,10 +39,8 @@ class ActionModule(ActionBase):
             result['msg'] = 'check mode not supported for this module'
             return result

-        remote_user = self._play_context.remote_user
         if not tmp:
-            tmp = self._make_tmp_path(remote_user)
-            self._cleanup_remote_tmp = True
+            tmp = self._make_tmp_path()

         creates = self._task.args.get('creates')
         if creates:
@@ -80,7 +78,7 @@ class ActionModule(ActionBase):
         self._transfer_file(source, tmp_src)

         # set file permissions, more permissive when the copy is done as a different user
-        self._fixup_perms2((tmp, tmp_src), remote_user, execute=True)
+        self._fixup_perms2((tmp, tmp_src), execute=True)

         # add preparation steps to one ssh roundtrip executing the script
         env_string = self._compute_environment_string()
@@ -31,11 +31,15 @@ class ActionModule(ActionBase):

     def run(self, tmp=None, task_vars=None):
         ''' handler for package operations '''
-        if task_vars is None:
-            task_vars = dict()
+
+        self._supports_check_mode = True
+        self._supports_async = True

         result = super(ActionModule, self).run(tmp, task_vars)

+        if result.get('skipped', False):
+            return result
+
         module = self._task.args.get('use', 'auto').lower()

         if module == 'auto':
@@ -73,7 +77,7 @@ class ActionModule(ActionBase):
                 self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module))

             self._display.vvvv("Running %s" % module)
-            result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars))
+            result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async))
         else:
             result['failed'] = True
             result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.'
@@ -52,6 +52,7 @@ class ActionModule(ActionBase):

     def run(self, tmp=None, task_vars=None):
         ''' handler for template operations '''
+
         if task_vars is None:
             task_vars = dict()

@@ -145,10 +146,8 @@ class ActionModule(ActionBase):
             result['msg'] = type(e).__name__ + ": " + str(e)
             return result

-        remote_user = self._play_context.remote_user
         if not tmp:
-            tmp = self._make_tmp_path(remote_user)
-            self._cleanup_remote_tmp = True
+            tmp = self._make_tmp_path()

         local_checksum = checksum_s(resultant)
         remote_checksum = self.get_checksum(dest, task_vars, not directory_prepended, source=source, tmp=tmp)
@@ -171,7 +170,7 @@ class ActionModule(ActionBase):
             xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)

             # fix file permissions when the copy is done as a different user
-            self._fixup_perms2((tmp, xfered), remote_user)
+            self._fixup_perms2((tmp, xfered))

             # run the copy module
             new_module_args.update(
@@ -59,10 +59,8 @@ class ActionModule(ActionBase):
             result['msg'] = "src (or content) and dest are required"
             return result

-        remote_user = self._play_context.remote_user
         if not tmp:
-            tmp = self._make_tmp_path(remote_user)
-            self._cleanup_remote_tmp = True
+            tmp = self._make_tmp_path()

         if creates:
             # do not run the command if the line contains creates=filename
@@ -110,7 +108,7 @@ class ActionModule(ActionBase):

         if not remote_src:
             # fix file permissions when the copy is done as a different user
-            self._fixup_perms2((tmp, tmp_src), remote_user)
+            self._fixup_perms2((tmp, tmp_src))
         # Build temporary module_args.
         new_module_args = self._task.args.copy()
         new_module_args.update(
@@ -67,6 +67,7 @@ class ConnectionBase(with_metaclass(ABCMeta, object)):
     # language means any language.
     module_implementation_preferences = ('',)
     allow_executable = True
+    action_handler = 'normal'

     def __init__(self, play_context, new_stdin, *args, **kwargs):
         # All these hasattrs allow subclasses to override these parameters
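The new action_handler class attribute lets a connection plugin name the action plugin that should wrap module execution, with 'normal' as the default. A hedged sketch of a connection plugin overriding it — the transport and handler names below are illustrative, not taken from this commit:

# Illustrative sketch of a connection plugin overriding the new attribute.
# Only the attribute override is the point; the rest is boilerplate needed to
# satisfy the abstract base class.
from ansible.plugins.connection import ConnectionBase


class Connection(ConnectionBase):
    ''' hypothetical transport that wants its own action handler '''

    transport = 'example_cli'            # illustrative name
    action_handler = 'example_network'   # used instead of the 'normal' default

    def _connect(self):
        return self

    def exec_command(self, cmd, in_data=None, sudoable=True):
        raise NotImplementedError()

    def put_file(self, in_path, out_path):
        raise NotImplementedError()

    def fetch_file(self, in_path, out_path):
        raise NotImplementedError()

    def close(self):
        pass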
@@ -12,7 +12,6 @@
       - asyncresult.started == 1
       - asyncresult.finished == 0
       - asyncresult.results_file is search('\.ansible_async.+\d+\.\d+')
-      - asyncresult._suppress_tmpdir_delete == true

 - name: async poll immediate success
   async_test:
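The dropped assertion suggests the tmpdir-suppression flag is no longer surfaced in the registered async result. As a purely hypothetical illustration of that kind of change (not the project's actual helper), internal bookkeeping keys are typically namespaced and filtered out before a result is handed back to the user:

# Hypothetical example: filter internal, prefixed bookkeeping keys out of a
# task result so callers only see user-facing fields.
def strip_internal_keys(result, prefix='_ansible_'):
    return {key: value for key, value in result.items() if not key.startswith(prefix)}


raw = {'started': 1, 'finished': 0, '_ansible_suppress_tmpdir_delete': True}
print(strip_internal_keys(raw))   # -> {'started': 1, 'finished': 0}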
@@ -256,7 +256,6 @@ lib/ansible/playbook/attribute.py
 lib/ansible/playbook/block.py
 lib/ansible/playbook/role/__init__.py
 lib/ansible/playbook/role/metadata.py
-lib/ansible/plugins/action/async.py
 lib/ansible/plugins/action/set_fact.py
 lib/ansible/plugins/action/set_stats.py
 lib/ansible/plugins/action/synchronize.py
@@ -0,0 +1 @@
+lib/ansible/plugins/action/__init__.py
@@ -89,7 +89,7 @@ class TestActionBase(unittest.TestCase):
         mock_task.async = 0
         action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
         results = action_base.run()
-        self.assertEqual(results, dict(invocation=dict(module_name='foo', module_args=dict(a=1, b=2, c=3))))
+        self.assertEqual(results, {})

     def test_action_base__configure_module(self):
         fake_loader = DictDataLoader({
@@ -230,60 +230,6 @@ class TestActionBase(unittest.TestCase):
         action_base.TRANSFERS_FILES = True
         self.assertTrue(action_base._early_needs_tmp_path())

-    def test_action_base__late_needs_tmp_path(self):
-        # create our fake task
-        mock_task = MagicMock()
-
-        # create a mock connection, so we don't actually try and connect to things
-        mock_connection = MagicMock()
-
-        # we're using a real play context here
-        play_context = PlayContext()
-
-        # our test class
-        action_base = DerivedActionBase(
-            task=mock_task,
-            connection=mock_connection,
-            play_context=play_context,
-            loader=None,
-            templar=None,
-            shared_loader_obj=None,
-        )
-
-        # assert no temp path is required because tmp is set
-        self.assertFalse(action_base._late_needs_tmp_path("/tmp/foo", "new"))
-
-        # assert no temp path is required when using a new-style module
-        # with pipelining supported and enabled with no become method
-        mock_connection.has_pipelining = True
-        play_context.pipelining = True
-        play_context.become_method = None
-        self.assertFalse(action_base._late_needs_tmp_path(None, "new"))
-
-        # assert a temp path is required for each of the following:
-        # the module style is not 'new'
-        mock_connection.has_pipelining = True
-        play_context.pipelining = True
-        play_context.become_method = None
-        self.assertTrue(action_base._late_needs_tmp_path(None, "old"))
-        # connection plugin does not support pipelining
-        mock_connection.has_pipelining = False
-        play_context.pipelining = True
-        play_context.become_method = None
-        self.assertTrue(action_base._late_needs_tmp_path(None, "new"))
-        # pipelining is disabled via the play context settings
-        mock_connection.has_pipelining = True
-        play_context.pipelining = False
-        play_context.become_method = None
-        self.assertTrue(action_base._late_needs_tmp_path(None, "new"))
-        # keep remote files is enabled
-        # FIXME: implement
-        # the become method is 'su'
-        mock_connection.has_pipelining = True
-        play_context.pipelining = True
-        play_context.become_method = 'su'
-        self.assertTrue(action_base._late_needs_tmp_path(None, "new"))
-
     def test_action_base__make_tmp_path(self):
         # create our fake task
         mock_task = MagicMock()
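The deleted test enumerated when a remote tmp path was still needed; the hunks that follow mock an _is_pipelining_enabled check instead. A rough reconstruction of such a predicate, based only on the conditions the removed assertions covered (module style, connection support, the play context's pipelining flag, and a 'su' become method) — a sketch, not the actual implementation:

# Reconstructed sketch, not the real method: mirrors the cases the deleted
# test exercised for deciding whether pipelining can be used (and hence
# whether a remote tmp path can be skipped).
def is_pipelining_enabled(connection, play_context, module_style='new'):
    if module_style != 'new':
        return False                      # old-style modules need a tmp path
    if not getattr(connection, 'has_pipelining', False):
        return False                      # transport cannot pipeline
    if not play_context.pipelining:
        return False                      # disabled in configuration
    if play_context.become_method == 'su':
        return False                      # 'su' defeats pipelining
    return True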
@@ -483,7 +429,7 @@ class TestActionBase(unittest.TestCase):
         # fake a lot of methods as we test those elsewhere
         action_base._configure_module = MagicMock()
         action_base._supports_check_mode = MagicMock()
-        action_base._late_needs_tmp_path = MagicMock()
+        action_base._is_pipelining_enabled = MagicMock()
         action_base._make_tmp_path = MagicMock()
         action_base._transfer_data = MagicMock()
         action_base._compute_environment_string = MagicMock()
@@ -491,9 +437,10 @@
         action_base._fixup_perms2 = MagicMock()

         action_base._configure_module.return_value = ('new', '#!/usr/bin/python', 'this is the module data', 'path')
-        action_base._late_needs_tmp_path.return_value = False
+        action_base._is_pipelining_enabled.return_value = False
         action_base._compute_environment_string.return_value = ''
-        action_base._connection.has_pipelining = True
+        action_base._connection.has_pipelining = False
+        action_base._make_tmp_path.return_value = '/the/tmp/path'
         action_base._low_level_execute_command.return_value = dict(stdout='{"rc": 0, "stdout": "ok"}')
         self.assertEqual(action_base._execute_module(module_name=None, module_args=None), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
         self.assertEqual(action_base._execute_module(module_name='foo',
@@ -503,7 +450,7 @@

         # test with needing/removing a remote tmp path
         action_base._configure_module.return_value = ('old', '#!/usr/bin/python', 'this is the module data', 'path')
-        action_base._late_needs_tmp_path.return_value = True
+        action_base._is_pipelining_enabled.return_value = False
         action_base._make_tmp_path.return_value = '/the/tmp/path'
         self.assertEqual(action_base._execute_module(), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))

@@ -516,7 +463,8 @@

         # test an invalid shebang return
         action_base._configure_module.return_value = ('new', '', 'this is the module data', 'path')
-        action_base._late_needs_tmp_path.return_value = False
+        action_base._is_pipelining_enabled.return_value = False
+        action_base._make_tmp_path.return_value = '/the/tmp/path'
         self.assertRaises(AnsibleError, action_base._execute_module)

         # test with check mode enabled, once with support for check