2014-10-02 19:07:05 +02:00
|
|
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
2015-11-30 18:20:59 +01:00
|
|
|
#
|
2014-10-02 19:07:05 +02:00
|
|
|
# This file is part of Ansible
|
|
|
|
#
|
|
|
|
# Ansible is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# Ansible is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
2014-10-16 01:22:54 +02:00
|
|
|
# Make coding more python3-ish
|
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
|
|
|
|
2015-07-27 04:29:56 +02:00
|
|
|
import base64
|
2014-11-14 23:14:08 +01:00
|
|
|
import json
|
|
|
|
import os
|
|
|
|
import random
|
2016-01-20 21:26:45 +01:00
|
|
|
import re
|
2015-07-27 04:29:56 +02:00
|
|
|
import stat
|
2014-11-14 23:14:08 +01:00
|
|
|
import tempfile
|
|
|
|
import time
|
2015-10-23 01:07:26 +02:00
|
|
|
from abc import ABCMeta, abstractmethod
|
2014-11-14 23:14:08 +01:00
|
|
|
|
|
|
|
from ansible import constants as C
|
2016-12-13 18:14:47 +01:00
|
|
|
from ansible.compat.six import binary_type, string_types, text_type, iteritems, with_metaclass
|
2016-11-17 22:18:29 +01:00
|
|
|
from ansible.compat.six.moves import shlex_quote
|
2015-09-09 21:26:40 +02:00
|
|
|
from ansible.errors import AnsibleError, AnsibleConnectionFailure
|
2015-02-10 21:35:34 +01:00
|
|
|
from ansible.executor.module_common import modify_module
|
2016-09-07 07:54:17 +02:00
|
|
|
from ansible.module_utils._text import to_bytes, to_native, to_text
|
2016-10-02 17:03:42 +02:00
|
|
|
from ansible.module_utils.json_utils import _filter_non_json_lines
|
2014-11-14 23:14:08 +01:00
|
|
|
from ansible.parsing.utils.jsonify import jsonify
|
2016-10-24 20:05:56 +02:00
|
|
|
from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
|
2016-09-07 07:54:17 +02:00
|
|
|
from ansible.release import __version__
|
2016-12-13 18:14:47 +01:00
|
|
|
from ansible.vars.unsafe_proxy import wrap_var
|
2016-09-07 07:54:17 +02:00
|
|
|
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2015-07-23 16:24:50 +02:00
|
|
|
try:
|
|
|
|
from __main__ import display
|
|
|
|
except ImportError:
|
|
|
|
from ansible.utils.display import Display
|
|
|
|
display = Display()
|
|
|
|
|
2015-11-11 17:29:37 +01:00
|
|
|
|
2015-10-23 01:07:26 +02:00
|
|
|
class ActionBase(with_metaclass(ABCMeta, object)):
|
2014-11-14 23:14:08 +01:00
|
|
|
|
|
|
|
'''
|
|
|
|
This class is the base class for all action plugins, and defines
|
|
|
|
code common to all actions. The base class handles the connection
|
|
|
|
by putting/getting files and executing commands based on the current
|
|
|
|
action in use.
|
|
|
|
'''
|
|
|
|
|
2015-07-21 18:12:22 +02:00
|
|
|
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
|
2015-05-02 06:48:11 +02:00
|
|
|
self._task = task
|
|
|
|
self._connection = connection
|
2015-07-21 18:12:22 +02:00
|
|
|
self._play_context = play_context
|
2015-05-02 06:48:11 +02:00
|
|
|
self._loader = loader
|
2015-05-04 08:33:10 +02:00
|
|
|
self._templar = templar
|
2015-05-02 06:48:11 +02:00
|
|
|
self._shared_loader_obj = shared_loader_obj
|
2015-11-11 17:29:37 +01:00
|
|
|
# Backwards compat: self._display isn't really needed, just import the global display and use that.
|
2015-07-23 16:24:50 +02:00
|
|
|
self._display = display
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2016-04-20 19:39:12 +02:00
|
|
|
self._cleanup_remote_tmp = False
|
2015-05-11 18:22:41 +02:00
|
|
|
self._supports_check_mode = True
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2015-10-23 01:07:26 +02:00
|
|
|
@abstractmethod
|
|
|
|
def run(self, tmp=None, task_vars=None):
|
|
|
|
""" Action Plugins should implement this method to perform their
|
|
|
|
tasks. Everything else in this base class is a helper method for the
|
|
|
|
action plugin to do that.
|
|
|
|
|
|
|
|
:kwarg tmp: Temporary directory. Sometimes an action plugin sets up
|
|
|
|
a temporary directory and then calls another module. This parameter
|
|
|
|
allows us to reuse the same directory for both.
|
|
|
|
:kwarg task_vars: The variables (host vars, group vars, config vars,
|
|
|
|
etc) associated with this task.
|
|
|
|
:returns: dictionary of results from the module
|
|
|
|
|
|
|
|
Implementors of action modules may find the following variables especially useful:
|
|
|
|
|
|
|
|
* Module parameters. These are stored in self._task.args
|
|
|
|
"""
|
2015-12-19 20:09:20 +01:00
|
|
|
# store the module invocation details into the results
|
2016-09-07 07:54:17 +02:00
|
|
|
results = {}
|
2015-12-19 20:09:20 +01:00
|
|
|
if self._task.async == 0:
|
|
|
|
results['invocation'] = dict(
|
|
|
|
module_name = self._task.action,
|
|
|
|
module_args = self._task.args,
|
|
|
|
)
|
2015-10-23 01:07:26 +02:00
|
|
|
return results
|
|
|
|
|
2016-03-25 16:13:44 +01:00
|
|
|
def _remote_file_exists(self, path):
|
|
|
|
cmd = self._connection._shell.exists(path)
|
|
|
|
result = self._low_level_execute_command(cmd=cmd, sudoable=True)
|
|
|
|
if result['rc'] == 0:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2015-10-23 01:07:26 +02:00
|
|
|
    def _configure_module(self, module_name, module_args, task_vars=None):
        '''
        Handles the loading and templating of the module code through the
        modify_module() function.

        :arg module_name: name of the module to locate; may be rewritten to a
            win_* equivalent when a PowerShell-capable connection is in use
        :arg module_args: dict of module parameters (modified in place for
            Windows path un-quoting)
        :kwarg task_vars: variables made available while templating the module
        :returns: tuple of (module_style, module_shebang, module_data, module_path)
        :raises AnsibleError: if the module cannot be found in any module path
        '''
        if task_vars is None:
            task_vars = dict()

        # Search module path(s) for named module.
        for mod_type in self._connection.module_implementation_preferences:
            # Check to determine if PowerShell modules are supported, and apply
            # some fixes (hacks) to module name + args.
            if mod_type == '.ps1':
                # win_stat, win_file, and win_copy are not just like their
                # python counterparts but they are compatible enough for our
                # internal usage
                if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
                    module_name = 'win_%s' % module_name

                # Remove extra quotes surrounding path parameters before sending to module.
                if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and hasattr(self._connection._shell, '_unquote'):
                    for key in ('src', 'dest', 'path'):
                        if key in module_args:
                            module_args[key] = self._connection._shell._unquote(module_args[key])

            # First matching extension wins; stop looking once found.
            module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
            if module_path:
                break
        else:  # This is a for-else: http://bit.ly/1ElPkyg
            # Use Windows version of ping module to check module paths when
            # using a connection that supports .ps1 suffixes. We check specifically
            # for win_ping here, otherwise the code would look for ping.ps1
            if '.ps1' in self._connection.module_implementation_preferences:
                ping_module = 'win_ping'
            else:
                ping_module = 'ping'
            # If even the ping module resolves, only the requested module is
            # missing; otherwise the whole core-module tree looks absent.
            module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, self._connection.module_implementation_preferences)
            if module_path2 is not None:
                raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
            else:
                raise AnsibleError("The module %s was not found in configured module paths. "
                                   "Additionally, core modules are missing. If this is a checkout, "
                                   "run 'git pull --rebase' to correct this problem." % (module_name))

        # insert shared code and arguments into the module
        (module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args,
                                                                    task_vars=task_vars, module_compression=self._play_context.module_compression)

        return (module_style, module_shebang, module_data, module_path)
|
2014-11-14 23:14:08 +01:00
|
|
|
|
|
|
|
def _compute_environment_string(self):
|
|
|
|
'''
|
|
|
|
Builds the environment string to be used when executing the remote task.
|
|
|
|
'''
|
|
|
|
|
2015-07-21 19:52:51 +02:00
|
|
|
final_environment = dict()
|
|
|
|
if self._task.environment is not None:
|
|
|
|
environments = self._task.environment
|
|
|
|
if not isinstance(environments, list):
|
|
|
|
environments = [ environments ]
|
|
|
|
|
2015-12-17 15:44:40 +01:00
|
|
|
# the environments as inherited need to be reversed, to make
|
|
|
|
# sure we merge in the parent's values first so those in the
|
|
|
|
# block then task 'win' in precedence
|
|
|
|
environments.reverse()
|
2015-07-21 19:52:51 +02:00
|
|
|
for environment in environments:
|
2015-08-25 16:15:32 +02:00
|
|
|
if environment is None:
|
|
|
|
continue
|
2015-12-17 15:44:40 +01:00
|
|
|
temp_environment = self._templar.template(environment)
|
|
|
|
if not isinstance(temp_environment, dict):
|
|
|
|
raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
|
2015-10-26 22:04:28 +01:00
|
|
|
# very deliberately using update here instead of combine_vars, as
|
2015-07-21 19:52:51 +02:00
|
|
|
# these environment settings should not need to merge sub-dicts
|
2015-12-17 15:44:40 +01:00
|
|
|
final_environment.update(temp_environment)
|
2015-07-21 19:52:51 +02:00
|
|
|
|
2015-11-17 21:37:18 +01:00
|
|
|
final_environment = self._templar.template(final_environment)
|
2015-07-21 19:52:51 +02:00
|
|
|
return self._connection._shell.env_prefix(**final_environment)
|
2014-11-14 23:14:08 +01:00
|
|
|
|
|
|
|
def _early_needs_tmp_path(self):
|
|
|
|
'''
|
|
|
|
Determines if a temp path should be created before the action is executed.
|
|
|
|
'''
|
|
|
|
|
|
|
|
return getattr(self, 'TRANSFERS_FILES', False)
|
2015-04-13 18:35:20 +02:00
|
|
|
|
2014-11-14 23:14:08 +01:00
|
|
|
def _late_needs_tmp_path(self, tmp, module_style):
|
|
|
|
'''
|
|
|
|
Determines if a temp path is required after some early actions have already taken place.
|
|
|
|
'''
|
|
|
|
if tmp and "tmp" in tmp:
|
|
|
|
# tmp has already been created
|
|
|
|
return False
|
2015-12-03 17:01:05 +01:00
|
|
|
if not self._connection.has_pipelining or not self._play_context.pipelining or C.DEFAULT_KEEP_REMOTE_FILES or self._play_context.become_method == 'su':
|
2014-11-14 23:14:08 +01:00
|
|
|
# tmp is necessary to store the module source code
|
|
|
|
# or we want to keep the files on the target system
|
|
|
|
return True
|
|
|
|
if module_style != "new":
|
|
|
|
# even when conn has pipelining, old style modules need tmp to store arguments
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2016-03-21 22:17:53 +01:00
|
|
|
    def _make_tmp_path(self, remote_user):
        '''
        Create and return a temporary path on a remote box.

        :arg remote_user: the user the connection logs in as; used to decide
            whether the system tmp dir must be used (i.e. when becoming an
            unprivileged user other than the login user).
        :returns: absolute path of the newly created remote temp directory
        :raises AnsibleConnectionFailure: if the remote mkdir command fails
        :raises AnsibleError: if the created path cannot be parsed from stdout
        '''

        # Unique name: timestamp plus a random 48-bit suffix.
        basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
        use_system_tmp = False

        # Becoming an unprivileged third user: the per-user remote tmp may not
        # be accessible to that user, so fall back to the system tmp dir.
        if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
            use_system_tmp = True

        # Owner-only permissions on the new directory.
        tmp_mode = 0o700

        if use_system_tmp:
            tmpdir = None
        else:
            # sudoable=False: expand ~ as the login user, not the become user.
            tmpdir = self._remote_expand_user(C.DEFAULT_REMOTE_TMP, sudoable=False)

        cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode, tmpdir)
        result = self._low_level_execute_command(cmd, sudoable=False)

        # error handling on this seems a little aggressive?
        if result['rc'] != 0:
            if result['rc'] == 5:
                output = 'Authentication failure.'
            elif result['rc'] == 255 and self._connection.transport in ('ssh',):
                # rc 255 is ssh's own error code; give raw output only at -vvvv.
                if self._play_context.verbosity > 3:
                    output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
                else:
                    output = (u'SSH encountered an unknown error during the connection.'
                              ' We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
            elif u'No space left on device' in result['stderr']:
                output = result['stderr']
            else:
                output = ('Authentication or permission failure.'
                          ' In some cases, you may have been able to authenticate and did not have permissions on the remote directory.'
                          ' Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp".'
                          ' Failed command was: %s, exited with result %d' % (cmd, result['rc']))
            if 'stdout' in result and result['stdout'] != u'':
                output = output + u": %s" % result['stdout']
            raise AnsibleConnectionFailure(output)

        try:
            # mkdtemp echoes "<basefile>=<created path>"; take the text after
            # the marker and normalize it through the shell plugin.
            stdout_parts = result['stdout'].strip().split('%s=' % basefile, 1)
            rc = self._connection._shell.join_path(stdout_parts[-1], u'').splitlines()[-1]
        except IndexError:
            # stdout was empty or just space, set to / to trigger error in next if
            rc = '/'

        # Catch failure conditions, files should never be
        # written to locations in /.
        if rc == '/':
            raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))

        return rc
|
|
|
|
|
2016-09-07 02:38:12 +02:00
|
|
|
def _should_remove_tmp_path(self, tmp_path):
|
|
|
|
'''Determine if temporary path should be deleted or kept by user request/config'''
|
|
|
|
|
|
|
|
return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path
|
|
|
|
|
2014-11-14 23:14:08 +01:00
|
|
|
def _remove_tmp_path(self, tmp_path):
|
|
|
|
'''Remove a temporary path we created. '''
|
|
|
|
|
2016-09-07 02:38:12 +02:00
|
|
|
if self._should_remove_tmp_path(tmp_path):
|
2015-06-29 21:41:51 +02:00
|
|
|
cmd = self._connection._shell.remove(tmp_path, recurse=True)
|
2014-11-14 23:14:08 +01:00
|
|
|
# If we have gotten here we have a working ssh configuration.
|
|
|
|
# If ssh breaks we could leave tmp directories out on the remote system.
|
2015-09-24 22:29:36 +02:00
|
|
|
self._low_level_execute_command(cmd, sudoable=False)
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2016-03-21 22:17:53 +01:00
|
|
|
def _transfer_file(self, local_path, remote_path):
|
|
|
|
self._connection.put_file(local_path, remote_path)
|
|
|
|
return remote_path
|
|
|
|
|
2014-11-14 23:14:08 +01:00
|
|
|
def _transfer_data(self, remote_path, data):
|
|
|
|
'''
|
|
|
|
Copies the module data out to the temporary module path.
|
|
|
|
'''
|
|
|
|
|
2015-05-11 18:22:41 +02:00
|
|
|
if isinstance(data, dict):
|
2014-11-14 23:14:08 +01:00
|
|
|
data = jsonify(data)
|
|
|
|
|
|
|
|
afd, afile = tempfile.mkstemp()
|
2016-06-05 01:19:57 +02:00
|
|
|
afo = os.fdopen(afd, 'wb')
|
2014-11-14 23:14:08 +01:00
|
|
|
try:
|
2016-09-07 07:54:17 +02:00
|
|
|
data = to_bytes(data, errors='surrogate_or_strict')
|
2014-11-14 23:14:08 +01:00
|
|
|
afo.write(data)
|
2015-04-13 18:35:20 +02:00
|
|
|
except Exception as e:
|
2016-09-07 07:54:17 +02:00
|
|
|
raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))
|
2014-11-14 23:14:08 +01:00
|
|
|
|
|
|
|
afo.flush()
|
|
|
|
afo.close()
|
|
|
|
|
|
|
|
try:
|
2016-03-21 22:17:53 +01:00
|
|
|
self._transfer_file(afile, remote_path)
|
2014-11-14 23:14:08 +01:00
|
|
|
finally:
|
|
|
|
os.unlink(afile)
|
|
|
|
|
|
|
|
return remote_path
|
|
|
|
|
2016-09-07 01:49:59 +02:00
|
|
|
def _fixup_perms(self, remote_path, remote_user, execute=True, recursive=True):
|
|
|
|
"""
|
|
|
|
We need the files we upload to be readable (and sometimes executable)
|
|
|
|
by the user being sudo'd to but we want to limit other people's access
|
|
|
|
(because the files could contain passwords or other private
|
|
|
|
information.
|
|
|
|
|
|
|
|
Deprecated in favor of _fixup_perms2. Ansible code has been updated to
|
|
|
|
use _fixup_perms2. This code is maintained to provide partial support
|
|
|
|
for custom actions (non-recursive mode only).
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
display.deprecated('_fixup_perms is deprecated. Use _fixup_perms2 instead.', version='2.4', removed=False)
|
|
|
|
|
|
|
|
if recursive:
|
|
|
|
raise AnsibleError('_fixup_perms with recursive=True (the default) is no longer supported. ' +
|
|
|
|
'Use _fixup_perms2 if support for previous releases is not required. '
|
|
|
|
'Otherwise use fixup_perms with recursive=False.')
|
|
|
|
|
|
|
|
return self._fixup_perms2([remote_path], remote_user, execute)
|
|
|
|
|
|
|
|
    def _fixup_perms2(self, remote_paths, remote_user, execute=True):
        """
        We need the files we upload to be readable (and sometimes executable)
        by the user being sudo'd to but we want to limit other people's access
        (because the files could contain passwords or other private
        information.  We achieve this in one of these ways:

        * If no sudo is performed or the remote_user is sudo'ing to
          themselves, we don't have to change permissions.
        * If the remote_user sudo's to a privileged user (for instance, root),
          we don't have to change permissions
        * If the remote_user sudo's to an unprivileged user then we attempt to
          grant the unprivileged user access via file system acls.
        * If granting file system acls fails we try to change the owner of the
          file with chown which only works in case the remote_user is
          privileged or the remote systems allows chown calls by unprivileged
          users (e.g. HP-UX)
        * If the chown fails we can set the file to be world readable so that
          the second unprivileged user can read the file.
          Since this could allow other users to get access to private
          information we only do this ansible is configured with
          "allow_world_readable_tmpfiles" in the ansible.cfg

        :arg remote_paths: list of remote paths to adjust
        :arg remote_user: the user the connection logged in as
        :kwarg execute: whether the paths must end up executable as well
        :returns: remote_paths, unchanged
        :raises AnsibleError: when none of the fallback strategies succeed
        """
        if self._connection._shell.SHELL_FAMILY == 'powershell':
            # This won't work on Powershell as-is, so we'll just completely skip until
            # we have a need for it, at which point we'll have to do something different.
            return remote_paths

        if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
            # Unprivileged user that's different than the ssh user.  Let's get
            # to work!

            # Try to use file system acls to make the files readable for sudo'd
            # user
            if execute:
                chmod_mode = 'rx'
                setfacl_mode = 'r-x'
            else:
                chmod_mode = 'rX'
                ### Note: this form fails silently on freebsd.  We currently
                # never call _fixup_perms2() with execute=False but if we
                # start to we'll have to fix this.
                setfacl_mode = 'r-X'

            res = self._remote_set_user_facl(remote_paths, self._play_context.become_user, setfacl_mode)
            if res['rc'] != 0:
                # File system acls failed; let's try to use chown next
                # Set executable bit first as on some systems an
                # unprivileged user can use chown
                if execute:
                    res = self._remote_chmod(remote_paths, 'u+x')
                    if res['rc'] != 0:
                        raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))

                res = self._remote_chown(remote_paths, self._play_context.become_user)
                if res['rc'] != 0 and remote_user == 'root':
                    # chown failed even if remove_user is root
                    raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root.'
                                       ' Unprivileged become user would be unable to read the file.')
                elif res['rc'] != 0:
                    if C.ALLOW_WORLD_READABLE_TMPFILES:
                        # chown and fs acls failed -- do things this insecure
                        # way only if the user opted in in the config file
                        display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user.'
                                        ' This may be insecure. For information on securing this, see'
                                        ' https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
                        res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
                        if res['rc'] != 0:
                            raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
                    else:
                        raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user'
                                           ' (rc: {0}, err: {1}). For information on working around this,'
                                           ' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], to_native(res['stderr'])))
        elif execute:
            # Can't depend on the file being transferred with execute
            # permissions.  Only need user perms because no become was
            # used here
            res = self._remote_chmod(remote_paths, 'u+x')
            if res['rc'] != 0:
                raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))

        return remote_paths
|
2016-03-21 22:17:53 +01:00
|
|
|
|
2016-08-06 03:40:28 +02:00
|
|
|
def _remote_chmod(self, paths, mode, sudoable=False):
|
2014-11-14 23:14:08 +01:00
|
|
|
'''
|
|
|
|
Issue a remote chmod command
|
|
|
|
'''
|
2016-08-06 03:40:28 +02:00
|
|
|
cmd = self._connection._shell.chmod(paths, mode)
|
2016-03-21 22:17:53 +01:00
|
|
|
res = self._low_level_execute_command(cmd, sudoable=sudoable)
|
|
|
|
return res
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2016-08-06 03:40:28 +02:00
|
|
|
def _remote_chown(self, paths, user, sudoable=False):
|
2016-03-21 22:17:53 +01:00
|
|
|
'''
|
|
|
|
Issue a remote chown command
|
|
|
|
'''
|
2016-08-06 03:40:28 +02:00
|
|
|
cmd = self._connection._shell.chown(paths, user)
|
2016-03-21 22:17:53 +01:00
|
|
|
res = self._low_level_execute_command(cmd, sudoable=sudoable)
|
|
|
|
return res
|
|
|
|
|
2016-08-06 03:40:28 +02:00
|
|
|
def _remote_set_user_facl(self, paths, user, mode, sudoable=False):
|
2016-03-21 22:17:53 +01:00
|
|
|
'''
|
|
|
|
Issue a remote call to setfacl
|
|
|
|
'''
|
2016-08-06 03:40:28 +02:00
|
|
|
cmd = self._connection._shell.set_user_facl(paths, user, mode)
|
2015-09-24 22:29:36 +02:00
|
|
|
res = self._low_level_execute_command(cmd, sudoable=sudoable)
|
2015-02-09 23:54:44 +01:00
|
|
|
return res
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2016-03-10 20:06:41 +01:00
|
|
|
def _execute_remote_stat(self, path, all_vars, follow, tmp=None):
|
2014-11-14 23:14:08 +01:00
|
|
|
'''
|
2016-02-15 23:11:49 +01:00
|
|
|
Get information from remote file.
|
2014-11-14 23:14:08 +01:00
|
|
|
'''
|
2016-02-15 23:11:49 +01:00
|
|
|
module_args=dict(
|
|
|
|
path=path,
|
|
|
|
follow=follow,
|
|
|
|
get_md5=False,
|
|
|
|
get_checksum=True,
|
|
|
|
checksum_algo='sha1',
|
|
|
|
)
|
2016-03-10 20:06:41 +01:00
|
|
|
mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars, tmp=tmp, delete_remote_tmp=(tmp is None))
|
2016-02-15 23:11:49 +01:00
|
|
|
|
2016-12-18 19:55:21 +01:00
|
|
|
if mystat.get('failed'):
|
|
|
|
msg = mystat.get('module_stderr')
|
|
|
|
if not msg:
|
|
|
|
msg = mystat.get('module_stdout')
|
|
|
|
if not msg:
|
|
|
|
msg = mystat.get('msg')
|
|
|
|
raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, msg))
|
2016-02-15 23:11:49 +01:00
|
|
|
|
|
|
|
if not mystat['stat']['exists']:
|
|
|
|
# empty might be matched, 1 should never match, also backwards compatible
|
|
|
|
mystat['stat']['checksum'] = '1'
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2016-02-23 21:06:37 +01:00
|
|
|
# happens sometimes when it is a dir and not on bsd
|
2016-09-07 07:54:17 +02:00
|
|
|
if 'checksum' not in mystat['stat']:
|
2016-02-23 21:06:37 +01:00
|
|
|
mystat['stat']['checksum'] = ''
|
2016-12-13 18:14:47 +01:00
|
|
|
elif not isinstance(mystat['stat']['checksum'], string_types):
|
|
|
|
raise AnsibleError("Invalid checksum returned by stat: expected a string type but got %s" % type(mystat['stat']['checksum']))
|
2016-02-23 21:06:37 +01:00
|
|
|
|
2016-02-15 23:11:49 +01:00
|
|
|
return mystat['stat']
|
2015-08-15 18:00:13 +02:00
|
|
|
|
2016-05-26 23:47:11 +02:00
|
|
|
def _remote_checksum(self, path, all_vars, follow=False):
|
2016-02-15 23:11:49 +01:00
|
|
|
'''
|
|
|
|
Produces a remote checksum given a path,
|
|
|
|
Returns a number 0-4 for specific errors instead of checksum, also ensures it is different
|
|
|
|
0 = unknown error
|
|
|
|
1 = file does not exist, this might not be an error
|
|
|
|
2 = permissions issue
|
|
|
|
3 = its a directory, not a file
|
|
|
|
4 = stat module failed, likely due to not finding python
|
|
|
|
'''
|
2016-12-11 03:50:09 +01:00
|
|
|
x = "0" # unknown error has occurred
|
2014-11-14 23:14:08 +01:00
|
|
|
try:
|
2016-05-26 23:47:11 +02:00
|
|
|
remote_stat = self._execute_remote_stat(path, all_vars, follow=follow)
|
2016-02-15 23:11:49 +01:00
|
|
|
if remote_stat['exists'] and remote_stat['isdir']:
|
2016-09-07 07:54:17 +02:00
|
|
|
x = "3" # its a directory not a file
|
2014-11-14 23:14:08 +01:00
|
|
|
else:
|
2016-09-07 07:54:17 +02:00
|
|
|
x = remote_stat['checksum'] # if 1, file is missing
|
2016-02-15 23:11:49 +01:00
|
|
|
except AnsibleError as e:
|
2016-09-07 07:54:17 +02:00
|
|
|
errormsg = to_text(e)
|
|
|
|
if errormsg.endswith(u'Permission denied'):
|
|
|
|
x = "2" # cannot read file
|
|
|
|
elif errormsg.endswith(u'MODULE FAILURE'):
|
|
|
|
x = "4" # python not found or module uncaught exception
|
2016-02-15 23:11:49 +01:00
|
|
|
finally:
|
|
|
|
return x
|
|
|
|
|
2017-01-26 21:18:10 +01:00
|
|
|
def _remote_expand_user(self, path, sudoable=True):
|
2014-11-14 23:14:08 +01:00
|
|
|
''' takes a remote path and performs tilde expansion on the remote host '''
|
2016-09-07 07:54:17 +02:00
|
|
|
if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
|
2014-11-14 23:14:08 +01:00
|
|
|
return path
|
|
|
|
|
2015-07-24 18:39:54 +02:00
|
|
|
# FIXME: Can't use os.path.sep for Windows paths.
|
2014-11-14 23:14:08 +01:00
|
|
|
split_path = path.split(os.path.sep, 1)
|
|
|
|
expand_path = split_path[0]
|
2017-01-26 21:18:10 +01:00
|
|
|
if sudoable and expand_path == '~' and self._play_context.become and self._play_context.become_user:
|
|
|
|
expand_path = '~%s' % self._play_context.become_user
|
|
|
|
|
2015-06-29 21:41:51 +02:00
|
|
|
cmd = self._connection._shell.expand_user(expand_path)
|
2015-09-24 22:29:36 +02:00
|
|
|
data = self._low_level_execute_command(cmd, sudoable=False)
|
2014-11-14 23:14:08 +01:00
|
|
|
initial_fragment = data['stdout'].strip().splitlines()[-1]
|
|
|
|
|
|
|
|
if not initial_fragment:
|
|
|
|
# Something went wrong trying to expand the path remotely. Return
|
|
|
|
# the original string
|
|
|
|
return path
|
|
|
|
|
|
|
|
if len(split_path) > 1:
|
2015-06-29 21:41:51 +02:00
|
|
|
return self._connection._shell.join_path(initial_fragment, *split_path[1:])
|
2014-11-14 23:14:08 +01:00
|
|
|
else:
|
|
|
|
return initial_fragment
|
|
|
|
|
2016-01-20 21:26:45 +01:00
|
|
|
def _strip_success_message(self, data):
|
|
|
|
'''
|
|
|
|
Removes the BECOME-SUCCESS message from the data.
|
|
|
|
'''
|
|
|
|
if data.strip().startswith('BECOME-SUCCESS-'):
|
|
|
|
data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
|
|
|
|
return data
|
|
|
|
|
2016-12-19 22:54:31 +01:00
|
|
|
def _update_module_args(self, module_name, module_args, task_vars):
|
2014-11-14 23:14:08 +01:00
|
|
|
|
2015-01-28 15:55:18 +01:00
|
|
|
# set check mode in the module arguments, if required
|
2016-01-07 07:37:19 +01:00
|
|
|
if self._play_context.check_mode:
|
2015-01-28 15:55:18 +01:00
|
|
|
if not self._supports_check_mode:
|
|
|
|
raise AnsibleError("check mode is not supported for this operation")
|
|
|
|
module_args['_ansible_check_mode'] = True
|
2016-01-07 07:37:19 +01:00
|
|
|
else:
|
|
|
|
module_args['_ansible_check_mode'] = False
|
2015-01-28 15:55:18 +01:00
|
|
|
|
|
|
|
# set no log in the module arguments, if required
|
2016-01-07 07:37:19 +01:00
|
|
|
module_args['_ansible_no_log'] = self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG
|
2015-01-28 15:55:18 +01:00
|
|
|
|
2015-09-26 05:57:03 +02:00
|
|
|
# set debug in the module arguments, if required
|
2016-01-07 07:37:19 +01:00
|
|
|
module_args['_ansible_debug'] = C.DEFAULT_DEBUG
|
|
|
|
|
|
|
|
# let module know we are in diff mode
|
|
|
|
module_args['_ansible_diff'] = self._play_context.diff
|
|
|
|
|
|
|
|
# let module know our verbosity
|
2016-05-12 02:54:01 +02:00
|
|
|
module_args['_ansible_verbosity'] = display.verbosity
|
2015-09-26 05:57:03 +02:00
|
|
|
|
2016-05-13 05:30:05 +02:00
|
|
|
# give the module information about the ansible version
|
|
|
|
module_args['_ansible_version'] = __version__
|
|
|
|
|
2016-06-10 17:48:54 +02:00
|
|
|
# give the module information about its name
|
2016-12-19 22:54:31 +01:00
|
|
|
module_args['_ansible_module_name'] = module_name
|
2016-06-10 17:48:54 +02:00
|
|
|
|
2016-05-13 05:30:05 +02:00
|
|
|
# set the syslog facility to be used in the module
|
|
|
|
module_args['_ansible_syslog_facility'] = task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY)
|
|
|
|
|
|
|
|
# let module know about filesystems that selinux treats specially
|
|
|
|
module_args['_ansible_selinux_special_fs'] = C.DEFAULT_SELINUX_SPECIAL_FS
|
|
|
|
|
2016-12-15 21:47:29 +01:00
|
|
|
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True):
    '''
    Transfer and run a module along with its arguments.

    :kwarg module_name: module to run; defaults to the task's action
    :kwarg module_args: arguments for the module; defaults to the task's args
    :kwarg tmp: existing remote temp directory to reuse; one is created on
        demand when needed and not supplied
    :kwarg task_vars: task variables (used for the connection user and the
        syslog facility lookup via _update_module_args)
    :kwarg persist_files: when True, the remote temp dir is not removed
    :kwarg delete_remote_tmp: when False, the remote temp dir is kept
    :returns: the parsed module result dict (with 'stdout_lines' pre-split)
    :raises AnsibleError: if a non-binary module has no interpreter line
    '''
    if task_vars is None:
        task_vars = dict()

    # if a module name was not specified for this execution, use
    # the action from the task
    if module_name is None:
        module_name = self._task.action
    if module_args is None:
        module_args = self._task.args

    # Get the connection user for permission checks
    remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

    # inject the internal '_ansible_*' control parameters (check mode, no_log, ...)
    self._update_module_args(module_name, module_args, task_vars)

    (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
    display.vvv("Using module file %s" % module_path)
    if not shebang and module_style != 'binary':
        # binary modules have no interpreter line; everything else must
        raise AnsibleError("module (%s) is missing interpreter line" % module_name)

    # a remote tmp path may be necessary and not already created
    remote_module_path = None
    args_file_path = None
    if not tmp and self._late_needs_tmp_path(tmp, module_style):
        tmp = self._make_tmp_path(remote_user)

    # decide whether the module must be written to the remote side: anything
    # that cannot be pipelined (old-style/binary modules, no pipelining
    # support or setting, keeping remote files, or become via 'su')
    if tmp and \
        (module_style != 'new' or \
        not self._connection.has_pipelining or \
        not self._play_context.pipelining or \
        C.DEFAULT_KEEP_REMOTE_FILES or \
        self._play_context.become_method == 'su'):
        remote_module_filename = self._connection._shell.get_remote_filename(module_path)
        remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a temp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmp, 'args')

    if remote_module_path or module_style != 'new':
        display.debug("transferring module to remote %s" % remote_module_path)
        if module_style == 'binary':
            # binary modules are copied from disk; others are transferred
            # from the in-memory (possibly rewritten) module_data
            self._transfer_file(module_path, remote_module_path)
        else:
            self._transfer_data(remote_module_path, module_data)
        if module_style == 'old':
            # we need to dump the module args to a k=v string in a file on
            # the remote system, which can be read and parsed by the module
            args_data = ""
            for k,v in iteritems(module_args):
                args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
            self._transfer_data(args_file_path, args_data)
        elif module_style in ('non_native_want_json', 'binary'):
            # these styles read their arguments as JSON from a file
            self._transfer_data(args_file_path, json.dumps(module_args))
        display.debug("done transferring module to remote")

    environment_string = self._compute_environment_string()

    remote_files = None

    if args_file_path:
        remote_files = tmp, remote_module_path, args_file_path
    elif remote_module_path:
        remote_files = tmp, remote_module_path

    # Fix permissions of the tmp path and tmp files. This should be
    # called after all files have been transferred.
    if remote_files:
        self._fixup_perms2(remote_files, remote_user)

    cmd = ""
    in_data = None

    if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES and module_style == 'new':
        # pipelining: feed the module source to the interpreter on stdin
        in_data = module_data
    else:
        if remote_module_path:
            cmd = remote_module_path

        rm_tmp = None
        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
            if not self._play_context.become or self._play_context.become_user == 'root':
                # not sudoing or sudoing to root, so can cleanup files in the same step
                rm_tmp = tmp

        cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp)
        cmd = cmd.strip()

    sudoable = True
    if module_name == "accelerate":
        # always run the accelerate module as the user
        # specified in the play, not the sudo_user
        sudoable = False

    res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

    # when becoming a non-root user the inline cleanup above was skipped;
    # remove the temp dir as the original user in a second step
    if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
        if self._play_context.become and self._play_context.become_user != 'root':
            # not sudoing to root, so maybe can't delete files as that other user
            # have to clean up temp files as original user in a second step
            tmp_rm_cmd = self._connection._shell.remove(tmp, recurse=True)
            tmp_rm_res = self._low_level_execute_command(tmp_rm_cmd, sudoable=False)
            tmp_rm_data = self._parse_returned_data(tmp_rm_res)
            if tmp_rm_data.get('rc', 0) != 0:
                display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'),
                    tmp_rm_res.get('stderr', 'No error string available.')))

    # parse the main result
    data = self._parse_returned_data(res)

    # pre-split stdout into lines, if stdout is in the data and there
    # isn't already a stdout_lines value there
    if 'stdout' in data and 'stdout_lines' not in data:
        data['stdout_lines'] = data.get('stdout', u'').splitlines()

    display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
    return data
|
|
|
|
|
2017-01-12 20:54:40 +01:00
|
|
|
def _remove_internal_keys(self, data):
    '''
    Strip internal bookkeeping keys ('_ansible_*' and any key listed in
    C.INTERNAL_RESULT_KEYS) out of a module result dict, in place, warning
    about each one removed.
    '''
    # snapshot the offending keys first, since we mutate the dict below
    internal = [k for k in data if k.startswith('_ansible_') or k in C.INTERNAL_RESULT_KEYS]
    for key in internal:
        display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data.pop(key)))
|
|
|
|
|
2016-12-13 18:14:47 +01:00
|
|
|
def _clean_returned_data(self, data):
    '''
    Remove reserved/dangerous keys from a returned facts dict, in place,
    so a module cannot override connection settings or internal variables
    for subsequent tasks.
    '''
    remove_keys = set()
    # snapshot of the keys present; removal happens at the end
    fact_keys = set(data.keys())
    # first we add all of our magic variable names to the set of
    # keys we want to remove from facts
    for magic_var in MAGIC_VARIABLE_MAPPING:
        remove_keys.update(fact_keys.intersection(MAGIC_VARIABLE_MAPPING[magic_var]))
    # next we remove any connection plugin specific vars
    for conn_path in self._shared_loader_obj.connection_loader.all(path_only=True):
        try:
            # plugin file basename (without extension) is the connection name,
            # e.g. 'ssh' -> matches 'ansible_ssh_*' fact keys
            conn_name = os.path.splitext(os.path.basename(conn_path))[0]
            re_key = re.compile('^ansible_%s_' % conn_name)
            for fact_key in fact_keys:
                if re_key.match(fact_key):
                    remove_keys.add(fact_key)
        except AttributeError:
            # best-effort: skip any plugin path we cannot process
            pass

    # remove some KNOWN keys
    for hard in C.RESTRICTED_RESULT_KEYS + C.INTERNAL_RESULT_KEYS:
        if hard in fact_keys:
            remove_keys.add(hard)

    # finally, we search for interpreter keys to remove
    re_interp = re.compile('^ansible_.*_interpreter$')
    for fact_key in fact_keys:
        if re_interp.match(fact_key):
            remove_keys.add(fact_key)
    # then we remove them (except for ssh host keys)
    for r_key in remove_keys:
        if not r_key.startswith('ansible_ssh_host_key_'):
            display.warning("Removed restricted key from module data: %s = %s" % (r_key, data[r_key]))
            del data[r_key]

    # also drop any '_ansible_*' / internal result keys
    self._remove_internal_keys(data)
|
|
|
|
|
2016-03-30 18:45:21 +02:00
|
|
|
def _parse_returned_data(self, res):
    '''
    Turn the raw output of a module run (a dict with 'stdout' and optionally
    'stderr'/'rc', as produced by _low_level_execute_command) into a parsed
    result dict.  If stdout is not valid JSON, return a 'failed' result
    carrying the raw output instead of raising.
    '''
    try:
        # drop any non-JSON noise surrounding the module's JSON output
        filtered_output, warnings = _filter_non_json_lines(res.get('stdout', u''))
        for w in warnings:
            display.warning(w)

        data = json.loads(filtered_output)
        # strip internal keys first, then mark the result as parsed
        # (doing it after would strip the '_ansible_parsed' flag itself)
        self._remove_internal_keys(data)
        data['_ansible_parsed'] = True

        if 'ansible_facts' in data and isinstance(data['ansible_facts'], dict):
            # sanitize facts and mark them unsafe for templating
            self._clean_returned_data(data['ansible_facts'])
            data['ansible_facts'] = wrap_var(data['ansible_facts'])
    except ValueError:
        # not valid json, lets try to capture error
        data = dict(failed=True, _ansible_parsed=False)
        data['msg'] = "MODULE FAILURE"
        data['module_stdout'] = res.get('stdout', u'')
        if 'stderr' in res:
            data['module_stderr'] = res['stderr']
            if res['stderr'].startswith(u'Traceback'):
                # surface the remote traceback as the exception text
                data['exception'] = res['stderr']
        if 'rc' in res:
            data['rc'] = res['rc']
    return data
|
|
|
|
|
2016-09-07 07:54:17 +02:00
|
|
|
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_or_replace'):
    '''
    This is the function which executes the low level shell command, which
    may be commands to create/remove directories for temporary files, or to
    run the module code or python directly when pipelining.

    :kwarg sudoable: wrap the command with the become method when the play
        context requests privilege escalation
    :kwarg in_data: data to feed to the command on stdin (module source
        when pipelining)
    :kwarg executable: shell to run the command under; defaults to the play
        context's executable when the connection allows one
    :kwarg encoding_errors: If the value returned by the command isn't
        utf-8 then we have to figure out how to transform it to unicode.
        If the value is just going to be displayed to the user (or
        discarded) then the default of 'replace' is fine.  If the data is
        used as a key or is going to be written back out to a file
        verbatim, then this won't work.  May have to use some sort of
        replacement strategy (python3 could use surrogateescape)
    :returns: dict with 'rc', 'stdout', 'stdout_lines' and 'stderr'
    '''

    display.debug("_low_level_execute_command(): starting")
    if not cmd:
        # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
        display.debug("_low_level_execute_command(): no command, exiting")
        return dict(stdout='', stderr='', rc=254)

    allow_same_user = C.BECOME_ALLOW_SAME_USER
    same_user = self._play_context.become_user == self._play_context.remote_user
    if sudoable and self._play_context.become and (allow_same_user or not same_user):
        display.debug("_low_level_execute_command(): using become for this command")
        cmd = self._play_context.make_become_cmd(cmd, executable=executable)

    if self._connection.allow_executable:
        if executable is None:
            executable = self._play_context.executable
            # mitigation for SSH race which can drop stdout (https://github.com/ansible/ansible/issues/13876)
            # only applied for the default executable to avoid interfering with the raw action
            cmd = self._connection._shell.append_command(cmd, 'sleep 0')
        if executable:
            # re-wrap the whole command so it runs under the chosen shell
            cmd = executable + ' -c ' + shlex_quote(cmd)

    display.debug("_low_level_execute_command(): executing: %s" % (cmd,))

    # Change directory to basedir of task for command execution when connection is local
    if self._connection.transport == 'local':
        cwd = os.getcwd()
        os.chdir(self._loader.get_basedir())
    try:
        rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)
    finally:
        # always restore the working directory, even if exec_command raised
        if self._connection.transport == 'local':
            os.chdir(cwd)

    # stdout and stderr may be either a file-like or a bytes object.
    # Convert either one to a text type
    if isinstance(stdout, binary_type):
        out = to_text(stdout, errors=encoding_errors)
    elif not isinstance(stdout, text_type):
        # file-like: drain it fully before decoding
        out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
    else:
        out = stdout

    if isinstance(stderr, binary_type):
        err = to_text(stderr, errors=encoding_errors)
    elif not isinstance(stderr, text_type):
        err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
    else:
        err = stderr

    # some connection plugins may return None for rc; treat that as success
    if rc is None:
        rc = 0

    # be sure to remove the BECOME-SUCCESS message now
    out = self._strip_success_message(out)

    display.debug(u"_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, out, err))
    return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err)
|
2015-07-16 01:47:59 +02:00
|
|
|
|
2015-09-24 22:29:36 +02:00
|
|
|
def _get_diff_data(self, destination, source, task_vars, source_file=True):
    '''
    Build the before/after diff dict for a file operation.

    :arg destination: remote path whose current content is the 'before' side
    :arg source: local file path (when source_file is True) or literal
        content (when source_file is False) used as the 'after' side
    :arg task_vars: variables passed through to the helper module runs
    :kwarg source_file: whether 'source' is a path to read or inline content
    :returns: diff dict with 'before'/'after' (and header/size/binary flags)
    :raises AnsibleError: on unknown slurp encoding or local read failure
    '''

    diff = {}
    display.debug("Going to peek to see if file has changed permissions")
    peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, diff_peek=True), task_vars=task_vars, persist_files=True)

    if not peek_result.get('failed', False) or peek_result.get('rc', 0) == 0:

        if peek_result.get('state') == 'absent':
            diff['before'] = ''
        elif peek_result.get('appears_binary'):
            diff['dst_binary'] = 1
        elif peek_result.get('size') and C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
            # too large to diff; record the limit instead of the content
            diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
        else:
            display.debug("Slurping the file %s" % source)
            dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), task_vars=task_vars, persist_files=True)
            if 'content' in dest_result:
                dest_contents = dest_result['content']
                if dest_result['encoding'] == 'base64':
                    dest_contents = base64.b64decode(dest_contents)
                else:
                    raise AnsibleError("unknown encoding in content option, failed: %s" % dest_result)
                diff['before_header'] = destination
                diff['before'] = dest_contents

        if source_file:
            st = os.stat(source)
            if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
                diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
            else:
                display.debug("Reading local copy of the file %s" % source)
                try:
                    # use a context manager so the file handle is always
                    # closed (the previous code leaked the open handle)
                    with open(source) as src:
                        src_contents = src.read()
                except Exception as e:
                    raise AnsibleError("Unexpected error while reading source (%s) for diff: %s " % (source, str(e)))

                if "\x00" in src_contents:
                    # NUL byte means binary content; don't emit a text diff
                    diff['src_binary'] = 1
                else:
                    diff['after_header'] = source
                    diff['after'] = src_contents
        else:
            display.debug("source of file passed in")
            diff['after_header'] = 'dynamically generated'
            diff['after'] = source

    # honor no_log by scrubbing both sides of the diff
    if self._play_context.no_log:
        if 'before' in diff:
            diff["before"] = ""
        if 'after' in diff:
            diff["after"] = " [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]\n"

    return diff
|
2016-06-28 23:23:30 +02:00
|
|
|
|
|
|
|
def _find_needle(self, dirname, needle):
    '''
    find a needle in haystack of paths, optionally using 'dirname' as a subdir.
    This will build the ordered list of paths to search and pass them to dwim
    to get back the first existing file found.
    '''
    # the task's search path already accounts for playbook basedirs,
    # so dwim does not need any extra handling here
    search_stack = self._task.get_search_path()
    found = self._loader.path_dwim_relative_stack(search_stack, dirname, needle)

    if found is None:
        raise AnsibleError("Unable to find '%s' in expected paths." % to_native(needle))

    return found
|