Mirror of https://github.com/ansible-collections/community.general.git
Synced 2024-09-14 20:13:21 +02:00
Temporary (#31677)
* allow shells to have per host options, remote_tmp
  - added language to shell
  - removed module lang setting from general as plugins have it now
  - use get to avoid bad powershell plugin
  - more resilient tmp discovery, fall back to `pwd`
  - add shell to docs
  - fixed options for when frags are only options
  - added shell set ops in t_e and fixed option frags
  - normalize tmp dir usage: pass tmpdir/tmp/temp options as env var to commands, making it default for tempfile; adjusted ansiballz tmpdir; default local tempfile usage to the configured local tmp; set env temp in action
  - add options to powershell
  - shift temporary to internal envvar/params
  - ensure tempdir is set if we pass var
  - ensure basic and url use expected tempdir
  - ensure localhost uses local tmp
  - give /var/tmp priority, less perms issues
  - more consistent tempfile mgmt for ansiballz
  - made async_dir configurable
  - better action handling, allow for finally rm tmp
  - fixed tmp issue and no more tempdir in ballz
  - hostvarize world readable and admin users
  - always set shell tempdir
  - added comment to discourage use of exception/flow control
* Mostly revert expand_user as it's not quite working. This was an additional feature anyhow. Kept the use of pwd as a fallback but moved it to a second ssh connection. This is not optimal but getting that to work in a single ssh connection was part of the problem holding this up. (cherry picked from commit 395b714120522f15e4c90a346f5e8e8d79213aca)
* fixed script and other action plugins, ensure tmpdir deletion; allow for connections that don't support new options (legacy, 3rd party); fixed tests
This commit is contained in:
parent eca3fcd214
commit bbd6b8bb42

44 changed files with 1010 additions and 972 deletions
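The headline change is that shell plugins can now carry their own configurable options (remote_tmp, admin_users, allow_world_readable_temp and friends), settable per host. As a rough, hypothetical sketch of how a shell plugin might declare such an option so that it becomes resolvable from the ANSIBLE_REMOTE_TEMP environment variable, the [defaults] ini section, or a per-host ansible_remote_tmp variable (names below are illustrative, not copied from the shipped plugins):

    # hypothetical shell plugin stub; only the option wiring is the point here
    DOCUMENTATION = '''
    name: sketch_sh
    short_description: illustrative POSIX-like shell plugin
    options:
      remote_tmp:
        description: Base directory for temporary files created on the target.
        default: '~/.ansible/tmp'
        env:
          - name: ANSIBLE_REMOTE_TEMP
        ini:
          - section: defaults
            key: remote_tmp
        vars:
          - name: ansible_remote_tmp
    '''

    from ansible.plugins.shell import ShellBase


    class ShellModule(ShellBase):
        # options declared above become readable via self.get_option('remote_tmp')
        SHELL_FAMILY = 'sh'

The diff below wires exactly this kind of lookup into the task executor and the action base class.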
@@ -49,6 +49,7 @@ from six import iteritems, string_types
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes
from ansible.plugins.loader import fragment_loader
from ansible.utils import plugin_docs
from ansible.utils.display import Display

@@ -235,7 +236,7 @@ def get_plugin_info(module_dir, limit_to=None, verbose=False):
primary_category = module_categories[0]

# use ansible core library to parse out doc metadata YAML and plaintext examples
doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, verbose=verbose)
doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose)

# save all the information
module_info[module] = {'path': module_path,
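The recurring mechanical change in the docs tooling and CLIs is that plugin_docs.get_docstring() is now passed the fragment loader so that documentation fragments get resolved. A minimal sketch of a caller, using the same calls that appear in the hunks above and below:

    from ansible.plugins.loader import module_loader, fragment_loader
    from ansible.utils import plugin_docs

    # resolve a module's source file, then parse its docs with fragments expanded
    in_path = module_loader.find_plugin('copy')
    if in_path:
        doc, examples, returndocs, metadata = plugin_docs.get_docstring(in_path, fragment_loader)
        print(doc['short_description'])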
@@ -22,7 +22,7 @@ and guidelines:

* In the event of failure, a key of 'failed' should be included, along with a string explanation in 'msg'. Modules that raise tracebacks (stacktraces) are generally considered 'poor' modules, though Ansible can deal with these returns and will automatically convert anything unparseable into a failed result. If you are using the AnsibleModule common Python code, the 'failed' element will be included for you automatically when you call 'fail_json'.

* Return codes from modules are actually not significant, but continue on with 0=success and non-zero=failure for reasons of future proofing.
* Return codes from modules are used if 'failed' is missing, 0=success and non-zero=failure.

* As results from many hosts will be aggregated at once, modules should return only relevant output. Returning the entire contents of a log file is generally bad form.

@@ -194,5 +194,4 @@ Avoid creating a module that does the work of other modules; this leads to code

Avoid creating 'caches'. Ansible is designed without a central server or authority, so you cannot guarantee it will not run with different permissions, options or locations. If you need a central authority, have it on top of Ansible (for example, using bastion/cm/ci server or tower); do not try to build it into modules.

Always use the hacking/test-module script when developing modules and it will warn
you about these kind of things.
Always use the hacking/test-module script when developing modules and it will warn you about these kind of things.
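As an illustration of the 'failed'/'msg' convention described in that guideline, a minimal module built on the AnsibleModule helper might look like the following (a sketch; the module and its 'path' argument are invented for the example):

    #!/usr/bin/python
    from ansible.module_utils.basic import AnsibleModule


    def main():
        # 'path' is a hypothetical argument used only for this illustration
        module = AnsibleModule(argument_spec=dict(path=dict(type='str', required=True)))

        if not module.params['path'].startswith('/'):
            # fail_json adds failed=True to the result for you
            module.fail_json(msg="path must be absolute")

        # on success, return only the relevant output, not a full log dump
        module.exit_json(changed=False, path=module.params['path'])


    if __name__ == '__main__':
        main()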
@@ -44,7 +44,7 @@ from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
from ansible.plugins.loader import module_loader
from ansible.plugins.loader import module_loader, fragment_loader
from ansible.utils import plugin_docs
from ansible.utils.color import stringc

@@ -356,7 +356,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
if module_name in self.modules:
in_path = module_loader.find_plugin(module_name)
if in_path:
oc, a, _, _ = plugin_docs.get_docstring(in_path)
oc, a, _, _ = plugin_docs.get_docstring(in_path, fragment_loader)
if oc:
display.display(oc['short_description'])
display.display('Parameters:')

@@ -388,7 +388,7 @@ class ConsoleCLI(CLI, cmd.Cmd):

def module_args(self, module_name):
in_path = module_loader.find_plugin(module_name)
oc, a, _, _ = plugin_docs.get_docstring(in_path)
oc, a, _, _ = plugin_docs.get_docstring(in_path, fragment_loader)
return list(oc['options'].keys())

def run(self):
@ -29,8 +29,9 @@ from ansible.module_utils._text import to_native
|
|||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, \
|
||||
vars_loader, connection_loader, strategy_loader, inventory_loader
|
||||
from ansible.utils import plugin_docs
|
||||
vars_loader, connection_loader, strategy_loader, inventory_loader, shell_loader, fragment_loader
|
||||
from ansible.utils.plugin_docs import BLACKLIST, get_docstring
|
||||
|
||||
try:
|
||||
from __main__ import display
|
||||
except ImportError:
|
||||
|
@ -71,7 +72,7 @@ class DocCLI(CLI):
|
|||
help='**For internal testing only** Show documentation for all plugins.')
|
||||
self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
|
||||
help='Choose which plugin type (defaults to "module")',
|
||||
choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy', 'vars'])
|
||||
choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'shell', 'strategy', 'vars'])
|
||||
|
||||
super(DocCLI, self).parse()
|
||||
|
||||
|
@ -101,6 +102,8 @@ class DocCLI(CLI):
|
|||
loader = vars_loader
|
||||
elif plugin_type == 'inventory':
|
||||
loader = inventory_loader
|
||||
elif plugin_type == 'shell':
|
||||
loader = shell_loader
|
||||
else:
|
||||
loader = module_loader
|
||||
|
||||
|
@ -146,7 +149,6 @@ class DocCLI(CLI):
|
|||
# process command line list
|
||||
text = ''
|
||||
for plugin in self.args:
|
||||
|
||||
try:
|
||||
# if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
|
||||
filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
|
||||
|
@ -158,7 +160,7 @@ class DocCLI(CLI):
|
|||
continue
|
||||
|
||||
try:
|
||||
doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
|
||||
doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
|
||||
except:
|
||||
display.vvv(traceback.format_exc())
|
||||
display.error("%s %s has a documentation error formatting or is missing documentation." % (plugin_type, plugin))
|
||||
|
@ -229,7 +231,7 @@ class DocCLI(CLI):
|
|||
plugin = os.path.splitext(plugin)[0] # removes the extension
|
||||
plugin = plugin.lstrip('_') # remove underscore from deprecated plugins
|
||||
|
||||
if plugin not in plugin_docs.BLACKLIST.get(bkey, ()):
|
||||
if plugin not in BLACKLIST.get(bkey, ()):
|
||||
self.plugin_list.add(plugin)
|
||||
display.vvvv("Added %s" % plugin)
|
||||
|
||||
|
@ -254,7 +256,7 @@ class DocCLI(CLI):
|
|||
|
||||
doc = None
|
||||
try:
|
||||
doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(filename)
|
||||
doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader)
|
||||
except:
|
||||
display.warning("%s has a documentation formatting error" % plugin)
|
||||
|
||||
|
|
|
@@ -1,18 +1,6 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
---
ALLOW_WORLD_READABLE_TMPFILES:
  name: Allow world readable temporary files
  default: False
  description:
    - This makes the temporary files created on the machine to be world readable and will issue a warning instead of failing the task.
    - It is useful when becoming an unprivileged user.
  env: []
  ini:
    - {key: allow_world_readable_tmpfiles, section: defaults}
  type: boolean
  yaml: {key: defaults.allow_world_readable_tmpfiles}
  version_added: "2.1"
ANSIBLE_COW_SELECTION:
  name: Cowsay filter selection
  default: default

@@ -744,15 +732,6 @@ DEFAULT_MODULE_COMPRESSION:
    - {key: module_compression, section: defaults}
  # vars:
  # - name: ansible_module_compression
DEFAULT_MODULE_LANG:
  name: Target language environment
  default: "{{CONTROLER_LANG}}"
  description: "Language locale setting to use for modules when they execute on the target, if empty it defaults to 'en_US.UTF-8'"
  env: [{name: ANSIBLE_MODULE_LANG}]
  ini:
    - {key: module_lang, section: defaults}
  # vars:
  # - name: ansible_module_lang
DEFAULT_MODULE_NAME:
  name: Default adhoc module
  default: command

@@ -768,16 +747,6 @@ DEFAULT_MODULE_PATH:
  ini:
    - {key: library, section: defaults}
  type: pathspec
DEFAULT_MODULE_SET_LOCALE:
  name: Target locale
  default: False
  description: Controls if we set locale for modules when executing on the target.
  env: [{name: ANSIBLE_MODULE_SET_LOCALE}]
  ini:
    - {key: module_set_locale, section: defaults}
  type: boolean
  # vars:
  # - name: ansible_module_locale
DEFAULT_MODULE_UTILS_PATH:
  name: Module Utils Path
  description: Colon separated paths in which Ansible will search for Module utils files, which are shared by modules.

@@ -851,17 +820,6 @@ DEFAULT_REMOTE_PORT:
    - {key: remote_port, section: defaults}
  type: integer
  yaml: {key: defaults.remote_port}
DEFAULT_REMOTE_TMP:
  name: Target temporary directory
  default: ~/.ansible/tmp
  description:
    - Temporary directory to use on targets when executing tasks.
    - In some cases Ansible may still choose to use a system temporary dir to avoid permission issues.
  env: [{name: ANSIBLE_REMOTE_TEMP}]
  ini:
    - {key: remote_tmp, section: defaults}
  vars:
    - name: ansible_remote_tmp
DEFAULT_REMOTE_USER:
  name: Login/Remote User
  default:
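With the vars entry added above, the long-standing remote_tmp setting is now resolvable per host through the ansible_remote_tmp variable, in addition to the ANSIBLE_REMOTE_TEMP environment variable and the [defaults] remote_tmp ini key. The resolved values are still exposed to Python code as constants, which the hunks below rely on (a small sketch):

    from ansible import constants as C

    # resolved defaults, overridable via ANSIBLE_REMOTE_TEMP / [defaults] remote_tmp /
    # the per-host ansible_remote_tmp variable declared in the config entry above
    print(C.DEFAULT_REMOTE_TMP)   # e.g. ~/.ansible/tmp
    print(C.DEFAULT_LOCAL_TMP)    # per-run local temp dir used on the controller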
@@ -5,7 +5,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os  # used to set lang and for backwards compat get_config
import os

from ast import literal_eval
from jinja2 import Template

@@ -114,7 +114,6 @@ MAGIC_VARIABLE_MAPPING = dict(
module_compression=('ansible_module_compression', ),
shell=('ansible_shell_type', ),
executable=('ansible_shell_executable', ),
remote_tmp_dir=('ansible_remote_tmp', ),

# connection common
remote_addr=('ansible_ssh_host', 'ansible_host'),
@@ -252,11 +252,38 @@ class AnsibleFileNotFound(AnsibleRuntimeError):
suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)


class AnsibleActionSkip(AnsibleRuntimeError):
# These Exceptions are temporary, using them as flow control until we can get a better solution.
# DO NOT USE as they will probably be removed soon.
class AnsibleAction(AnsibleRuntimeError):
''' Base Exception for Action plugin flow control '''

def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):

super(AnsibleAction, self).__init__(message=message, obj=obj, show_content=show_content,
suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)
if result is None:
self.result = {}
else:
self.result = result


class AnsibleActionSkip(AnsibleAction):
''' an action runtime skip'''
pass

def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
super(AnsibleActionSkip, self).__init__(message=message, obj=obj, show_content=show_content,
suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
self.result.update({'skipped': True, 'msg': message})


class AnsibleActionFail(AnsibleRuntimeError):
class AnsibleActionFail(AnsibleAction):
''' an action runtime failure'''
def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
super(AnsibleActionFail, self).__init__(message=message, obj=obj, show_content=show_content,
suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
self.result.update({'failed': True, 'msg': message})


class AnsibleActionDone(AnsibleAction):
''' an action runtime early exit'''
pass
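These flow-control exceptions carry a partial result dict, which lets an action plugin bail out from deep inside run() and still remove its temporary directory in a finally block; later hunks in this commit rewrite the assemble action around exactly that idiom. A condensed sketch of the pattern (the plugin body is invented; the exception classes and result handling come from the code above):

    from ansible.errors import AnsibleAction, AnsibleActionDone, AnsibleActionFail
    from ansible.plugins.action import ActionBase


    class ActionModule(ActionBase):

        def run(self, tmp=None, task_vars=None):
            result = super(ActionModule, self).run(tmp, task_vars)
            try:
                if not self._task.args.get('src'):
                    raise AnsibleActionFail("src is required")   # surfaces as failed=True/msg
                if self._task.args.get('remote_src'):
                    result.update(self._execute_module(task_vars=task_vars))
                    raise AnsibleActionDone()                    # early, successful exit
                # ... normal processing would go here ...
            except AnsibleAction as e:
                result.update(e.result)
            finally:
                # always clean up the shell plugin's tempdir, whatever path we took
                self._remove_tmp_path(self._connection._shell.tempdir)
            return result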
@@ -486,6 +486,7 @@ class TaskExecutor:
self._connection._play_context = self._play_context

self._set_connection_options(variables, templar)
self._set_shell_options(variables, templar)

# get handler
self._handler = self._get_action_handler(connection=self._connection, templar=templar)

@@ -774,6 +775,15 @@ class TaskExecutor:

# set options with 'templated vars' specific to this plugin
self._connection.set_options(var_options=options)
self._set_shell_options(final_vars, templar)

def _set_shell_options(self, variables, templar):
option_vars = C.config.get_plugin_vars('shell', self._connection._shell._load_name)
options = {}
for k in option_vars:
if k in variables:
options[k] = templar.template(variables[k])
self._connection._shell.set_options(var_options=options)

def _get_action_handler(self, connection, templar):
'''
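The new _set_shell_options() helper follows the same recipe already used for connection options: ask the config system which variables the loaded shell plugin accepts, template only the ones present in the host/task vars, and hand them to the plugin. In isolation the recipe looks roughly like this (the helper name and its placement are only for illustration):

    from ansible import constants as C


    def collect_plugin_options(plugin_type, plugin_name, variables, templar):
        # which ansible_* variables does this plugin declare?
        option_vars = C.config.get_plugin_vars(plugin_type, plugin_name)
        options = {}
        for k in option_vars:
            if k in variables:
                # template host/task vars before handing them to the plugin
                options[k] = templar.template(variables[k])
        return options

    # usage inside the executor (sketch):
    # shell_opts = collect_plugin_options('shell', connection._shell._load_name, variables, templar)
    # connection._shell.set_options(var_options=shell_opts)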
@@ -97,6 +97,7 @@ class InventoryData(object):
'You can correct this by setting ansible_python_interpreter for localhost')
new_host.set_variable("ansible_python_interpreter", py_interp)
new_host.set_variable("ansible_connection", 'local')
new_host.set_variable("ansible_remote_tmp", C.DEFAULT_LOCAL_TMP)

self.localhost = new_host
@@ -37,9 +37,25 @@ FILE_ATTRIBUTES = {
'Z': 'compresseddirty',
}

# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here can
# be used to do many common tasks
PASS_VARS = {
'check_mode': 'check_mode',
'debug': '_debug',
'diff': '_diff',
'module_name': '_name',
'no_log': 'no_log',
'selinux_special_fs': '_selinux_special_fs',
'shell_executable': '_shell',
'socket': '_socket_path',
'syslog_facility': '_syslog_facility',
'verbosity': '_verbosity',
'version': 'ansible_version',
}

PASS_BOOLS = ('no_log', 'debug', 'diff')

# Ansible modules can be written in any language.
# The functions available here can be used to do many common tasks,
# to simplify development of Python modules.

import locale
import os

@@ -90,7 +106,7 @@ NoneType = type(None)
try:
from collections.abc import KeysView
SEQUENCETYPE = (Sequence, frozenset, KeysView)
except:
except ImportError:
SEQUENCETYPE = (Sequence, frozenset)

try:

@@ -826,11 +842,12 @@ class AnsibleModule(object):
self._clean = {}

self.aliases = {}
self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log', '_ansible_debug', '_ansible_diff', '_ansible_verbosity',
'_ansible_selinux_special_fs', '_ansible_module_name', '_ansible_version', '_ansible_syslog_facility',
'_ansible_socket', '_ansible_shell_executable']
self._legal_inputs = ['_ansible_%s' % k for k in PASS_VARS]
self._options_context = list()

# set tempdir to remote tmp
self.tempdir = os.environ.get('ANSIBLE_REMOTE_TEMP', None)

if add_file_common_args:
for k, v in FILE_COMMON_ARGUMENTS.items():
if k not in self.argument_spec:
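PASS_VARS centralizes the mapping from internal `_ansible_*` task parameters to AnsibleModule attributes, and a later hunk in this commit replaces the long elif chain in the argument-checking code with a loop driven by this table. The idea, reduced to a standalone sketch (a toy class and bool() stand in for the real module and its boolean() helper):

    PASS_VARS = {'check_mode': 'check_mode', 'no_log': 'no_log', 'debug': '_debug', 'diff': '_diff'}
    PASS_BOOLS = ('no_log', 'debug', 'diff')


    class FakeModule(object):
        """Stand-in object used only to illustrate the attribute mapping."""

        def _apply_internal_params(self, params):
            for k in list(params):
                if not k.startswith('_ansible_'):
                    continue
                key = k.replace('_ansible_', '')
                value = params.pop(k)               # internal keys never reach module code
                if key in PASS_VARS:
                    attr = PASS_VARS[key]
                    setattr(self, attr, bool(value) if key in PASS_BOOLS else value)


    m = FakeModule()
    m._apply_internal_params({'_ansible_check_mode': True, '_ansible_no_log': 0, 'path': '/tmp'})
    print(m.check_mode, m.no_log)   # True False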
@ -1634,44 +1651,17 @@ class AnsibleModule(object):
|
|||
|
||||
for (k, v) in list(param.items()):
|
||||
|
||||
if k == '_ansible_check_mode' and v:
|
||||
self.check_mode = True
|
||||
|
||||
elif k == '_ansible_no_log':
|
||||
self.no_log = self.boolean(v)
|
||||
|
||||
elif k == '_ansible_debug':
|
||||
self._debug = self.boolean(v)
|
||||
|
||||
elif k == '_ansible_diff':
|
||||
self._diff = self.boolean(v)
|
||||
|
||||
elif k == '_ansible_verbosity':
|
||||
self._verbosity = v
|
||||
|
||||
elif k == '_ansible_selinux_special_fs':
|
||||
self._selinux_special_fs = v
|
||||
|
||||
elif k == '_ansible_syslog_facility':
|
||||
self._syslog_facility = v
|
||||
|
||||
elif k == '_ansible_version':
|
||||
self.ansible_version = v
|
||||
|
||||
elif k == '_ansible_module_name':
|
||||
self._name = v
|
||||
|
||||
elif k == '_ansible_socket':
|
||||
self._socket_path = v
|
||||
|
||||
elif k == '_ansible_shell_executable' and v:
|
||||
self._shell = v
|
||||
|
||||
elif check_invalid_arguments and k not in legal_inputs:
|
||||
if check_invalid_arguments and k not in legal_inputs:
|
||||
unsupported_parameters.add(k)
|
||||
elif k.startswith('_ansible_'):
|
||||
# handle setting internal properties from internal ansible vars
|
||||
key = k.replace('_ansible_', '')
|
||||
if key in PASS_BOOLS:
|
||||
setattr(self, PASS_VARS[key], self.boolean(v))
|
||||
else:
|
||||
setattr(self, PASS_VARS[key], v)
|
||||
|
||||
# clean up internal params:
|
||||
if k.startswith('_ansible_'):
|
||||
# clean up internal params:
|
||||
del self.params[k]
|
||||
|
||||
if unsupported_parameters:
|
||||
|
@@ -2202,7 +2192,7 @@ class AnsibleModule(object):
except:
# we don't have access to the cwd, probably because of sudo.
# Try and move to a neutral location to prevent errors
for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
for cwd in [self.tempdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
if os.access(cwd, os.F_OK | os.R_OK):
os.chdir(cwd)

@@ -973,6 +973,9 @@ def fetch_url(module, url, data=None, headers=None, method=None,
if not HAS_URLPARSE:
module.fail_json(msg='urlparse is not installed')

# ensure we use proper tempdir
tempfile.tempdir = module.tempdir

# Get validate_certs from the module params
validate_certs = module.params.get('validate_certs', True)
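Assigning the module-level tempfile.tempdir is what makes every later mkstemp()/NamedTemporaryFile() call that does not pass dir= land inside the configured remote tmp; that is how fetch_url and the basic module helpers pick up ANSIBLE_REMOTE_TEMP without every call site changing. A small, self-contained illustration of the mechanism (directory names are arbitrary):

    import os
    import tempfile

    # pretend the action plugin exported the remote tmp it created
    os.environ['ANSIBLE_REMOTE_TEMP'] = tempfile.mkdtemp(prefix='ansible-remote-')

    # mirror what AnsibleModule.__init__ and fetch_url now do with that value
    tempfile.tempdir = os.environ.get('ANSIBLE_REMOTE_TEMP')

    fd, path = tempfile.mkstemp()   # no dir= needed; honours tempfile.tempdir
    print(path)                     # lives under the ansible-remote-* directory
    os.close(fd)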
|
@ -97,32 +97,32 @@ notes:
|
|||
'''
|
||||
|
||||
EXAMPLES = r'''
|
||||
# Example from Ansible Playbooks
|
||||
- copy:
|
||||
- name: example copying file with owner and permissions
|
||||
copy:
|
||||
src: /srv/myfiles/foo.conf
|
||||
dest: /etc/foo.conf
|
||||
owner: foo
|
||||
group: foo
|
||||
mode: 0644
|
||||
|
||||
# The same example as above, but using a symbolic mode equivalent to 0644
|
||||
- copy:
|
||||
- name: The same example as above, but using a symbolic mode equivalent to 0644
|
||||
copy:
|
||||
src: /srv/myfiles/foo.conf
|
||||
dest: /etc/foo.conf
|
||||
owner: foo
|
||||
group: foo
|
||||
mode: u=rw,g=r,o=r
|
||||
|
||||
# Another symbolic mode example, adding some permissions and removing others
|
||||
- copy:
|
||||
- name: Another symbolic mode example, adding some permissions and removing others
|
||||
copy:
|
||||
src: /srv/myfiles/foo.conf
|
||||
dest: /etc/foo.conf
|
||||
owner: foo
|
||||
group: foo
|
||||
mode: u+rw,g-wx,o-rwx
|
||||
|
||||
# Copy a new "ntp.conf file into place, backing up the original if it differs from the copied version
|
||||
- copy:
|
||||
- name: Copy a new "ntp.conf file into place, backing up the original if it differs from the copied version
|
||||
copy:
|
||||
src: /mine/ntp.conf
|
||||
dest: /etc/ntp.conf
|
||||
owner: root
|
||||
|
@ -130,33 +130,23 @@ EXAMPLES = r'''
|
|||
mode: 0644
|
||||
backup: yes
|
||||
|
||||
# Copy a new "sudoers" file into place, after passing validation with visudo
|
||||
- copy:
|
||||
- name: Copy a new "sudoers" file into place, after passing validation with visudo
|
||||
copy:
|
||||
src: /mine/sudoers
|
||||
dest: /etc/sudoers
|
||||
validate: /usr/sbin/visudo -cf %s
|
||||
|
||||
# Copy a "sudoers" file on the remote machine for editing
|
||||
- copy:
|
||||
- name: Copy a "sudoers" file on the remote machine for editing
|
||||
copy:
|
||||
src: /etc/sudoers
|
||||
dest: /etc/sudoers.edit
|
||||
remote_src: yes
|
||||
validate: /usr/sbin/visudo -cf %s
|
||||
|
||||
# Create a CSV file from your complete inventory using an inline template
|
||||
- hosts: all
|
||||
tasks:
|
||||
- copy:
|
||||
content: |
|
||||
HOSTNAME;IPADDRESS;FQDN;OSNAME;OSVERSION;PROCESSOR;ARCHITECTURE;MEMORY;
|
||||
{% for host in hostvars %}
|
||||
{% set vars = hostvars[host|string] %}
|
||||
{{ vars.ansible_hostname }};{{ vars.remote_host }};{{ vars.ansible_fqdn }};{{ vars.ansible_distribution }};{{ vars.ansible_distribution_version }};{{ vars.ansible_processor[1] }};{{ vars.ansible_architecture }};{{ (vars.ansible_memtotal_mb/1024)|round|int }}; # NOQA
|
||||
{% endfor %}
|
||||
dest: /some/path/systems.csv
|
||||
backup: yes
|
||||
run_once: yes
|
||||
delegate_to: localhost
|
||||
- name: Copy using the 'content' for inline data
|
||||
copy:
|
||||
content: '# This file was moved to /etc/other.conf'
|
||||
dest: /etc/mine.conf'
|
||||
'''
|
||||
|
||||
RETURN = r'''
|
||||
|
|
|
@@ -28,8 +28,7 @@ options:
required: true
mode:
description:
- if C(status), obtain the status; if C(cleanup), clean up the async job cache
located in C(~/.ansible_async/) for the specified job I(jid).
- if C(status), obtain the status; if C(cleanup), clean up the async job cache (by default in C(~/.ansible_async/)) for the specified job I(jid).
choices: [ "status", "cleanup" ]
default: "status"
notes:

@@ -57,8 +56,10 @@ def main():
mode = module.params['mode']
jid = module.params['jid']

async_dir = os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')

# setup logging directory
logdir = os.path.expanduser("~/.ansible_async")
logdir = os.path.expanduser(async_dir)
log_path = os.path.join(logdir, jid)

if not os.path.exists(log_path):

@@ -216,8 +216,10 @@ if __name__ == '__main__':
cmd = wrapped_module
step = 5

async_dir = os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')

# setup job output directory
jobdir = os.path.expanduser("~/.ansible_async")
jobdir = os.path.expanduser(async_dir)
job_path = os.path.join(jobdir, jid)

if not os.path.exists(jobdir):
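Both the async_status module and the async wrapper now derive the job directory from ANSIBLE_ASYNC_DIR instead of hard-coding ~/.ansible_async, so the two sides agree on where job results live. Resolving a job file then reduces to something like this sketch (the jid value is made up):

    import json
    import os


    def async_job_path(jid):
        # same fallback the wrapper and the status module use
        async_dir = os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
        return os.path.join(os.path.expanduser(async_dir), jid)


    path = async_job_path('123456.7890')
    if os.path.exists(path):
        with open(path) as f:
            print(json.load(f))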
|
@ -49,7 +49,6 @@ except ImportError:
|
|||
|
||||
__all__ = ['PlayContext']
|
||||
|
||||
|
||||
# TODO: needs to be configurable
|
||||
b_SU_PROMPT_LOCALIZATIONS = [
|
||||
to_bytes('Password'),
|
||||
|
@ -136,7 +135,6 @@ class PlayContext(Base):
|
|||
# connection fields, some are inherited from Base:
|
||||
# (connection, port, remote_user, environment, no_log)
|
||||
_remote_addr = FieldAttribute(isa='string')
|
||||
_remote_tmp_dir = FieldAttribute(isa='string', default=C.DEFAULT_REMOTE_TMP)
|
||||
_password = FieldAttribute(isa='string')
|
||||
_timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
|
||||
_connection_user = FieldAttribute(isa='string')
|
||||
|
|
|
@ -1,19 +1,6 @@
|
|||
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
|
||||
# Copyright: (c) 2018, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
@ -65,13 +52,14 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
self._loader = loader
|
||||
self._templar = templar
|
||||
self._shared_loader_obj = shared_loader_obj
|
||||
# Backwards compat: self._display isn't really needed, just import the global display and use that.
|
||||
self._display = display
|
||||
self._cleanup_remote_tmp = False
|
||||
|
||||
self._supports_check_mode = True
|
||||
self._supports_async = False
|
||||
|
||||
# Backwards compat: self._display isn't really needed, just import the global display and use that.
|
||||
self._display = display
|
||||
|
||||
@abstractmethod
|
||||
def run(self, tmp=None, task_vars=None):
|
||||
""" Action Plugins should implement this method to perform their
|
||||
|
@ -99,6 +87,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
elif self._task.async_val and self._play_context.check_mode:
|
||||
raise AnsibleActionFail('check mode and async cannot be used on same task.')
|
||||
|
||||
if not tmp and self._early_needs_tmp_path():
|
||||
self._make_tmp_path()
|
||||
else:
|
||||
self._connection._shell.tempdir = tmp
|
||||
|
||||
return result
|
||||
|
||||
def _remote_file_exists(self, path):
|
||||
|
@ -236,16 +229,20 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
if remote_user is None:
|
||||
remote_user = self._play_context.remote_user
|
||||
|
||||
try:
|
||||
admin_users = self._connection._shell.get_option('admin_users') + [remote_user]
|
||||
except KeyError:
|
||||
admin_users = ['root', remote_user] # plugin does not support admin_users
|
||||
try:
|
||||
remote_tmp = self._connection._shell.get_option('remote_temp')
|
||||
except KeyError:
|
||||
remote_tmp = '~/ansible'
|
||||
|
||||
# deal with tmpdir creation
|
||||
basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
|
||||
use_system_tmp = False
|
||||
|
||||
if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
|
||||
use_system_tmp = True
|
||||
|
||||
tmp_mode = 0o700
|
||||
tmpdir = self._remote_expand_user(self._play_context.remote_tmp_dir, sudoable=False)
|
||||
|
||||
cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode, tmpdir)
|
||||
use_system_tmp = bool(self._play_context.become and self._play_context.become_user not in admin_users)
|
||||
tmpdir = self._remote_expand_user(remote_tmp, sudoable=False)
|
||||
cmd = self._connection._shell.mkdtemp(basefile=basefile, system=use_system_tmp, tmpdir=tmpdir)
|
||||
result = self._low_level_execute_command(cmd, sudoable=False)
|
||||
|
||||
# error handling on this seems a little aggressive?
|
||||
|
@ -287,11 +284,14 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
if rc == '/':
|
||||
raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
|
||||
|
||||
self._connection._shell.tempdir = rc
|
||||
|
||||
if not use_system_tmp:
|
||||
self._connection._shell.env.update({'ANSIBLE_REMOTE_TEMP': self._connection._shell.tempdir})
|
||||
return rc
|
||||
|
||||
def _should_remove_tmp_path(self, tmp_path):
|
||||
'''Determine if temporary path should be deleted or kept by user request/config'''
|
||||
|
||||
return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path
|
||||
|
||||
def _remove_tmp_path(self, tmp_path):
|
||||
|
@ -320,7 +320,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
if isinstance(data, dict):
|
||||
data = jsonify(data)
|
||||
|
||||
afd, afile = tempfile.mkstemp()
|
||||
afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
|
||||
afo = os.fdopen(afd, 'wb')
|
||||
try:
|
||||
data = to_bytes(data, errors='surrogate_or_strict')
|
||||
|
@ -393,7 +393,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
# we have a need for it, at which point we'll have to do something different.
|
||||
return remote_paths
|
||||
|
||||
if self._play_context.become and self._play_context.become_user and self._play_context.become_user not in ('root', remote_user):
|
||||
try:
|
||||
admin_users = self._connection._shell.get_option('admin_users')
|
||||
except KeyError:
|
||||
admin_users = ['root'] # plugin does not support admin users
|
||||
|
||||
if self._play_context.become and self._play_context.become_user and self._play_context.become_user not in admin_users + [remote_user]:
|
||||
# Unprivileged user that's different than the ssh user. Let's get
|
||||
# to work!
|
||||
|
||||
|
@ -420,12 +425,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
|
||||
|
||||
res = self._remote_chown(remote_paths, self._play_context.become_user)
|
||||
if res['rc'] != 0 and remote_user == 'root':
|
||||
if res['rc'] != 0 and remote_user in admin_users:
|
||||
# chown failed even if remove_user is root
|
||||
raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root. '
|
||||
raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as a privileged user. '
|
||||
'Unprivileged become user would be unable to read the file.')
|
||||
elif res['rc'] != 0:
|
||||
if C.ALLOW_WORLD_READABLE_TMPFILES:
|
||||
if self._connection._shell('allow_world_readable_temp'):
|
||||
# chown and fs acls failed -- do things this insecure
|
||||
# way only if the user opted in in the config file
|
||||
display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
|
||||
|
@ -534,33 +539,46 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
finally:
|
||||
return x # pylint: disable=lost-exception
|
||||
|
||||
def _remote_expand_user(self, path, sudoable=True):
|
||||
''' takes a remote path and performs tilde expansion on the remote host '''
|
||||
if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
|
||||
def _remote_expand_user(self, path, sudoable=True, pathsep=None):
|
||||
''' takes a remote path and performs tilde/$HOME expansion on the remote host '''
|
||||
|
||||
# We only expand ~/path and ~username/path
|
||||
if not path.startswith('~'):
|
||||
return path
|
||||
|
||||
# FIXME: Can't use os.path.sep for Windows paths.
|
||||
# Per Jborean, we don't have to worry about Windows as we don't have a notion of user's home
|
||||
# dir there.
|
||||
split_path = path.split(os.path.sep, 1)
|
||||
expand_path = split_path[0]
|
||||
|
||||
if sudoable and expand_path == '~' and self._play_context.become and self._play_context.become_user:
|
||||
expand_path = '~%s' % self._play_context.become_user
|
||||
|
||||
# use shell to construct appropriate command and execute
|
||||
cmd = self._connection._shell.expand_user(expand_path)
|
||||
data = self._low_level_execute_command(cmd, sudoable=False)
|
||||
|
||||
try:
|
||||
initial_fragment = data['stdout'].strip().splitlines()[-1]
|
||||
except IndexError:
|
||||
initial_fragment = None
|
||||
|
||||
if not initial_fragment:
|
||||
# Something went wrong trying to expand the path remotely. Return
|
||||
# Something went wrong trying to expand the path remotely. Try using pwd, if not, return
|
||||
# the original string
|
||||
return path
|
||||
cmd = self._connection._shell.pwd()
|
||||
pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
|
||||
if pwd:
|
||||
expanded = pwd
|
||||
else:
|
||||
expanded = path
|
||||
|
||||
if len(split_path) > 1:
|
||||
return self._connection._shell.join_path(initial_fragment, *split_path[1:])
|
||||
elif len(split_path) > 1:
|
||||
expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
|
||||
else:
|
||||
return initial_fragment
|
||||
expanded = initial_fragment
|
||||
|
||||
return expanded
|
||||
|
||||
def _strip_success_message(self, data):
|
||||
'''
|
||||
|
@ -655,8 +673,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
if not self._is_pipelining_enabled(module_style, wrap_async):
|
||||
|
||||
# we might need remote tmp dir
|
||||
if not tmp or 'tmp' not in tmp:
|
||||
tmp = self._make_tmp_path()
|
||||
if not tmp:
|
||||
if not self._connection._shell.tempdir or tmp is None or 'tmp' not in tmp:
|
||||
tmp = self._make_tmp_path()
|
||||
else:
|
||||
tmp = self._connection._shell.tempdir
|
||||
|
||||
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
|
||||
remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
|
||||
|
@ -733,14 +754,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
else:
|
||||
cmd = remote_module_path
|
||||
|
||||
rm_tmp = None
|
||||
|
||||
if self._should_remove_tmp_path(tmp) and not persist_files and delete_remote_tmp:
|
||||
if not self._play_context.become or self._play_context.become_user == 'root':
|
||||
# not sudoing or sudoing to root, so can cleanup files in the same step
|
||||
rm_tmp = tmp
|
||||
|
||||
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp).strip()
|
||||
cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()
|
||||
|
||||
# Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
|
||||
if remote_files:
|
||||
|
@ -756,15 +770,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
|
|||
|
||||
# NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
|
||||
# get internal info before cleaning
|
||||
tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)
|
||||
if data.pop("_ansible_suppress_tmpdir_delete", False):
|
||||
self._cleanup_remote_tmp = False
|
||||
|
||||
# remove internal keys
|
||||
remove_internal_keys(data)
|
||||
|
||||
# cleanup tmp?
|
||||
if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
|
||||
self._remove_tmp_path(tmp)
|
||||
|
||||
# FIXME: for backwards compat, figure out if still makes sense
|
||||
if wrap_async:
|
||||
data['changed'] = True
|
||||
|
|
|
@ -25,7 +25,8 @@ import os.path
|
|||
import re
|
||||
import tempfile
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionDone, AnsibleActionFail
|
||||
from ansible.module_utils._text import to_native, to_text
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.plugins.action import ActionBase
|
||||
|
@ -39,7 +40,7 @@ class ActionModule(ActionBase):
|
|||
def _assemble_from_fragments(self, src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, decrypt=True):
|
||||
''' assemble a file from a directory of fragments '''
|
||||
|
||||
tmpfd, temp_path = tempfile.mkstemp()
|
||||
tmpfd, temp_path = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
|
||||
tmp = os.fdopen(tmpfd, 'wb')
|
||||
delimit_me = False
|
||||
add_newline = False
|
||||
|
@ -96,78 +97,73 @@ class ActionModule(ActionBase):
|
|||
ignore_hidden = self._task.args.get('ignore_hidden', False)
|
||||
decrypt = self._task.args.get('decrypt', True)
|
||||
|
||||
if src is None or dest is None:
|
||||
result['failed'] = True
|
||||
result['msg'] = "src and dest are required"
|
||||
return result
|
||||
try:
|
||||
if src is None or dest is None:
|
||||
raise AnsibleActionFail("src and dest are required")
|
||||
|
||||
if boolean(remote_src, strict=False):
|
||||
result.update(self._execute_module(tmp=tmp, task_vars=task_vars))
|
||||
return result
|
||||
else:
|
||||
try:
|
||||
src = self._find_needle('files', src)
|
||||
except AnsibleError as e:
|
||||
result['failed'] = True
|
||||
result['msg'] = to_native(e)
|
||||
return result
|
||||
if boolean(remote_src, strict=False):
|
||||
result.update(self._execute_module(tmp=tmp, task_vars=task_vars))
|
||||
raise AnsibleActionDone()
|
||||
else:
|
||||
try:
|
||||
src = self._find_needle('files', src)
|
||||
except AnsibleError as e:
|
||||
raise AnsibleActionFail(to_native(e))
|
||||
|
||||
if not tmp:
|
||||
tmp = self._make_tmp_path()
|
||||
if not os.path.isdir(src):
|
||||
raise AnsibleActionFail(u"Source (%s) is not a directory" % src)
|
||||
|
||||
if not os.path.isdir(src):
|
||||
result['failed'] = True
|
||||
result['msg'] = u"Source (%s) is not a directory" % src
|
||||
return result
|
||||
_re = None
|
||||
if regexp is not None:
|
||||
_re = re.compile(regexp)
|
||||
|
||||
_re = None
|
||||
if regexp is not None:
|
||||
_re = re.compile(regexp)
|
||||
# Does all work assembling the file
|
||||
path = self._assemble_from_fragments(src, delimiter, _re, ignore_hidden, decrypt)
|
||||
|
||||
# Does all work assembling the file
|
||||
path = self._assemble_from_fragments(src, delimiter, _re, ignore_hidden, decrypt)
|
||||
path_checksum = checksum_s(path)
|
||||
dest = self._remote_expand_user(dest)
|
||||
dest_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=follow, tmp=tmp)
|
||||
|
||||
path_checksum = checksum_s(path)
|
||||
dest = self._remote_expand_user(dest)
|
||||
dest_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=follow, tmp=tmp)
|
||||
diff = {}
|
||||
|
||||
diff = {}
|
||||
# setup args for running modules
|
||||
new_module_args = self._task.args.copy()
|
||||
|
||||
# setup args for running modules
|
||||
new_module_args = self._task.args.copy()
|
||||
# clean assemble specific options
|
||||
for opt in ['remote_src', 'regexp', 'delimiter', 'ignore_hidden', 'decrypt']:
|
||||
if opt in new_module_args:
|
||||
del new_module_args[opt]
|
||||
|
||||
# clean assemble specific options
|
||||
for opt in ['remote_src', 'regexp', 'delimiter', 'ignore_hidden', 'decrypt']:
|
||||
if opt in new_module_args:
|
||||
del new_module_args[opt]
|
||||
|
||||
new_module_args.update(
|
||||
dict(
|
||||
dest=dest,
|
||||
original_basename=os.path.basename(src),
|
||||
new_module_args.update(
|
||||
dict(
|
||||
dest=dest,
|
||||
original_basename=os.path.basename(src),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
if path_checksum != dest_stat['checksum']:
|
||||
if path_checksum != dest_stat['checksum']:
|
||||
|
||||
if self._play_context.diff:
|
||||
diff = self._get_diff_data(dest, path, task_vars)
|
||||
if self._play_context.diff:
|
||||
diff = self._get_diff_data(dest, path, task_vars)
|
||||
|
||||
remote_path = self._connection._shell.join_path(tmp, 'src')
|
||||
xfered = self._transfer_file(path, remote_path)
|
||||
remote_path = self._connection._shell.join_path(self._connection._shell.tempdir, 'src')
|
||||
xfered = self._transfer_file(path, remote_path)
|
||||
|
||||
# fix file permissions when the copy is done as a different user
|
||||
self._fixup_perms2((tmp, remote_path))
|
||||
# fix file permissions when the copy is done as a different user
|
||||
self._fixup_perms2((self._connection._shell.tempdir, remote_path))
|
||||
|
||||
new_module_args.update(dict(src=xfered,))
|
||||
new_module_args.update(dict(src=xfered,))
|
||||
|
||||
res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False)
|
||||
if diff:
|
||||
res['diff'] = diff
|
||||
result.update(res)
|
||||
else:
|
||||
result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))
|
||||
res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
|
||||
if diff:
|
||||
res['diff'] = diff
|
||||
result.update(res)
|
||||
else:
|
||||
result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp))
|
||||
|
||||
self._remove_tmp_path(tmp)
|
||||
except AnsibleAction as e:
|
||||
result.update(e.result)
|
||||
finally:
|
||||
self._remove_tmp_path(self._connection._shell.tempdir)
|
||||
|
||||
return result
|
||||
|
|
|
@ -22,4 +22,8 @@ class ActionModule(ActionBase):
|
|||
wrap_async = self._task.async_val and not self._connection.has_native_async
|
||||
results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async))
|
||||
|
||||
if not wrap_async:
|
||||
# remove a temporary path we created
|
||||
self._remove_tmp_path(self._connection._shell.tempdir)
|
||||
|
||||
return results
|
||||
|
|
|
@ -26,8 +26,8 @@ import os.path
|
|||
import stat
|
||||
import tempfile
|
||||
import traceback
|
||||
from itertools import chain
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError, AnsibleFileNotFound
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
|
@ -186,12 +186,13 @@ def _walk_dirs(topdir, base_path=None, local_follow=False, trailing_slash_detect
|
|||
|
||||
class ActionModule(ActionBase):
|
||||
|
||||
TRANSFERS_FILES = True
|
||||
|
||||
def _create_remote_file_args(self, module_args):
|
||||
# remove action plugin only keys
|
||||
return dict((k, v) for k, v in module_args.items() if k not in ('content', 'decrypt'))
|
||||
|
||||
def _copy_file(self, source_full, source_rel, content, content_tempfile,
|
||||
dest, task_vars, tmp, delete_remote_tmp):
|
||||
def _copy_file(self, source_full, source_rel, content, content_tempfile, dest, task_vars, tmp):
|
||||
decrypt = boolean(self._task.args.get('decrypt', True), strict=False)
|
||||
follow = boolean(self._task.args.get('follow', False), strict=False)
|
||||
force = boolean(self._task.args.get('force', 'yes'), strict=False)
|
||||
|
@ -206,7 +207,6 @@ class ActionModule(ActionBase):
|
|||
except AnsibleFileNotFound as e:
|
||||
result['failed'] = True
|
||||
result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
|
||||
self._remove_tmp_path(tmp)
|
||||
return result
|
||||
|
||||
# Get the local mode and set if user wanted it preserved
|
||||
|
@ -221,13 +221,7 @@ class ActionModule(ActionBase):
|
|||
if self._connection._shell.path_has_trailing_slash(dest):
|
||||
dest_file = self._connection._shell.join_path(dest, source_rel)
|
||||
else:
|
||||
dest_file = self._connection._shell.join_path(dest)
|
||||
|
||||
# Create a tmp path if missing only if this is not recursive.
|
||||
# If this is recursive we already have a tmp path.
|
||||
if delete_remote_tmp:
|
||||
if tmp is None or "-tmp-" not in tmp:
|
||||
tmp = self._make_tmp_path()
|
||||
dest_file = dest
|
||||
|
||||
# Attempt to get remote file info
|
||||
dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp, checksum=force)
|
||||
|
@ -237,7 +231,6 @@ class ActionModule(ActionBase):
|
|||
if content is not None:
|
||||
# If source was defined as content remove the temporary file and fail out.
|
||||
self._remove_tempfile_if_content_defined(content, content_tempfile)
|
||||
self._remove_tmp_path(tmp)
|
||||
result['failed'] = True
|
||||
result['msg'] = "can not use content with a dir as dest"
|
||||
return result
|
||||
|
@ -265,7 +258,7 @@ class ActionModule(ActionBase):
|
|||
return result
|
||||
|
||||
# Define a remote directory that we will copy the file to.
|
||||
tmp_src = self._connection._shell.join_path(tmp, 'source')
|
||||
tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, 'source')
|
||||
|
||||
remote_path = None
|
||||
|
||||
|
@ -280,7 +273,7 @@ class ActionModule(ActionBase):
|
|||
|
||||
# fix file permissions when the copy is done as a different user
|
||||
if remote_path:
|
||||
self._fixup_perms2((tmp, remote_path))
|
||||
self._fixup_perms2((self._connection._shell.tempdir, remote_path))
|
||||
|
||||
if raw:
|
||||
# Continue to next iteration if raw is defined.
|
||||
|
@ -301,9 +294,7 @@ class ActionModule(ActionBase):
|
|||
if lmode:
|
||||
new_module_args['mode'] = lmode
|
||||
|
||||
module_return = self._execute_module(module_name='copy',
|
||||
module_args=new_module_args, task_vars=task_vars,
|
||||
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
|
||||
module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
|
||||
|
||||
else:
|
||||
# no need to transfer the file, already correct hash, but still need to call
|
||||
|
@ -312,8 +303,6 @@ class ActionModule(ActionBase):
|
|||
self._loader.cleanup_tmp_file(source_full)
|
||||
|
||||
if raw:
|
||||
# Continue to next iteration if raw is defined.
|
||||
self._remove_tmp_path(tmp)
|
||||
return None
|
||||
|
||||
# Fix for https://github.com/ansible/ansible-modules-core/issues/1568.
|
||||
|
@ -339,9 +328,7 @@ class ActionModule(ActionBase):
|
|||
new_module_args['mode'] = lmode
|
||||
|
||||
# Execute the file module.
|
||||
module_return = self._execute_module(module_name='file',
|
||||
module_args=new_module_args, task_vars=task_vars,
|
||||
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
|
||||
module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
|
||||
|
||||
if not module_return.get('checksum'):
|
||||
module_return['checksum'] = local_checksum
|
||||
|
@ -379,7 +366,7 @@ class ActionModule(ActionBase):
|
|||
|
||||
def _create_content_tempfile(self, content):
|
||||
''' Create a tempfile containing defined content '''
|
||||
fd, content_tempfile = tempfile.mkstemp()
|
||||
fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
|
||||
f = os.fdopen(fd, 'wb')
|
||||
content = to_bytes(content)
|
||||
try:
|
||||
|
@ -402,6 +389,9 @@ class ActionModule(ActionBase):
|
|||
|
||||
result = super(ActionModule, self).run(tmp, task_vars)
|
||||
|
||||
if tmp is None:
|
||||
tmp = self._connection._shell.tempdir
|
||||
|
||||
source = self._task.args.get('src', None)
|
||||
content = self._task.args.get('content', None)
|
||||
dest = self._task.args.get('dest', None)
|
||||
|
@ -493,19 +483,6 @@ class ActionModule(ActionBase):
|
|||
# Used to cut down on command calls when not recursive.
|
||||
module_executed = False
|
||||
|
||||
# Optimization: Can delete remote_tmp on the first call if we're only
|
||||
# copying a single file. Otherwise we keep the remote_tmp until it
|
||||
# is no longer needed.
|
||||
delete_remote_tmp = False
|
||||
if sum(len(f) for f in chain(source_files.values())) == 1:
|
||||
# Tell _execute_module to delete the file if there is one file.
|
||||
delete_remote_tmp = True
|
||||
|
||||
# If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
|
||||
if not delete_remote_tmp:
|
||||
if tmp is None or "-tmp-" not in tmp:
|
||||
tmp = self._make_tmp_path()
|
||||
|
||||
# expand any user home dir specifier
|
||||
dest = self._remote_expand_user(dest)
|
||||
|
||||
|
@ -513,7 +490,7 @@ class ActionModule(ActionBase):
|
|||
for source_full, source_rel in source_files['files']:
|
||||
# copy files over. This happens first as directories that have
|
||||
# a file do not need to be created later
|
||||
module_return = self._copy_file(source_full, source_rel, content, content_tempfile, dest, task_vars, tmp, delete_remote_tmp)
|
||||
module_return = self._copy_file(source_full, source_rel, content, content_tempfile, dest, task_vars, tmp)
|
||||
if module_return is None:
|
||||
continue
|
||||
|
||||
|
@ -539,9 +516,7 @@ class ActionModule(ActionBase):
|
|||
new_module_args['state'] = 'directory'
|
||||
new_module_args['mode'] = self._task.args.get('directory_mode', None)
|
||||
|
||||
module_return = self._execute_module(module_name='file',
|
||||
module_args=new_module_args, task_vars=task_vars,
|
||||
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
|
||||
module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
|
||||
module_executed = True
|
||||
changed = changed or module_return.get('changed', False)
|
||||
|
||||
|
@ -553,15 +528,11 @@ class ActionModule(ActionBase):
|
|||
new_module_args['state'] = 'link'
|
||||
new_module_args['force'] = True
|
||||
|
||||
module_return = self._execute_module(module_name='file',
|
||||
module_args=new_module_args, task_vars=task_vars,
|
||||
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
|
||||
module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
|
||||
module_executed = True
|
||||
|
||||
if module_return.get('failed'):
|
||||
result.update(module_return)
|
||||
if not delete_remote_tmp:
|
||||
self._remove_tmp_path(tmp)
|
||||
return result
|
||||
|
||||
changed = changed or module_return.get('changed', False)
|
||||
|
@ -571,13 +542,12 @@ class ActionModule(ActionBase):
|
|||
if 'path' in module_return and 'dest' not in module_return:
|
||||
module_return['dest'] = module_return['path']
|
||||
|
||||
# Delete tmp path if we were recursive or if we did not execute a module.
|
||||
if not delete_remote_tmp or (delete_remote_tmp and not module_executed):
|
||||
self._remove_tmp_path(tmp)
|
||||
|
||||
if module_executed and len(source_files['files']) == 1:
|
||||
result.update(module_return)
|
||||
else:
|
||||
result.update(dict(dest=dest, src=source, changed=changed))
|
||||
|
||||
# Delete tmp path
|
||||
self._remove_tmp_path(self._connection._shell.tempdir)
|
||||
|
||||
return result
|
||||
|
|
|
@ -44,170 +44,174 @@ class ActionModule(ActionBase):
|
|||
|
||||
result = super(ActionModule, self).run(tmp, task_vars)
|
||||
|
||||
if self._play_context.check_mode:
|
||||
result['skipped'] = True
|
||||
result['msg'] = 'check mode not (yet) supported for this module'
|
||||
return result
|
||||
try:
|
||||
if self._play_context.check_mode:
|
||||
result['skipped'] = True
|
||||
result['msg'] = 'check mode not (yet) supported for this module'
|
||||
return result
|
||||
|
||||
source = self._task.args.get('src', None)
|
||||
dest = self._task.args.get('dest', None)
|
||||
flat = boolean(self._task.args.get('flat'), strict=False)
|
||||
fail_on_missing = boolean(self._task.args.get('fail_on_missing'), strict=False)
|
||||
validate_checksum = boolean(self._task.args.get('validate_checksum',
|
||||
self._task.args.get('validate_md5', True)),
|
||||
strict=False)
|
||||
source = self._task.args.get('src', None)
|
||||
dest = self._task.args.get('dest', None)
|
||||
flat = boolean(self._task.args.get('flat'), strict=False)
|
||||
fail_on_missing = boolean(self._task.args.get('fail_on_missing'), strict=False)
|
||||
validate_checksum = boolean(self._task.args.get('validate_checksum',
|
||||
self._task.args.get('validate_md5', True)),
|
||||
strict=False)
|
||||
|
||||
# validate source and dest are strings FIXME: use basic.py and module specs
|
||||
if not isinstance(source, string_types):
|
||||
result['msg'] = "Invalid type supplied for source option, it must be a string"
|
||||
# validate source and dest are strings FIXME: use basic.py and module specs
|
||||
if not isinstance(source, string_types):
|
result['msg'] = "Invalid type supplied for source option, it must be a string"

if not isinstance(dest, string_types):
result['msg'] = "Invalid type supplied for dest option, it must be a string"

# validate_md5 is the deprecated way to specify validate_checksum
if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
result['msg'] = "validate_checksum and validate_md5 cannot both be specified"

if 'validate_md5' in self._task.args:
display.deprecated('Use validate_checksum instead of validate_md5', version='2.8')

if source is None or dest is None:
result['msg'] = "src and dest are required"

if result.get('msg'):
result['failed'] = True
return result

source = self._connection._shell.join_path(source)
source = self._remote_expand_user(source)

remote_checksum = None
if not self._play_context.become:
# calculate checksum for the remote file, don't bother if using become as slurp will be used
# Force remote_checksum to follow symlinks because fetch always follows symlinks
remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)

# use slurp if permissions are lacking or privilege escalation is needed
remote_data = None
if remote_checksum in ('1', '2', None):
slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
if slurpres.get('failed'):
if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
result['msg'] = "the remote file does not exist, not transferring, ignored"
result['file'] = source
result['changed'] = False
else:
result.update(slurpres)
return result
else:
result.update(slurpres)
if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
if remote_data is not None:
remote_checksum = checksum_s(remote_data)
# the source path may have been expanded on the
# target system, so we compare it here and use the
# expanded version if it's different
remote_source = slurpres.get('source')
if remote_source and remote_source != source:
source = remote_source

# calculate the destination name
if os.path.sep not in self._connection._shell.join_path('a', ''):
source = self._connection._shell._unquote(source)
source_local = source.replace('\\', '/')
else:
source_local = source

dest = os.path.expanduser(dest)
if flat:
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
result['file'] = dest
result['failed'] = True
return result
if dest.endswith(os.sep):
# if the path ends with "/", we'll use the source filename as the
# destination filename
base = os.path.basename(source_local)
dest = os.path.join(dest, base)
if not dest.startswith("/"):
# if dest does not start with "/", we'll assume a relative path
dest = self._loader.path_dwim(dest)
else:
# files are saved in dest dir, with a subdir for each host, then the filename
if 'inventory_hostname' in task_vars:
target_name = task_vars['inventory_hostname']
else:
target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)

dest = dest.replace("//", "/")

if remote_checksum in ('0', '1', '2', '3', '4', '5'):
result['changed'] = False
result['file'] = source
if remote_checksum == '0':
result['msg'] = "unable to calculate the checksum of the remote file"
elif remote_checksum == '1':
result['msg'] = "the remote file does not exist"
elif remote_checksum == '2':
result['msg'] = "no read permission on remote file"
elif remote_checksum == '3':
result['msg'] = "remote file is a directory, fetch cannot work on directories"
elif remote_checksum == '4':
result['msg'] = "python isn't present on the system. Unable to compute checksum"
elif remote_checksum == '5':
result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
# via fail_when_missing
if fail_on_missing:
result['failed'] = True
del result['changed']
else:
result['msg'] += ", not transferring, ignored"
return result
else:
# calculate checksum for the local file
local_checksum = checksum(dest)

if remote_checksum != local_checksum:
# create the containing directories, if needed
makedirs_safe(os.path.dirname(dest))

# fetch the file and check for changes
if remote_data is None:
self._connection.fetch_file(source, dest)
else:
try:
f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
f.write(remote_data)
f.close()
except (IOError, OSError) as e:
raise AnsibleError("Failed to fetch the file: %s" % e)
new_checksum = secure_hash(dest)
# For backwards compatibility. We'll return None on FIPS enabled systems
try:
new_md5 = md5(dest)
except ValueError:
new_md5 = None

if validate_checksum and new_checksum != remote_checksum:
result.update(dict(failed=True, md5sum=new_md5,
msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
checksum=new_checksum, remote_checksum=remote_checksum))
else:
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
'remote_md5sum': None, 'checksum': new_checksum,
'remote_checksum': remote_checksum})
else:
# For backwards compatibility. We'll return None on FIPS enabled systems
try:
local_md5 = md5(dest)
except ValueError:
local_md5 = None
result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))

finally:
self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -29,24 +29,28 @@ class ActionModule(ActionBase):
self._supports_check_mode = True
self._supports_async = True

results = super(ActionModule, self).run(tmp, task_vars)
result = super(ActionModule, self).run(tmp, task_vars)

if not results.get('skipped'):
if not result.get('skipped'):

if results.get('invocation', {}).get('module_args'):
if result.get('invocation', {}).get('module_args'):
# avoid passing to modules in case of no_log
# should not be set anymore but here for backwards compatibility
del results['invocation']['module_args']
del result['invocation']['module_args']

# FUTURE: better to let _execute_module calculate this internally?
wrap_async = self._task.async_val and not self._connection.has_native_async

# do work!
results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async))
result = merge_hash(result, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async))

# hack to keep --verbose from showing all the setup module results
# moved from setup module as now we filter out all _ansible_ from results
# hack to keep --verbose from showing all the setup module result
# moved from setup module as now we filter out all _ansible_ from result
if self._task.action == 'setup':
results['_ansible_verbose_override'] = True
result['_ansible_verbose_override'] = True

return results
if not wrap_async:
# remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -17,6 +17,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.errors import AnsibleAction, AnsibleActionFail
from ansible.plugins.action import ActionBase

try:

@@ -46,29 +47,35 @@ class ActionModule(ActionBase):
module = self._templar.template("{{hostvars['%s']['ansible_facts']['pkg_mgr']}}" % self._task.delegate_to)
else:
module = self._templar.template('{{ansible_facts.pkg_mgr}}')
except:
except Exception:
pass # could not get it from template!

if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr', gather_subset='!all'), task_vars=task_vars)
display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_pkg_mgr', 'auto')
try:
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr', gather_subset='!all'), task_vars=task_vars)
display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_pkg_mgr', 'auto')

if module != 'auto':

if module not in self._shared_loader_obj.module_loader:
result['failed'] = True
result['msg'] = 'Could not find a module for %s.' % module
if module not in self._shared_loader_obj.module_loader:
raise AnsibleActionFail('Could not find a module for %s.' % module)
else:
# run the 'package' module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']

display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
else:
# run the 'package' module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']
raise AnsibleActionFail('Could not detect which package manager to use. Try gathering facts or setting the "use" option.')

display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
else:
result['failed'] = True
result['msg'] = 'Could not detect which package manager to use. Try gathering facts or setting the "use" option.'
except AnsibleAction as e:
result.update(e.result)
finally:
if not self._task.async_val:
# remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -20,7 +20,7 @@ __metaclass__ = type

import os

from ansible.errors import AnsibleError
from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionDone, AnsibleActionFail
from ansible.module_utils._text import to_native
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase

@@ -28,6 +28,8 @@ from ansible.plugins.action import ActionBase

class ActionModule(ActionBase):

TRANSFERS_FILES = True

def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()

@@ -37,39 +39,33 @@ class ActionModule(ActionBase):
src = self._task.args.get('src', None)
remote_src = boolean(self._task.args.get('remote_src', 'no'), strict=False)

if src is None:
result['failed'] = True
result['msg'] = "src is required"
return result
elif remote_src:
# everything is remote, so we just execute the module
# without changing any of the module arguments
result.update(self._execute_module(task_vars=task_vars))
return result

try:
src = self._find_needle('files', src)
except AnsibleError as e:
result['failed'] = True
result['msg'] = to_native(e)
return result
if src is None:
raise AnsibleActionFail("src is required")
elif remote_src:
# everything is remote, so we just execute the module
# without changing any of the module arguments
raise AnsibleActionDone(result=self._execute_module(task_vars=task_vars))

# create the remote tmp dir if needed, and put the source file there
if tmp is None or "-tmp-" not in tmp:
tmp = self._make_tmp_path()
try:
src = self._find_needle('files', src)
except AnsibleError as e:
raise AnsibleActionFail(to_native(e))

tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
self._transfer_file(src, tmp_src)
tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, os.path.basename(src))
self._transfer_file(src, tmp_src)
self._fixup_perms2((tmp_src,))

self._fixup_perms2((tmp, tmp_src))

new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=tmp_src,
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=tmp_src,
)
)
)

result.update(self._execute_module('patch', module_args=new_module_args, task_vars=task_vars))
self._remove_tmp_path(tmp)
result.update(self._execute_module('patch', module_args=new_module_args, task_vars=task_vars))
except AnsibleAction as e:
result.update(e.result)
finally:
self._remove_tmp_path(self._connection._shell.tempdir)
return result

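The patch plugin above, like the other action plugins touched by this commit, converges on one control-flow convention: do the work inside try, signal problems with AnsibleActionFail / AnsibleActionSkip / AnsibleActionDone, fold the exception's result back into result, and always remove the shell plugin's tempdir in finally. A minimal sketch of that pattern, with a made-up argument check and module call that are not taken from this commit:

    from ansible.errors import AnsibleAction, AnsibleActionFail
    from ansible.plugins.action import ActionBase

    class ActionModule(ActionBase):

        def run(self, tmp=None, task_vars=None):
            result = super(ActionModule, self).run(tmp, task_vars or {})
            try:
                if 'src' not in self._task.args:  # hypothetical required option
                    raise AnsibleActionFail("src is required")
                # illustrative module execution only
                result.update(self._execute_module(task_vars=task_vars))
            except AnsibleAction as e:
                # Fail/Skip/Done exceptions carry a ready-made result dict
                result.update(e.result)
            finally:
                # the shell plugin now tracks the remote temp dir
                self._remove_tmp_path(self._connection._shell.tempdir)
            return result
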
@@ -21,12 +21,13 @@ import os
import re
import shlex

from ansible.errors import AnsibleError
from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionDone, AnsibleActionFail, AnsibleActionSkip
from ansible.module_utils._text import to_native, to_text
from ansible.plugins.action import ActionBase


class ActionModule(ActionBase):

TRANSFERS_FILES = True

# On Windows platform, absolute paths begin with a (back)slash

@@ -40,95 +41,91 @@ class ActionModule(ActionBase):

result = super(ActionModule, self).run(tmp, task_vars)

if not tmp:
tmp = self._make_tmp_path()

creates = self._task.args.get('creates')
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
if self._remote_file_exists(creates):
self._remove_tmp_path(tmp)
return dict(skipped=True, msg=("skipped, since %s exists" % creates))

removes = self._task.args.get('removes')
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
if not self._remote_file_exists(removes):
self._remove_tmp_path(tmp)
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))

# The chdir must be absolute, because a relative path would rely on
# remote node behaviour & user config.
chdir = self._task.args.get('chdir')
if chdir:
# Powershell is the only Windows-path aware shell
if self._connection._shell.SHELL_FAMILY == 'powershell' and \
not self.windows_absolute_path_detection.matches(chdir):
return dict(failed=True, msg='chdir %s must be an absolute path for a Windows remote node' % chdir)
# Every other shell is unix-path-aware.
if self._connection._shell.SHELL_FAMILY != 'powershell' and not chdir.startswith('/'):
return dict(failed=True, msg='chdir %s must be an absolute path for a Unix-aware remote node' % chdir)

# Split out the script as the first item in raw_params using
# shlex.split() in order to support paths and files with spaces in the name.
# Any arguments passed to the script will be added back later.
raw_params = to_native(self._task.args.get('_raw_params', ''), errors='surrogate_or_strict')
parts = [to_text(s, errors='surrogate_or_strict') for s in shlex.split(raw_params.strip())]
source = parts[0]

try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=self._task.args.get('decrypt', True))
except AnsibleError as e:
return dict(failed=True, msg=to_native(e))
creates = self._task.args.get('creates')
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
if self._remote_file_exists(creates):
raise AnsibleActionSkip("%s exists, matching creates option" % creates)

if not self._play_context.check_mode:
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
removes = self._task.args.get('removes')
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
if not self._remote_file_exists(removes):
raise AnsibleActionSkip("%s does not exist, matching removes option" % removes)

# Convert raw_params to text for the purpose of replacing the script since
# parts and tmp_src are both unicode strings and raw_params will be different
# depending on Python version.
#
# Once everything is encoded consistently, replace the script path on the remote
# system with the remainder of the raw_params. This preserves quoting in parameters
# that would have been removed by shlex.split().
target_command = to_text(raw_params).strip().replace(parts[0], tmp_src)
# The chdir must be absolute, because a relative path would rely on
# remote node behaviour & user config.
chdir = self._task.args.get('chdir')
if chdir:
# Powershell is the only Windows-path aware shell
if self._connection._shell.SHELL_FAMILY == 'powershell' and \
not self.windows_absolute_path_detection.matches(chdir):
raise AnsibleActionFail('chdir %s must be an absolute path for a Windows remote node' % chdir)
# Every other shell is unix-path-aware.
if self._connection._shell.SHELL_FAMILY != 'powershell' and not chdir.startswith('/'):
raise AnsibleActionFail('chdir %s must be an absolute path for a Unix-aware remote node' % chdir)

self._transfer_file(source, tmp_src)

# set file permissions, more permissive when the copy is done as a different user
self._fixup_perms2((tmp, tmp_src), execute=True)
try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=self._task.args.get('decrypt', True))
except AnsibleError as e:
raise AnsibleActionFail(to_native(e))

# add preparation steps to one ssh roundtrip executing the script
env_dict = dict()
env_string = self._compute_environment_string(env_dict)
script_cmd = ' '.join([env_string, target_command])

if self._play_context.check_mode:
# now we execute script, always assume changed.
result['changed'] = True
self._remove_tmp_path(tmp)
return result

script_cmd = self._connection._shell.wrap_for_exec(script_cmd)
if not self._play_context.check_mode:
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, os.path.basename(source))

exec_data = None
# HACK: come up with a sane way to pass around env outside the command
if self._connection.transport == "winrm":
exec_data = self._connection._create_raw_wrapper_payload(script_cmd, env_dict)

result.update(self._low_level_execute_command(cmd=script_cmd, in_data=exec_data, sudoable=True, chdir=chdir))
self._transfer_file(source, tmp_src)

# clean up after
self._remove_tmp_path(tmp)
# set file permissions, more permissive when the copy is done as a different user
self._fixup_perms2((tmp_src,), execute=True)

result['changed'] = True

if 'rc' in result and result['rc'] != 0:
result['failed'] = True
result['msg'] = 'non-zero return code'
if self._play_context.check_mode:
raise AnsibleActionDone()

script_cmd = self._connection._shell.wrap_for_exec(script_cmd)

result.update(self._low_level_execute_command(cmd=script_cmd, in_data=exec_data, sudoable=True, chdir=chdir))

if 'rc' in result and result['rc'] != 0:
raise AnsibleActionFail('non-zero return code')

except AnsibleAction as e:
result.update(e.result)
finally:
self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -18,6 +18,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.errors import AnsibleAction, AnsibleActionFail
from ansible.plugins.action import ActionBase


@@ -48,35 +49,41 @@ class ActionModule(ActionBase):
except:
pass # could not get it from template!

if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars)
self._display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_service_mgr', 'auto')
try:
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars)
self._display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_service_mgr', 'auto')

if not module or module == 'auto' or module not in self._shared_loader_obj.module_loader:
module = 'service'

if module != 'auto':
# run the 'service' module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']

# for backwards compatibility
if 'state' in new_module_args and new_module_args['state'] == 'running':
self._display.deprecated(msg="state=running is deprecated. Please use state=started", version="2.7")
new_module_args['state'] = 'started'

if module in self.UNUSED_PARAMS:
for unused in self.UNUSED_PARAMS[module]:
if unused in new_module_args:
del new_module_args[unused]
self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module))

self._display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
else:
result['failed'] = True
result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.'
self._display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
else:
raise AnsibleActionFail('Could not detect which service manager to use. Try gathering facts or setting the "use" option.')

except AnsibleAction as e:
result.update(e.result)
finally:
if not self._task.async_val:
self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -22,4 +22,9 @@ class ActionModule(ActionBase):
loader=self._loader,
templar=self._templar,
shared_loader_obj=self._shared_loader_obj)
return command_action.run(task_vars=task_vars)
result = command_action.run(task_vars=task_vars)

# remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -21,13 +21,11 @@ import os
import shutil
import tempfile

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleAction, AnsibleActionFail
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase
from ansible.template import generate_ansible_template_vars
from ansible.utils.hashing import checksum_s


class ActionModule(ActionBase):

@@ -35,20 +33,6 @@ class ActionModule(ActionBase):
TRANSFERS_FILES = True
DEFAULT_NEWLINE_SEQUENCE = "\n"

def get_checksum(self, dest, all_vars, try_directory=False, source=None, tmp=None):
try:
dest_stat = self._execute_remote_stat(dest, all_vars=all_vars, follow=False, tmp=tmp)

if dest_stat['exists'] and dest_stat['isdir'] and try_directory and source:
base = os.path.basename(source)
dest = os.path.join(dest, base)
dest_stat = self._execute_remote_stat(dest, all_vars=all_vars, follow=False, tmp=tmp)

except AnsibleError as e:
return dict(failed=True, msg=to_text(e))

return dest_stat['checksum']

def run(self, tmp=None, task_vars=None):
''' handler for template operations '''

@@ -76,108 +60,103 @@ class ActionModule(ActionBase):
if newline_sequence in wrong_sequences:
newline_sequence = allowed_sequences[wrong_sequences.index(newline_sequence)]

if state is not None:
result['failed'] = True
result['msg'] = "'state' cannot be specified on a template"
elif source is None or dest is None:
result['failed'] = True
result['msg'] = "src and dest are required"
elif newline_sequence not in allowed_sequences:
result['failed'] = True
result['msg'] = "newline_sequence needs to be one of: \n, \r or \r\n"
else:
try:
if state is not None:
raise AnsibleActionFail("'state' cannot be specified on a template")
elif source is None or dest is None:
raise AnsibleActionFail("src and dest are required")
elif newline_sequence not in allowed_sequences:
raise AnsibleActionFail("newline_sequence needs to be one of: \n, \r or \r\n")
else:
try:
source = self._find_needle('templates', source)
except AnsibleError as e:
raise AnsibleActionFail(to_text(e))

# Get vault decrypted tmp file
try:
source = self._find_needle('templates', source)
except AnsibleError as e:
result['failed'] = True
result['msg'] = to_text(e)
tmp_source = self._loader.get_real_file(source)
except AnsibleFileNotFound as e:
raise AnsibleActionFail("could not find src=%s, %s" % (source, to_text(e)))

if 'failed' in result:
return result
# template the source data locally & get ready to transfer
try:
with open(tmp_source, 'r') as f:
template_data = to_text(f.read())

# Get vault decrypted tmp file
try:
tmp_source = self._loader.get_real_file(source)
except AnsibleFileNotFound as e:
result['failed'] = True
result['msg'] = "could not find src=%s, %s" % (source, e)
self._remove_tmp_path(tmp)
return result
# set jinja2 internal search path for includes
searchpath = task_vars.get('ansible_search_path', [])
searchpath.extend([self._loader._basedir, os.path.dirname(source)])

# We want to search into the 'templates' subdir of each search path in
# addition to our original search paths.
newsearchpath = []
for p in searchpath:
newsearchpath.append(os.path.join(p, 'templates'))
newsearchpath.append(p)
searchpath = newsearchpath

self._templar.environment.loader.searchpath = searchpath
self._templar.environment.newline_sequence = newline_sequence
if block_start_string is not None:
self._templar.environment.block_start_string = block_start_string
if block_end_string is not None:
self._templar.environment.block_end_string = block_end_string
if variable_start_string is not None:
self._templar.environment.variable_start_string = variable_start_string
if variable_end_string is not None:
self._templar.environment.variable_end_string = variable_end_string
if trim_blocks is not None:
self._templar.environment.trim_blocks = bool(trim_blocks)

# add ansible 'template' vars
temp_vars = task_vars.copy()
temp_vars.update(generate_ansible_template_vars(source))

old_vars = self._templar._available_variables
self._templar.set_available_variables(temp_vars)
resultant = self._templar.do_template(template_data, preserve_trailing_newlines=True, escape_backslashes=False)
self._templar.set_available_variables(old_vars)
except AnsibleAction:
raise
except Exception as e:
raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
finally:
self._loader.cleanup_tmp_file(tmp_source)

new_task = self._task.copy()
new_task.args.pop('newline_sequence', None)
new_task.args.pop('block_start_string', None)
new_task.args.pop('block_end_string', None)
new_task.args.pop('variable_start_string', None)
new_task.args.pop('variable_end_string', None)
new_task.args.pop('trim_blocks', None)
try:
tempdir = tempfile.mkdtemp()
result_file = os.path.join(tempdir, os.path.basename(source))
with open(result_file, 'wb') as f:
f.write(to_bytes(resultant, errors='surrogate_or_strict'))

except Exception as e:
result['failed'] = True
result['msg'] = "%s: %s" % (type(e).__name__, to_text(e))
return result
new_task.args.update(
dict(
src=result_file,
dest=dest,
follow=follow,
),
)
copy_action = self._shared_loader_obj.action_loader.get('copy',
task=new_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=self._templar,
shared_loader_obj=self._shared_loader_obj)
result.update(copy_action.run(task_vars=task_vars))
finally:
shutil.rmtree(tempdir)
except AnsibleAction as e:
result.update(e.result)
finally:
self._loader.cleanup_tmp_file(tmp_source)

self._remove_tmp_path(self._connection._shell.tempdir)

return result

@@ -20,7 +20,7 @@ __metaclass__ = type

import os

from ansible.errors import AnsibleError
from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionFail, AnsibleActionSkip
from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase

@@ -43,96 +43,81 @@ class ActionModule(ActionBase):
creates = self._task.args.get('creates', None)
decrypt = self._task.args.get('decrypt', True)

# "copy" is deprecated in favor of "remote_src".
if 'copy' in self._task.args:
# They are mutually exclusive.
if 'remote_src' in self._task.args:
result['failed'] = True
result['msg'] = "parameters are mutually exclusive: ('copy', 'remote_src')"
return result
# We will take the information from copy and store it in
# the remote_src var to use later in this file.
self._task.args['remote_src'] = remote_src = not boolean(self._task.args.pop('copy'), strict=False)

if source is None or dest is None:
result['failed'] = True
result['msg'] = "src (or content) and dest are required"
return result

if not tmp:
tmp = self._make_tmp_path()

if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
creates = self._remote_expand_user(creates)
if self._remote_file_exists(creates):
result['skipped'] = True
result['msg'] = "skipped, since %s exists" % creates
self._remove_tmp_path(tmp)
return result

dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
source = os.path.expanduser(source)

if not remote_src:
try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=decrypt)
except AnsibleError as e:
result['failed'] = True
result['msg'] = to_text(e)
self._remove_tmp_path(tmp)
return result

try:
remote_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=True)
except AnsibleError as e:
result['failed'] = True
result['msg'] = to_text(e)
self._remove_tmp_path(tmp)
return result
# "copy" is deprecated in favor of "remote_src".
if 'copy' in self._task.args:
# They are mutually exclusive.
if 'remote_src' in self._task.args:
raise AnsibleActionFail("parameters are mutually exclusive: ('copy', 'remote_src')")
# We will take the information from copy and store it in
# the remote_src var to use later in this file.
self._task.args['remote_src'] = remote_src = not boolean(self._task.args.pop('copy'), strict=False)

if not remote_stat['exists'] or not remote_stat['isdir']:
result['failed'] = True
result['msg'] = "dest '%s' must be an existing dir" % dest
self._remove_tmp_path(tmp)
return result
if source is None or dest is None:
raise AnsibleActionFail("src (or content) and dest are required")

if not remote_src:
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, 'source')
self._transfer_file(source, tmp_src)
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
creates = self._remote_expand_user(creates)
if self._remote_file_exists(creates):
raise AnsibleActionSkip("skipped, since %s exists" % creates)

# handle diff mode client side
# handle check mode client side
dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
source = os.path.expanduser(source)

if not remote_src:
# fix file permissions when the copy is done as a different user
self._fixup_perms2((tmp, tmp_src))
# Build temporary module_args.
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=tmp_src,
original_basename=os.path.basename(source),
),
)
if not remote_src:
try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=decrypt)
except AnsibleError as e:
raise AnsibleActionFail(to_text(e))

else:
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
original_basename=os.path.basename(source),
),
)
try:
remote_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=True)
except AnsibleError as e:
raise AnsibleActionFail(to_text(e))

# remove action plugin only key
for key in ('decrypt',):
if key in new_module_args:
del new_module_args[key]
if not remote_stat['exists'] or not remote_stat['isdir']:
raise AnsibleActionFail("dest '%s' must be an existing dir" % dest)

# execute the unarchive module now, with the updated args
result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
self._remove_tmp_path(tmp)
if not remote_src:
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, 'source')
self._transfer_file(source, tmp_src)

# handle diff mode client side
# handle check mode client side

if not remote_src:
# fix file permissions when the copy is done as a different user
self._fixup_perms2((self._connection._shell.tempdir, tmp_src))

except AnsibleAction as e:
result.update(e.result)
finally:
self._remove_tmp_path(self._connection._shell.tempdir)
return result

@@ -15,6 +15,7 @@ import tempfile
import traceback
import zipfile

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean

@@ -218,7 +219,7 @@ class ActionModule(ActionBase):

def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp()
fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
f = os.fdopen(fd, 'wb')
content = to_bytes(content)
try:

@@ -60,7 +60,7 @@ class ConnectionBase(AnsiblePlugin):
supports_persistence = False
force_persistence = False

def __init__(self, play_context, new_stdin, *args, **kwargs):
def __init__(self, play_context, new_stdin, shell=None, *args, **kwargs):

super(ConnectionBase, self).__init__()

@@ -78,9 +78,11 @@ class ConnectionBase(AnsiblePlugin):
self.success_key = None
self.prompt = None
self._connected = False

self._socket_path = None

if shell is not None:
self._shell = shell

# load the shell plugin for this action/connection
if play_context.shell:
shell_type = play_context.shell

@@ -19,7 +19,7 @@ from collections import defaultdict
from ansible import constants as C
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
from ansible.module_utils._text import to_text
from ansible.parsing.plugin_docs import read_docstring
from ansible.utils.plugin_docs import get_docstring

try:
from __main__ import display

@@ -209,14 +209,14 @@ class PluginLoader:
if self.class_name:
type_name = get_plugin_class(self.class_name)

# FIXME: expand from just connection and callback
if type_name in ('callback', 'connection', 'inventory', 'lookup'):
dstring = read_docstring(path, verbose=False, ignore_errors=False)
# FIXME: expand to other plugins, but never doc fragments
# if type name != 'module_doc_fragment':
if type_name in ('callback', 'connection', 'inventory', 'lookup', 'shell'):
dstring = get_docstring(path, fragment_loader, verbose=False, ignore_errors=True)[0]

if dstring.get('doc', False):
if 'options' in dstring['doc'] and isinstance(dstring['doc']['options'], dict):
C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['doc']['options'])
display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
if 'options' in dstring and isinstance(dstring['options'], dict):
C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['options'])
display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))

def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path '''

@@ -462,6 +462,14 @@ class PluginLoader:
self._update_object(obj, name, path)
yield obj

# doc fragments first
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)

action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',

@@ -545,13 +553,6 @@ test_loader = PluginLoader(
'test_plugins'
)

fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)

strategy_loader = PluginLoader(
'StrategyModule',
'ansible.plugins.strategy',

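With fragment_loader now defined ahead of the other loaders, the config-definition step shown above can resolve extends_documentation_fragment references while reading a plugin's DOCUMENTATION. Roughly, the lookup performed for each plugin file looks like the sketch below; the path and plugin names are placeholders, not values from this commit:

    from ansible import constants as C
    from ansible.plugins.loader import fragment_loader
    from ansible.utils.plugin_docs import get_docstring

    path = '/path/to/ansible/plugins/shell/sh.py'  # placeholder path
    doc = get_docstring(path, fragment_loader, verbose=False, ignore_errors=True)[0]
    if doc and isinstance(doc.get('options'), dict):
        # register the documented options as configuration definitions
        C.config.initialize_plugin_configuration_definitions('shell', 'sh', doc['options'])
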
@ -18,10 +18,10 @@ from __future__ import (absolute_import, division, print_function)
|
|||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import re
|
||||
import ansible.constants as C
|
||||
import time
|
||||
import os.path
|
||||
import random
|
||||
import re
|
||||
import time
|
||||
|
||||
from ansible.module_utils.six import text_type
|
||||
from ansible.module_utils.six.moves import shlex_quote
|
||||
|
@ -31,26 +31,32 @@ _USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')
|
|||
|
||||
|
||||
class ShellBase(AnsiblePlugin):
|
||||
|
||||
def __init__(self):
|
||||
|
||||
super(ShellBase, self).__init__()
|
||||
|
||||
self.env = dict()
|
||||
if C.DEFAULT_MODULE_SET_LOCALE:
|
||||
module_locale = C.DEFAULT_MODULE_LANG or os.getenv('LANG', 'en_US.UTF-8')
|
||||
self.env = {}
|
||||
self.tempdir = None
|
||||
|
||||
def set_options(self, task_keys=None, var_options=None, direct=None):
|
||||
|
||||
super(ShellBase, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
|
||||
|
||||
# not all shell modules have this option
|
||||
if self.get_option('set_module_language'):
|
||||
self.env.update(
|
||||
dict(
|
||||
LANG=module_locale,
|
||||
LC_ALL=module_locale,
|
||||
LC_MESSAGES=module_locale,
|
||||
LANG=self.get_option('module_language'),
|
||||
LC_ALL=self.get_option('module_language'),
|
||||
LC_MESSAGES=self.get_option('module_language'),
|
||||
)
|
||||
)
|
||||
|
||||
# set env
|
||||
self.env.update(self.get_option('environment'))
|
||||
|
||||
def env_prefix(self, **kwargs):
|
||||
env = self.env.copy()
|
||||
env.update(kwargs)
|
||||
return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
|
||||
return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in kwargs.items()])
|
||||
|
||||
def join_path(self, *args):
|
||||
return os.path.join(*args)
|
||||
|
@ -96,32 +102,27 @@ class ShellBase(AnsiblePlugin):
|
|||
cmd = ['test', '-e', shlex_quote(path)]
|
||||
return ' '.join(cmd)
|
||||
|
||||
def mkdtemp(self, basefile=None, system=False, mode=None, tmpdir=None):
|
||||
def mkdtemp(self, basefile=None, system=False, mode=0o700, tmpdir=None):
|
||||
if not basefile:
|
||||
basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
|
||||
|
||||
# When system is specified we have to create this in a directory where
|
||||
# other users can read and access the temp directory. This is because
|
||||
# we use system to create tmp dirs for unprivileged users who are
|
||||
# sudo'ing to a second unprivileged user. The only dirctories where
|
||||
# that is standard are the tmp dirs, /tmp and /var/tmp. So we only
|
||||
# allow one of those two locations if system=True. However, users
|
||||
# might want to have some say over which of /tmp or /var/tmp is used
|
||||
# (because /tmp may be a tmpfs and want to conserve RAM or persist the
|
||||
# tmp files beyond a reboot. So we check if the user set REMOTE_TMP
|
||||
# to somewhere in or below /var/tmp and if so use /var/tmp. If
|
||||
# anything else we use /tmp (because /tmp is specified by POSIX nad
|
||||
# /var/tmp is not).
|
||||
# other users can read and access the temp directory.
|
||||
# This is because we use system to create tmp dirs for unprivileged users who are
|
||||
# sudo'ing to a second unprivileged user.
|
||||
# The 'system_temps' setting defines dirctories we can use for this purpose
|
||||
# the default are, /tmp and /var/tmp.
|
||||
# So we only allow one of those locations if system=True, using the
|
||||
# passed in tmpdir if it is valid or the first one from the setting if not.
|
||||
|
||||
if system:
|
||||
# FIXME: create 'system tmp dirs' config/var and check tmpdir is in those values to allow for /opt/tmp, etc
|
||||
if tmpdir.startswith('/var/tmp'):
|
||||
basetmpdir = '/var/tmp'
|
||||
if tmpdir.startswith(tuple(self.get_option('system_temps'))):
|
||||
basetmpdir = tmpdir
|
||||
else:
|
||||
basetmpdir = '/tmp'
|
||||
basetmpdir = self.get_option('system_temps')[0]
|
||||
else:
|
||||
if tmpdir is None:
|
||||
basetmpdir = C.DEFAULT_REMOTE_TMP
|
||||
basetmpdir = self.get_option('remote_temp')
|
||||
else:
|
||||
basetmpdir = tmpdir
|
||||
|
||||
|
@ -138,13 +139,15 @@ class ShellBase(AnsiblePlugin):
|
|||
|
||||
return cmd
|
||||
|
||||
def expand_user(self, user_home_path):
|
||||
def expand_user(self, user_home_path, username=''):
|
||||
''' Return a command to expand tildes in a path
|
||||
|
||||
It can be either "~" or "~username". We use the POSIX definition of
|
||||
a username:
|
||||
It can be either "~" or "~username". We just ignore $HOME
|
||||
We use the POSIX definition of a username:
|
||||
http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_426
|
||||
http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_276
|
||||
|
||||
Falls back to 'current workind directory' as we assume 'home is where the remote user ends up'
|
||||
'''
|
||||
|
||||
# Check that the user_path to expand is safe
|
||||
|
@ -152,9 +155,17 @@ class ShellBase(AnsiblePlugin):
|
|||
if not _USER_HOME_PATH_RE.match(user_home_path):
|
||||
# shlex_quote will make the shell return the string verbatim
|
||||
user_home_path = shlex_quote(user_home_path)
|
||||
elif username:
|
||||
# if present the user name is appended to resolve "that user's home"
|
||||
user_home_path += username
|
||||
|
||||
return 'echo %s' % user_home_path
|
||||
|
||||
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
|
||||
def pwd(self):
|
||||
"""Return the working directory after connecting"""
|
||||
return 'echo %spwd%s' % (self._SHELL_SUB_LEFT, self._SHELL_SUB_RIGHT)
|
||||
|
||||
def build_module_command(self, env_string, shebang, cmd, arg_path=None):
    # don't quote the cmd if it's an empty string, because this will break pipelining mode
    if cmd.strip() != '':
        cmd = shlex_quote(cmd)

@@ -168,8 +179,6 @@ class ShellBase(AnsiblePlugin):
    if arg_path is not None:
        cmd_parts.append(arg_path)
    new_cmd = " ".join(cmd_parts)
    if rm_tmp:
        new_cmd = '%s; rm -rf "%s" %s' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
    return new_cmd

def append_command(self, cmd, cmd_to_append):

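Note that the rm_tmp parameter disappears here, and the same appended-cleanup pattern is removed from the fish and powershell plugins further down: the assembled module command no longer tacks its own rm -rf onto the end, leaving temp-dir removal to the calling code. A rough before/after sketch with hypothetical values:

    # before: build_module_command(env, shebang, cmd, rm_tmp='/var/tmp/ansible-tmp-123')
    #   -> '<env> <module cmd>; rm -rf "/var/tmp/ansible-tmp-123" <redirect-all-to-null>'
    # after:  build_module_command(env, shebang, cmd)
    #   -> '<env> <module cmd>'
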
@@ -1,24 +1,22 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.plugins.shell import ShellBase

DOCUMENTATION = '''
name: csh
plugin_type: shell
version_added: ""
short_description: C shell (/bin/csh)
description:
  - When you have no other option than to use csh
extends_documentation_fragment:
  - shell_common
'''


class ShellModule(ShellBase):

@@ -1,19 +1,6 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


@@ -21,6 +8,17 @@ from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import shlex_quote
from ansible.plugins.shell.sh import ShellModule as ShModule

DOCUMENTATION = '''
name: fish
plugin_type: shell
version_added: ""
short_description: fish shell (/bin/fish)
description:
  - This is here because some people are restricted to fish.
extends_documentation_fragment:
  - shell_common
'''


class ShellModule(ShModule):

@@ -43,7 +41,7 @@ class ShellModule(ShModule):
    env.update(kwargs)
    return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()])

def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
def build_module_command(self, env_string, shebang, cmd, arg_path=None):
    # don't quote the cmd if it's an empty string, because this will break pipelining mode
    if cmd.strip() != '':
        cmd = shlex_quote(cmd)

@@ -51,8 +49,6 @@ class ShellModule(ShModule):
    if arg_path is not None:
        cmd_parts.append(arg_path)
    new_cmd = " ".join(cmd_parts)
    if rm_tmp:
        new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
    return new_cmd

def checksum(self, path, python_interp):

@@ -1,22 +1,18 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
name: powershell
plugin_type: shell
version_added: ""
short_description: Windows PowerShell
description:
  - The only option when using 'winrm' as a connection plugin
'''

import base64
import os
import re

@@ -1693,8 +1689,10 @@ Function Run($payload) {

''' # end async_watchdog

from ansible.plugins import AnsiblePlugin

class ShellModule(object):

class ShellModule(AnsiblePlugin):

    # Common shell filenames that this plugin handles
    # Powershell is handled differently. It's selected when winrm is the

@@ -1773,7 +1771,7 @@ class ShellModule(object):
    # FIXME: Support system temp path and passed in tmpdir!
    return self._encode_script('''(New-Item -Type Directory -Path $env:temp -Name "%s").FullName | Write-Host -Separator '';''' % basefile)

def expand_user(self, user_home_path):
def expand_user(self, user_home_path, username=''):
    # PowerShell only supports "~" (not "~username"). Resolve-Path ~ does
    # not seem to work remotely, though by default we are always starting
    # in the user's home directory.

@@ -1823,7 +1821,7 @@ class ShellModule(object):
    ''' % dict(path=path)
    return self._encode_script(script)

def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
def build_module_command(self, env_string, shebang, cmd, arg_path=None):
    # pipelining bypass
    if cmd == '':
        return '-'

@@ -1878,10 +1876,6 @@ class ShellModule(object):
Exit 1
}
''' % (env_string, ' '.join(cmd_parts))
    if rm_tmp:
        rm_tmp = self._escape(self._unquote(rm_tmp))
        rm_cmd = 'Remove-Item "%s" -Force -Recurse -ErrorAction SilentlyContinue' % rm_tmp
        script = '%s\nFinally { %s }' % (script, rm_cmd)
    return self._encode_script(script, preserve_rc=False)

def wrap_for_exec(self, cmd):

@@ -1,22 +1,19 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
name: sh
plugin_type: shell
short_description: "POSIX shell (/bin/sh)"
version_added: historical
description:
  - This shell plugin is the one you want to use on most Unix systems; it is the most compatible and widely installed shell.
extends_documentation_fragment:
  - shell_common
'''

from ansible.module_utils.six.moves import shlex_quote
from ansible.plugins.shell import ShellBase

@@ -26,6 +23,8 @@ class ShellModule(ShellBase):

    # Common shell filenames that this plugin handles.
    # Note: sh is the default shell plugin so this plugin may also be selected
    # This code needs to be SH-compliant. BASH-isms will not work if /bin/sh points to a non-BASH shell.

    # if the filename is not listed in any Shell plugin.
    COMPATIBLE_SHELLS = frozenset(('sh', 'zsh', 'bash', 'dash', 'ksh'))
    # Family of shells this has. Must match the filename without extension

@@ -42,22 +41,16 @@ class ShellModule(ShellBase):
    _SHELL_GROUP_RIGHT = ')'

    def checksum(self, path, python_interp):
        # The following test needs to be SH-compliant. BASH-isms will
        # not work if /bin/sh points to a non-BASH shell.
        #
        # In the following test, each condition is a check and logical
        # comparison (|| or &&) that sets the rc value. Every check is run so
        # the last check in the series to fail will be the rc that is
        # returned.
        # the last check in the series to fail will be the rc that is returned.
        #
        # If a check fails we error before invoking the hash functions because
        # hash functions may successfully take the hash of a directory on BSDs
        # (UFS filesystem?) which is not what the rest of the ansible code
        # expects
        # (UFS filesystem?) which is not what the rest of the ansible code expects
        #
        # If all of the available hashing methods fail we fail with an rc of
        # 0. This logic is added to the end of the cmd at the bottom of this
        # function.
        # If all of the available hashing methods fail we fail with an rc of 0.
        # This logic is added to the end of the cmd at the bottom of this function.

        # Return codes:
        # checksum: success!

@@ -1,20 +1,5 @@
#
# (c) 2016, Sumit Kumar <sumit4@netapp.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)


class ModuleDocFragment(object):

lib/ansible/utils/module_docs_fragments/shell_common.py (new file, 92 lines)
@@ -0,0 +1,92 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)


class ModuleDocFragment(object):

    # common shell documentation fragment
    DOCUMENTATION = """
    options:
      remote_temp:
        description:
          - Temporary directory to use on targets when executing tasks.
        default: '~/.ansible/tmp'
        env: [{name: ANSIBLE_REMOTE_TEMP}]
        ini:
          - section: defaults
            key: remote_tmp
        vars:
          - name: ansible_remote_tmp
      system_temps:
        description:
          - List of valid system temporary directories for Ansible to choose when it cannot use ``remote_temp``, normally due to permission issues.
        default: [ /var/tmp, /tmp ]
        type: list
        env: [{name: ANSIBLE_SYSTEM_TMPS}]
        ini:
          - section: defaults
            key: system_tmps
        vars:
          - name: ansible_system_tmps
      async_dir:
        description:
          - Directory in which ansible will keep async job information
        default: '~/.ansible_async'
        env: [{name: ANSIBLE_ASYNC_DIR}]
        ini:
          - section: defaults
            key: async_dir
        vars:
          - name: ansible_async_dir
      set_module_language:
        default: False
        description: Controls if we set locale for modules when executing on the target.
        env:
          - name: ANSIBLE_MODULE_SET_LOCALE
        ini:
          - section: defaults
            key: module_set_locale
        type: boolean
        vars:
          - name: ansible_module_set_locale
      module_language:
        description:
          - "If 'set_module_language' is true, this is the language/locale setting to use for modules when they execute on the target."
          - "Defaults to match the controller's settings."
        default: "{{CONTROLLER_LANG}}"
        env:
          - name: ANSIBLE_MODULE_LANG
        ini:
          - section: defaults
            key: module_lang
        vars:
          - name: ansible_module_lang
      environment:
        type: dict
        default: {}
        description:
          - Dictionary of environment variables and their values to use when executing commands.
      admin_users:
        type: list
        default: ['root', 'toor', 'admin']
        description:
          - List of users expected to have admin privileges; for BSD you might want to add 'toor', for Windows 'Administrator'.
        env:
          - name: ANSIBLE_ADMIN_USERS
        ini:
          - section: defaults
            key: admin_users
        vars:
          - name: ansible_admin_users
      allow_world_readable_temp:
        type: boolean
        description:
          - This makes the temporary files created on the machine world readable and will issue a warning instead of failing the task.
          - It is useful when becoming an unprivileged user.
        ini:
          - section: defaults
            key: allow_world_readable_tmpfiles
        vars:
          - name: ansible_world_readable_tmpfiles
        version_added: "2.1"
    """
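As a small usage sketch, a shell plugin (or anything holding one, such as connection._shell in the unit tests below) reads these options through get_option; the helper function here is illustrative only, and the values in the comments are just the defaults declared above:

    def describe_shell_options(connection):
        # illustrative helper, not part of this change
        shell = connection._shell
        return {
            'remote_temp': shell.get_option('remote_temp'),      # '~/.ansible/tmp' by default
            'system_temps': shell.get_option('system_temps'),    # ['/var/tmp', '/tmp'] by default
            'admin_users': shell.get_option('admin_users'),      # ['root', 'toor', 'admin'] by default
            'async_dir': shell.get_option('async_dir'),          # '~/.ansible_async' by default
        }
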
@@ -27,7 +27,6 @@ from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
from ansible.parsing.plugin_docs import read_docstring
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.plugins.loader import fragment_loader

try:
    from __main__ import display

@@ -59,7 +58,7 @@ def merge_fragment(target, source):
target[key] = value


def add_fragments(doc, filename):
def add_fragments(doc, filename, fragment_loader):

    fragments = doc.pop('extends_documentation_fragment', [])


@@ -99,6 +98,8 @@ def add_fragments(doc, filename):
merge_fragment(doc['options'], fragment.pop('options'))
except Exception as e:
    raise AnsibleError("%s options (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
else:
    doc['options'] = fragment.pop('options')

# merge rest of the sections
try:

@@ -107,15 +108,15 @@ def add_fragments(doc, filename):
raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))


def get_docstring(filename, verbose=False):
def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False):
    """
    DOCUMENTATION can be extended using documentation fragments loaded by the PluginLoader from the module_docs_fragments directory.
    """

    data = read_docstring(filename, verbose=verbose)
    data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)

    # add fragments to documentation
    if data.get('doc', False):
        add_fragments(data['doc'], filename)
        add_fragments(data['doc'], filename, fragment_loader=fragment_loader)

    return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']

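Callers now pass the fragment loader in explicitly, as the validate-modules hunks below show. A minimal usage sketch; the module path is a placeholder, not a file touched by this change:

    from ansible.plugins.loader import fragment_loader
    from ansible.utils.plugin_docs import get_docstring

    doc, examples, returndocs, metadata = get_docstring('/path/to/my_module.py', fragment_loader, verbose=True)
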
@@ -1,7 +1,7 @@
- block:

    - name: Create a local temporary directory
      shell: mktemp -d "${TMPDIR:-/tmp}/ansible_test.XXXXXXXXX"
      shell: mktemp -d /tmp/ansible_test.XXXXXXXXX
      register: tempfile_result
      connection: local


@@ -10,6 +10,9 @@
    # output_dir is hardcoded in test/runner/lib/executor.py and created there
    remote_dir: '{{ output_dir }}'

    - file: path={{local_temp_dir}} state=directory
      name: ensure temp dir exists

    - name: Create remote unprivileged user
      user:
        name: '{{ remote_unprivileged_user }}'

@@ -198,7 +198,7 @@
  assert:
    that:
      - _check_mode_test2 is skipped
      - '_check_mode_test2.msg == "skipped, since {{ output_dir_test | expanduser }}/afile2.txt exists"'
      - '_check_mode_test2.msg == "{{ output_dir_test | expanduser }}/afile2.txt exists, matching creates option"'

- name: Remove afile2.txt
  file:

@@ -220,4 +220,4 @@
  assert:
    that:
      - _check_mode_test3 is skipped
      - '_check_mode_test3.msg == "skipped, since {{ output_dir_test | expanduser }}/afile2.txt does not exist"'
      - '_check_mode_test3.msg == "{{ output_dir_test | expanduser }}/afile2.txt does not exist, matching removes option"'

@@ -38,6 +38,7 @@ from fnmatch import fnmatch

from ansible import __version__ as ansible_version
from ansible.executor.module_common import REPLACER_WINDOWS
from ansible.plugins.loader import fragment_loader
from ansible.utils.plugin_docs import BLACKLIST, get_docstring

from module_args import AnsibleModuleImportError, get_argument_spec

@@ -829,7 +830,7 @@ class ModuleValidator(Validator):
if not errors and not traces:
    with CaptureStd():
        try:
            get_docstring(self.path, verbose=True)
            get_docstring(self.path, fragment_loader, verbose=True)
        except AssertionError:
            fragment = doc['extends_documentation_fragment']
            self.reporter.error(

@@ -1026,7 +1027,7 @@ class ModuleValidator(Validator):

with CaptureStd():
    try:
        existing_doc, _, _, _ = get_docstring(self.base_module, verbose=True)
        existing_doc = get_docstring(self.base_module, fragment_loader, verbose=True)[0]
        existing_options = existing_doc.get('options', {}) or {}
    except AssertionError:
        fragment = doc['extends_documentation_fragment']

@@ -21,6 +21,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re

from ansible import constants as C
from ansible.compat.tests import unittest

@@ -229,11 +230,23 @@ class TestActionBase(unittest.TestCase):
# create our fake task
mock_task = MagicMock()

def get_shell_opt(opt):

    ret = None
    if opt == 'admin_users':
        ret = ['root', 'toor', 'Administrator']
    elif opt == 'remote_temp':
        ret = '~/.ansible/tmp'

    return ret

# create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock()
mock_connection.transport = 'ssh'
mock_connection._shell.mkdtemp.return_value = 'mkdir command'
mock_connection._shell.join_path.side_effect = os.path.join
mock_connection._shell.get_option = get_shell_opt
mock_connection._shell.HOMES_RE = re.compile(r'(\'|\")?(~|\$HOME)(.*)')

# we're using a real play context here
play_context = PlayContext()

@@ -395,12 +408,10 @@ class TestActionBase(unittest.TestCase):
mock_task.args = dict(a=1, b=2, c=3)

# create a mock connection, so we don't actually try and connect to things
def build_module_command(env_string, shebang, cmd, arg_path=None, rm_tmp=None):
def build_module_command(env_string, shebang, cmd, arg_path=None):
    to_run = [env_string, cmd]
    if arg_path:
        to_run.append(arg_path)
    if rm_tmp:
        to_run.append(rm_tmp)
    return " ".join(to_run)

mock_connection = MagicMock()

@@ -63,6 +63,11 @@ class ConnectionMock(object):
    transport = None
    _new_stdin = StdinMock()

    # my shell
    _shell = MagicMock()
    _shell.mkdtemp.return_value = 'mkdir command'
    _shell.join_path.side_effect = os.path.join


class PlayContextMock(object):
    shell = None