mirror of
https://github.com/ansible-collections/community.general.git
synced 2024-09-14 20:13:21 +02:00
Revert templating enhancements from 73dbab70
e6c28658
d409352c
9858b1f2
4587528b
9b1fe455
214b0b05
8d3db803
7f9504d1
5031104c
35cb9dc2
2bd8cb57
1e85c754
This commit is contained in:
parent
e0fbaea566
commit
d154bf8781
11 changed files with 1244 additions and 1397 deletions
|
@ -77,32 +77,39 @@ def write_argsfile(argstring, json=False):
|
||||||
def boilerplate_module(modfile, args):
|
def boilerplate_module(modfile, args):
|
||||||
""" simulate what ansible does with new style modules """
|
""" simulate what ansible does with new style modules """
|
||||||
|
|
||||||
#module_fh = open(modfile)
|
module_fh = open(modfile)
|
||||||
#module_data = module_fh.read()
|
module_data = module_fh.read()
|
||||||
#module_fh.close()
|
included_boilerplate = module_data.find(module_common.REPLACER) != -1
|
||||||
|
module_fh.close()
|
||||||
|
|
||||||
replacer = module_common.ModuleReplacer()
|
if included_boilerplate:
|
||||||
|
|
||||||
#included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1
|
module_data = module_data.replace(module_common.REPLACER, module_common.MODULE_COMMON)
|
||||||
|
encoded_args = repr(str(args))
|
||||||
|
module_data = module_data.replace(module_common.REPLACER_ARGS, encoded_args)
|
||||||
|
encoded_lang = repr(C.DEFAULT_MODULE_LANG)
|
||||||
|
empty_complex = repr("{}")
|
||||||
|
module_data = module_data.replace(module_common.REPLACER_LANG, encoded_lang)
|
||||||
|
module_data = module_data.replace('syslog.LOG_USER', "syslog.%s" % C.DEFAULT_SYSLOG_FACILITY)
|
||||||
|
module_data = module_data.replace(module_common.REPLACER_COMPLEX, empty_complex)
|
||||||
|
|
||||||
complex_args = {}
|
modfile2_path = os.path.expanduser("~/.ansible_module_generated")
|
||||||
inject = {}
|
print "* including generated source, if any, saving to: %s" % modfile2_path
|
||||||
(module_data, module_style, shebang) = replacer.modify_module(
|
print "* this will offset any line numbers in tracebacks/debuggers!"
|
||||||
modfile,
|
modfile2 = open(modfile2_path, 'w')
|
||||||
complex_args,
|
modfile2.write(module_data)
|
||||||
args,
|
modfile2.close()
|
||||||
inject
|
modfile = modfile2_path
|
||||||
)
|
|
||||||
|
|
||||||
modfile2_path = os.path.expanduser("~/.ansible_module_generated")
|
return (modfile2_path, included_boilerplate, False)
|
||||||
print "* including generated source, if any, saving to: %s" % modfile2_path
|
else:
|
||||||
print "* this may offset any line numbers in tracebacks/debuggers!"
|
|
||||||
modfile2 = open(modfile2_path, 'w')
|
|
||||||
modfile2.write(module_data)
|
|
||||||
modfile2.close()
|
|
||||||
modfile = modfile2_path
|
|
||||||
|
|
||||||
return (modfile2_path, module_style)
|
old_style_but_json = False
|
||||||
|
if 'WANT_JSON' in module_data:
|
||||||
|
old_style_but_json = True
|
||||||
|
|
||||||
|
print "* module boilerplate substitution not requested in module, line numbers will be unaltered"
|
||||||
|
return (modfile, included_boilerplate, old_style_but_json)
|
||||||
|
|
||||||
def runtest( modfile, argspath):
|
def runtest( modfile, argspath):
|
||||||
"""Test run a module, piping it's output for reporting."""
|
"""Test run a module, piping it's output for reporting."""
|
||||||
|
@ -144,16 +151,14 @@ def rundebug(debugger, modfile, argspath):
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
options, args = parse()
|
options, args = parse()
|
||||||
(modfile, module_style) = boilerplate_module(options.module_path, options.module_args)
|
(modfile, is_new_style, old_style_but_json) = boilerplate_module(options.module_path, options.module_args)
|
||||||
|
|
||||||
argspath=None
|
argspath=None
|
||||||
if module_style != 'new':
|
if not is_new_style:
|
||||||
if module_style == 'non_native_want_json':
|
if old_style_but_json:
|
||||||
argspath = write_argsfile(options.module_args, json=True)
|
argspath = write_argsfile(options.module_args, json=True)
|
||||||
elif module_style == 'old':
|
|
||||||
argspath = write_argsfile(options.module_args, json=False)
|
|
||||||
else:
|
else:
|
||||||
raise Exception("internal error, unexpected module style: %s" % module_style)
|
argspath = write_argsfile(options.module_args, json=False)
|
||||||
if options.debugger:
|
if options.debugger:
|
||||||
rundebug(options.debugger, modfile, argspath)
|
rundebug(options.debugger, modfile, argspath)
|
||||||
else:
|
else:
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,958 +0,0 @@
|
||||||
# This code is part of Ansible, but is an independent component.
|
|
||||||
# This particular file snippet, and this file snippet only, is BSD licensed.
|
|
||||||
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
|
||||||
# still belong to the author of the module, and may assign their own license
|
|
||||||
# to the complete work.
|
|
||||||
#
|
|
||||||
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
|
|
||||||
# All rights reserved.
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
# are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
|
||||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
||||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
||||||
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
||||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
#
|
|
||||||
|
|
||||||
# == BEGIN DYNAMICALLY INSERTED CODE ==
|
|
||||||
|
|
||||||
MODULE_ARGS = "<<INCLUDE_ANSIBLE_MODULE_ARGS>>"
|
|
||||||
MODULE_LANG = "<<INCLUDE_ANSIBLE_MODULE_LANG>>"
|
|
||||||
MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"
|
|
||||||
|
|
||||||
BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
|
|
||||||
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
|
|
||||||
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
|
|
||||||
|
|
||||||
# ansible modules can be written in any language. To simplify
|
|
||||||
# development of Python modules, the functions available here
|
|
||||||
# can be inserted in any module source automatically by including
|
|
||||||
# #<<INCLUDE_ANSIBLE_MODULE_COMMON>> on a blank line by itself inside
|
|
||||||
# of an ansible module. The source of this common code lives
|
|
||||||
# in lib/ansible/module_common.py
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shlex
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import syslog
|
|
||||||
import types
|
|
||||||
import time
|
|
||||||
import shutil
|
|
||||||
import stat
|
|
||||||
import traceback
|
|
||||||
import grp
|
|
||||||
import pwd
|
|
||||||
import platform
|
|
||||||
import errno
|
|
||||||
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
except ImportError:
|
|
||||||
try:
|
|
||||||
import simplejson as json
|
|
||||||
except ImportError:
|
|
||||||
sys.stderr.write('Error: ansible requires a json module, none found!')
|
|
||||||
sys.exit(1)
|
|
||||||
except SyntaxError:
|
|
||||||
sys.stderr.write('SyntaxError: probably due to json and python being for different versions')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
HAVE_SELINUX=False
|
|
||||||
try:
|
|
||||||
import selinux
|
|
||||||
HAVE_SELINUX=True
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
HAVE_HASHLIB=False
|
|
||||||
try:
|
|
||||||
from hashlib import md5 as _md5
|
|
||||||
HAVE_HASHLIB=True
|
|
||||||
except ImportError:
|
|
||||||
from md5 import md5 as _md5
|
|
||||||
|
|
||||||
try:
|
|
||||||
from hashlib import sha256 as _sha256
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
from systemd import journal
|
|
||||||
has_journal = True
|
|
||||||
except ImportError:
|
|
||||||
import syslog
|
|
||||||
has_journal = False
|
|
||||||
|
|
||||||
FILE_COMMON_ARGUMENTS=dict(
|
|
||||||
src = dict(),
|
|
||||||
mode = dict(),
|
|
||||||
owner = dict(),
|
|
||||||
group = dict(),
|
|
||||||
seuser = dict(),
|
|
||||||
serole = dict(),
|
|
||||||
selevel = dict(),
|
|
||||||
setype = dict(),
|
|
||||||
# not taken by the file module, but other modules call file so it must ignore them.
|
|
||||||
content = dict(),
|
|
||||||
backup = dict(),
|
|
||||||
force = dict(),
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_platform():
|
|
||||||
''' what's the platform? example: Linux is a platform. '''
|
|
||||||
return platform.system()
|
|
||||||
|
|
||||||
def get_distribution():
|
|
||||||
''' return the distribution name '''
|
|
||||||
if platform.system() == 'Linux':
|
|
||||||
try:
|
|
||||||
distribution = platform.linux_distribution()[0].capitalize()
|
|
||||||
if distribution == 'NA':
|
|
||||||
if os.path.is_file('/etc/system-release'):
|
|
||||||
distribution = 'OtherLinux'
|
|
||||||
except:
|
|
||||||
# FIXME: MethodMissing, I assume?
|
|
||||||
distribution = platform.dist()[0].capitalize()
|
|
||||||
else:
|
|
||||||
distribution = None
|
|
||||||
return distribution
|
|
||||||
|
|
||||||
def load_platform_subclass(cls, *args, **kwargs):
|
|
||||||
'''
|
|
||||||
used by modules like User to have different implementations based on detected platform. See User
|
|
||||||
module for an example.
|
|
||||||
'''
|
|
||||||
|
|
||||||
this_platform = get_platform()
|
|
||||||
distribution = get_distribution()
|
|
||||||
subclass = None
|
|
||||||
|
|
||||||
# get the most specific superclass for this platform
|
|
||||||
if distribution is not None:
|
|
||||||
for sc in cls.__subclasses__():
|
|
||||||
if sc.distribution is not None and sc.distribution == distribution and sc.platform == this_platform:
|
|
||||||
subclass = sc
|
|
||||||
if subclass is None:
|
|
||||||
for sc in cls.__subclasses__():
|
|
||||||
if sc.platform == this_platform and sc.distribution is None:
|
|
||||||
subclass = sc
|
|
||||||
if subclass is None:
|
|
||||||
subclass = cls
|
|
||||||
|
|
||||||
return super(cls, subclass).__new__(subclass)
|
|
||||||
|
|
||||||
|
|
||||||
class AnsibleModule(object):
|
|
||||||
|
|
||||||
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
|
|
||||||
check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
|
|
||||||
required_one_of=None, add_file_common_args=False, supports_check_mode=False):
|
|
||||||
|
|
||||||
'''
|
|
||||||
common code for quickly building an ansible module in Python
|
|
||||||
(although you can write modules in anything that can return JSON)
|
|
||||||
see library/* for examples
|
|
||||||
'''
|
|
||||||
|
|
||||||
self.argument_spec = argument_spec
|
|
||||||
self.supports_check_mode = supports_check_mode
|
|
||||||
self.check_mode = False
|
|
||||||
|
|
||||||
self.aliases = {}
|
|
||||||
|
|
||||||
if add_file_common_args:
|
|
||||||
for k, v in FILE_COMMON_ARGUMENTS.iteritems():
|
|
||||||
if k not in self.argument_spec:
|
|
||||||
self.argument_spec[k] = v
|
|
||||||
|
|
||||||
os.environ['LANG'] = MODULE_LANG
|
|
||||||
(self.params, self.args) = self._load_params()
|
|
||||||
|
|
||||||
self._legal_inputs = [ 'CHECKMODE' ]
|
|
||||||
|
|
||||||
self.aliases = self._handle_aliases()
|
|
||||||
|
|
||||||
if check_invalid_arguments:
|
|
||||||
self._check_invalid_arguments()
|
|
||||||
self._check_for_check_mode()
|
|
||||||
|
|
||||||
self._set_defaults(pre=True)
|
|
||||||
|
|
||||||
if not bypass_checks:
|
|
||||||
self._check_required_arguments()
|
|
||||||
self._check_argument_values()
|
|
||||||
self._check_argument_types()
|
|
||||||
self._check_mutually_exclusive(mutually_exclusive)
|
|
||||||
self._check_required_together(required_together)
|
|
||||||
self._check_required_one_of(required_one_of)
|
|
||||||
|
|
||||||
self._set_defaults(pre=False)
|
|
||||||
if not no_log:
|
|
||||||
self._log_invocation()
|
|
||||||
|
|
||||||
def load_file_common_arguments(self, params):
|
|
||||||
'''
|
|
||||||
many modules deal with files, this encapsulates common
|
|
||||||
options that the file module accepts such that it is directly
|
|
||||||
available to all modules and they can share code.
|
|
||||||
'''
|
|
||||||
|
|
||||||
path = params.get('path', params.get('dest', None))
|
|
||||||
if path is None:
|
|
||||||
return {}
|
|
||||||
else:
|
|
||||||
path = os.path.expanduser(path)
|
|
||||||
|
|
||||||
mode = params.get('mode', None)
|
|
||||||
owner = params.get('owner', None)
|
|
||||||
group = params.get('group', None)
|
|
||||||
|
|
||||||
# selinux related options
|
|
||||||
seuser = params.get('seuser', None)
|
|
||||||
serole = params.get('serole', None)
|
|
||||||
setype = params.get('setype', None)
|
|
||||||
selevel = params.get('selevel', None)
|
|
||||||
secontext = [seuser, serole, setype]
|
|
||||||
|
|
||||||
if self.selinux_mls_enabled():
|
|
||||||
secontext.append(selevel)
|
|
||||||
|
|
||||||
default_secontext = self.selinux_default_context(path)
|
|
||||||
for i in range(len(default_secontext)):
|
|
||||||
if i is not None and secontext[i] == '_default':
|
|
||||||
secontext[i] = default_secontext[i]
|
|
||||||
|
|
||||||
return dict(
|
|
||||||
path=path, mode=mode, owner=owner, group=group,
|
|
||||||
seuser=seuser, serole=serole, setype=setype,
|
|
||||||
selevel=selevel, secontext=secontext,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# Detect whether using selinux that is MLS-aware.
|
|
||||||
# While this means you can set the level/range with
|
|
||||||
# selinux.lsetfilecon(), it may or may not mean that you
|
|
||||||
# will get the selevel as part of the context returned
|
|
||||||
# by selinux.lgetfilecon().
|
|
||||||
|
|
||||||
def selinux_mls_enabled(self):
|
|
||||||
if not HAVE_SELINUX:
|
|
||||||
return False
|
|
||||||
if selinux.is_selinux_mls_enabled() == 1:
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def selinux_enabled(self):
|
|
||||||
if not HAVE_SELINUX:
|
|
||||||
seenabled = self.get_bin_path('selinuxenabled')
|
|
||||||
if seenabled is not None:
|
|
||||||
(rc,out,err) = self.run_command(seenabled)
|
|
||||||
if rc == 0:
|
|
||||||
self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
|
|
||||||
return False
|
|
||||||
if selinux.is_selinux_enabled() == 1:
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Determine whether we need a placeholder for selevel/mls
|
|
||||||
def selinux_initial_context(self):
|
|
||||||
context = [None, None, None]
|
|
||||||
if self.selinux_mls_enabled():
|
|
||||||
context.append(None)
|
|
||||||
return context
|
|
||||||
|
|
||||||
def _to_filesystem_str(self, path):
|
|
||||||
'''Returns filesystem path as a str, if it wasn't already.
|
|
||||||
|
|
||||||
Used in selinux interactions because it cannot accept unicode
|
|
||||||
instances, and specifying complex args in a playbook leaves
|
|
||||||
you with unicode instances. This method currently assumes
|
|
||||||
that your filesystem encoding is UTF-8.
|
|
||||||
|
|
||||||
'''
|
|
||||||
if isinstance(path, unicode):
|
|
||||||
path = path.encode("utf-8")
|
|
||||||
return path
|
|
||||||
|
|
||||||
# If selinux fails to find a default, return an array of None
|
|
||||||
def selinux_default_context(self, path, mode=0):
|
|
||||||
context = self.selinux_initial_context()
|
|
||||||
if not HAVE_SELINUX or not self.selinux_enabled():
|
|
||||||
return context
|
|
||||||
try:
|
|
||||||
ret = selinux.matchpathcon(self._to_filesystem_str(path), mode)
|
|
||||||
except OSError:
|
|
||||||
return context
|
|
||||||
if ret[0] == -1:
|
|
||||||
return context
|
|
||||||
# Limit split to 4 because the selevel, the last in the list,
|
|
||||||
# may contain ':' characters
|
|
||||||
context = ret[1].split(':', 3)
|
|
||||||
return context
|
|
||||||
|
|
||||||
def selinux_context(self, path):
|
|
||||||
context = self.selinux_initial_context()
|
|
||||||
if not HAVE_SELINUX or not self.selinux_enabled():
|
|
||||||
return context
|
|
||||||
try:
|
|
||||||
ret = selinux.lgetfilecon_raw(self._to_filesystem_str(path))
|
|
||||||
except OSError, e:
|
|
||||||
if e.errno == errno.ENOENT:
|
|
||||||
self.fail_json(path=path, msg='path %s does not exist' % path)
|
|
||||||
else:
|
|
||||||
self.fail_json(path=path, msg='failed to retrieve selinux context')
|
|
||||||
if ret[0] == -1:
|
|
||||||
return context
|
|
||||||
# Limit split to 4 because the selevel, the last in the list,
|
|
||||||
# may contain ':' characters
|
|
||||||
context = ret[1].split(':', 3)
|
|
||||||
return context
|
|
||||||
|
|
||||||
def user_and_group(self, filename):
|
|
||||||
filename = os.path.expanduser(filename)
|
|
||||||
st = os.lstat(filename)
|
|
||||||
uid = st.st_uid
|
|
||||||
gid = st.st_gid
|
|
||||||
return (uid, gid)
|
|
||||||
|
|
||||||
def set_default_selinux_context(self, path, changed):
|
|
||||||
if not HAVE_SELINUX or not self.selinux_enabled():
|
|
||||||
return changed
|
|
||||||
context = self.selinux_default_context(path)
|
|
||||||
return self.set_context_if_different(path, context, False)
|
|
||||||
|
|
||||||
def set_context_if_different(self, path, context, changed):
|
|
||||||
|
|
||||||
if not HAVE_SELINUX or not self.selinux_enabled():
|
|
||||||
return changed
|
|
||||||
cur_context = self.selinux_context(path)
|
|
||||||
new_context = list(cur_context)
|
|
||||||
# Iterate over the current context instead of the
|
|
||||||
# argument context, which may have selevel.
|
|
||||||
|
|
||||||
for i in range(len(cur_context)):
|
|
||||||
if context[i] is not None and context[i] != cur_context[i]:
|
|
||||||
new_context[i] = context[i]
|
|
||||||
if context[i] is None:
|
|
||||||
new_context[i] = cur_context[i]
|
|
||||||
if cur_context != new_context:
|
|
||||||
try:
|
|
||||||
if self.check_mode:
|
|
||||||
return True
|
|
||||||
rc = selinux.lsetfilecon(self._to_filesystem_str(path),
|
|
||||||
str(':'.join(new_context)))
|
|
||||||
except OSError:
|
|
||||||
self.fail_json(path=path, msg='invalid selinux context', new_context=new_context, cur_context=cur_context, input_was=context)
|
|
||||||
if rc != 0:
|
|
||||||
self.fail_json(path=path, msg='set selinux context failed')
|
|
||||||
changed = True
|
|
||||||
return changed
|
|
||||||
|
|
||||||
def set_owner_if_different(self, path, owner, changed):
|
|
||||||
path = os.path.expanduser(path)
|
|
||||||
if owner is None:
|
|
||||||
return changed
|
|
||||||
orig_uid, orig_gid = self.user_and_group(path)
|
|
||||||
try:
|
|
||||||
uid = int(owner)
|
|
||||||
except ValueError:
|
|
||||||
try:
|
|
||||||
uid = pwd.getpwnam(owner).pw_uid
|
|
||||||
except KeyError:
|
|
||||||
self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
|
|
||||||
if orig_uid != uid:
|
|
||||||
if self.check_mode:
|
|
||||||
return True
|
|
||||||
try:
|
|
||||||
os.lchown(path, uid, -1)
|
|
||||||
except OSError:
|
|
||||||
self.fail_json(path=path, msg='chown failed')
|
|
||||||
changed = True
|
|
||||||
return changed
|
|
||||||
|
|
||||||
def set_group_if_different(self, path, group, changed):
|
|
||||||
path = os.path.expanduser(path)
|
|
||||||
if group is None:
|
|
||||||
return changed
|
|
||||||
orig_uid, orig_gid = self.user_and_group(path)
|
|
||||||
try:
|
|
||||||
gid = int(group)
|
|
||||||
except ValueError:
|
|
||||||
try:
|
|
||||||
gid = grp.getgrnam(group).gr_gid
|
|
||||||
except KeyError:
|
|
||||||
self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
|
|
||||||
if orig_gid != gid:
|
|
||||||
if self.check_mode:
|
|
||||||
return True
|
|
||||||
try:
|
|
||||||
os.lchown(path, -1, gid)
|
|
||||||
except OSError:
|
|
||||||
self.fail_json(path=path, msg='chgrp failed')
|
|
||||||
changed = True
|
|
||||||
return changed
|
|
||||||
|
|
||||||
def set_mode_if_different(self, path, mode, changed):
|
|
||||||
path = os.path.expanduser(path)
|
|
||||||
if mode is None:
|
|
||||||
return changed
|
|
||||||
try:
|
|
||||||
# FIXME: support English modes
|
|
||||||
if not isinstance(mode, int):
|
|
||||||
mode = int(mode, 8)
|
|
||||||
except Exception, e:
|
|
||||||
self.fail_json(path=path, msg='mode needs to be something octalish', details=str(e))
|
|
||||||
|
|
||||||
st = os.lstat(path)
|
|
||||||
prev_mode = stat.S_IMODE(st[stat.ST_MODE])
|
|
||||||
|
|
||||||
if prev_mode != mode:
|
|
||||||
if self.check_mode:
|
|
||||||
return True
|
|
||||||
# FIXME: comparison against string above will cause this to be executed
|
|
||||||
# every time
|
|
||||||
try:
|
|
||||||
if 'lchmod' in dir(os):
|
|
||||||
os.lchmod(path, mode)
|
|
||||||
else:
|
|
||||||
os.chmod(path, mode)
|
|
||||||
except OSError, e:
|
|
||||||
if os.path.islink(path) and e.errno == errno.EPERM: # Can't set mode on symbolic links
|
|
||||||
pass
|
|
||||||
elif e.errno == errno.ENOENT: # Can't set mode on broken symbolic links
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
raise e
|
|
||||||
except Exception, e:
|
|
||||||
self.fail_json(path=path, msg='chmod failed', details=str(e))
|
|
||||||
|
|
||||||
st = os.lstat(path)
|
|
||||||
new_mode = stat.S_IMODE(st[stat.ST_MODE])
|
|
||||||
|
|
||||||
if new_mode != prev_mode:
|
|
||||||
changed = True
|
|
||||||
return changed
|
|
||||||
|
|
||||||
def set_file_attributes_if_different(self, file_args, changed):
|
|
||||||
# set modes owners and context as needed
|
|
||||||
changed = self.set_context_if_different(
|
|
||||||
file_args['path'], file_args['secontext'], changed
|
|
||||||
)
|
|
||||||
changed = self.set_owner_if_different(
|
|
||||||
file_args['path'], file_args['owner'], changed
|
|
||||||
)
|
|
||||||
changed = self.set_group_if_different(
|
|
||||||
file_args['path'], file_args['group'], changed
|
|
||||||
)
|
|
||||||
changed = self.set_mode_if_different(
|
|
||||||
file_args['path'], file_args['mode'], changed
|
|
||||||
)
|
|
||||||
return changed
|
|
||||||
|
|
||||||
def set_directory_attributes_if_different(self, file_args, changed):
|
|
||||||
changed = self.set_context_if_different(
|
|
||||||
file_args['path'], file_args['secontext'], changed
|
|
||||||
)
|
|
||||||
changed = self.set_owner_if_different(
|
|
||||||
file_args['path'], file_args['owner'], changed
|
|
||||||
)
|
|
||||||
changed = self.set_group_if_different(
|
|
||||||
file_args['path'], file_args['group'], changed
|
|
||||||
)
|
|
||||||
changed = self.set_mode_if_different(
|
|
||||||
file_args['path'], file_args['mode'], changed
|
|
||||||
)
|
|
||||||
return changed
|
|
||||||
|
|
||||||
def add_path_info(self, kwargs):
|
|
||||||
'''
|
|
||||||
for results that are files, supplement the info about the file
|
|
||||||
in the return path with stats about the file path.
|
|
||||||
'''
|
|
||||||
|
|
||||||
path = kwargs.get('path', kwargs.get('dest', None))
|
|
||||||
if path is None:
|
|
||||||
return kwargs
|
|
||||||
if os.path.exists(path):
|
|
||||||
(uid, gid) = self.user_and_group(path)
|
|
||||||
kwargs['uid'] = uid
|
|
||||||
kwargs['gid'] = gid
|
|
||||||
try:
|
|
||||||
user = pwd.getpwuid(uid)[0]
|
|
||||||
except KeyError:
|
|
||||||
user = str(uid)
|
|
||||||
try:
|
|
||||||
group = grp.getgrgid(gid)[0]
|
|
||||||
except KeyError:
|
|
||||||
group = str(gid)
|
|
||||||
kwargs['owner'] = user
|
|
||||||
kwargs['group'] = group
|
|
||||||
st = os.lstat(path)
|
|
||||||
kwargs['mode'] = oct(stat.S_IMODE(st[stat.ST_MODE]))
|
|
||||||
# secontext not yet supported
|
|
||||||
if os.path.islink(path):
|
|
||||||
kwargs['state'] = 'link'
|
|
||||||
elif os.path.isdir(path):
|
|
||||||
kwargs['state'] = 'directory'
|
|
||||||
else:
|
|
||||||
kwargs['state'] = 'file'
|
|
||||||
if HAVE_SELINUX and self.selinux_enabled():
|
|
||||||
kwargs['secontext'] = ':'.join(self.selinux_context(path))
|
|
||||||
kwargs['size'] = st[stat.ST_SIZE]
|
|
||||||
else:
|
|
||||||
kwargs['state'] = 'absent'
|
|
||||||
return kwargs
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_aliases(self):
|
|
||||||
aliases_results = {} #alias:canon
|
|
||||||
for (k,v) in self.argument_spec.iteritems():
|
|
||||||
self._legal_inputs.append(k)
|
|
||||||
aliases = v.get('aliases', None)
|
|
||||||
default = v.get('default', None)
|
|
||||||
required = v.get('required', False)
|
|
||||||
if default is not None and required:
|
|
||||||
# not alias specific but this is a good place to check this
|
|
||||||
self.fail_json(msg="internal error: required and default are mutally exclusive for %s" % k)
|
|
||||||
if aliases is None:
|
|
||||||
continue
|
|
||||||
if type(aliases) != list:
|
|
||||||
self.fail_json(msg='internal error: aliases must be a list')
|
|
||||||
for alias in aliases:
|
|
||||||
self._legal_inputs.append(alias)
|
|
||||||
aliases_results[alias] = k
|
|
||||||
if alias in self.params:
|
|
||||||
self.params[k] = self.params[alias]
|
|
||||||
|
|
||||||
return aliases_results
|
|
||||||
|
|
||||||
def _check_for_check_mode(self):
|
|
||||||
for (k,v) in self.params.iteritems():
|
|
||||||
if k == 'CHECKMODE':
|
|
||||||
if not self.supports_check_mode:
|
|
||||||
self.exit_json(skipped=True, msg="remote module does not support check mode")
|
|
||||||
if self.supports_check_mode:
|
|
||||||
self.check_mode = True
|
|
||||||
|
|
||||||
def _check_invalid_arguments(self):
|
|
||||||
for (k,v) in self.params.iteritems():
|
|
||||||
if k == 'CHECKMODE':
|
|
||||||
continue
|
|
||||||
if k not in self._legal_inputs:
|
|
||||||
self.fail_json(msg="unsupported parameter for module: %s" % k)
|
|
||||||
|
|
||||||
def _count_terms(self, check):
|
|
||||||
count = 0
|
|
||||||
for term in check:
|
|
||||||
if term in self.params:
|
|
||||||
count += 1
|
|
||||||
return count
|
|
||||||
|
|
||||||
def _check_mutually_exclusive(self, spec):
|
|
||||||
if spec is None:
|
|
||||||
return
|
|
||||||
for check in spec:
|
|
||||||
count = self._count_terms(check)
|
|
||||||
if count > 1:
|
|
||||||
self.fail_json(msg="parameters are mutually exclusive: %s" % check)
|
|
||||||
|
|
||||||
def _check_required_one_of(self, spec):
|
|
||||||
if spec is None:
|
|
||||||
return
|
|
||||||
for check in spec:
|
|
||||||
count = self._count_terms(check)
|
|
||||||
if count == 0:
|
|
||||||
self.fail_json(msg="one of the following is required: %s" % ','.join(check))
|
|
||||||
|
|
||||||
def _check_required_together(self, spec):
|
|
||||||
if spec is None:
|
|
||||||
return
|
|
||||||
for check in spec:
|
|
||||||
counts = [ self._count_terms([field]) for field in check ]
|
|
||||||
non_zero = [ c for c in counts if c > 0 ]
|
|
||||||
if len(non_zero) > 0:
|
|
||||||
if 0 in counts:
|
|
||||||
self.fail_json(msg="parameters are required together: %s" % check)
|
|
||||||
|
|
||||||
def _check_required_arguments(self):
|
|
||||||
''' ensure all required arguments are present '''
|
|
||||||
missing = []
|
|
||||||
for (k,v) in self.argument_spec.iteritems():
|
|
||||||
required = v.get('required', False)
|
|
||||||
if required and k not in self.params:
|
|
||||||
missing.append(k)
|
|
||||||
if len(missing) > 0:
|
|
||||||
self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
|
|
||||||
|
|
||||||
def _check_argument_values(self):
|
|
||||||
''' ensure all arguments have the requested values, and there are no stray arguments '''
|
|
||||||
for (k,v) in self.argument_spec.iteritems():
|
|
||||||
choices = v.get('choices',None)
|
|
||||||
if choices is None:
|
|
||||||
continue
|
|
||||||
if type(choices) == list:
|
|
||||||
if k in self.params:
|
|
||||||
if self.params[k] not in choices:
|
|
||||||
choices_str=",".join([str(c) for c in choices])
|
|
||||||
msg="value of %s must be one of: %s, got: %s" % (k, choices_str, self.params[k])
|
|
||||||
self.fail_json(msg=msg)
|
|
||||||
else:
|
|
||||||
self.fail_json(msg="internal error: do not know how to interpret argument_spec")
|
|
||||||
|
|
||||||
def _check_argument_types(self):
|
|
||||||
''' ensure all arguments have the requested type '''
|
|
||||||
for (k, v) in self.argument_spec.iteritems():
|
|
||||||
wanted = v.get('type', None)
|
|
||||||
if wanted is None:
|
|
||||||
continue
|
|
||||||
if k not in self.params:
|
|
||||||
continue
|
|
||||||
|
|
||||||
value = self.params[k]
|
|
||||||
is_invalid = False
|
|
||||||
|
|
||||||
if wanted == 'str':
|
|
||||||
if not isinstance(value, basestring):
|
|
||||||
self.params[k] = str(value)
|
|
||||||
elif wanted == 'list':
|
|
||||||
if not isinstance(value, list):
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
self.params[k] = value.split(",")
|
|
||||||
else:
|
|
||||||
is_invalid = True
|
|
||||||
elif wanted == 'dict':
|
|
||||||
if not isinstance(value, dict):
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
self.params[k] = dict([x.split("=", 1) for x in value.split(",")])
|
|
||||||
else:
|
|
||||||
is_invalid = True
|
|
||||||
elif wanted == 'bool':
|
|
||||||
if not isinstance(value, bool):
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
self.params[k] = self.boolean(value)
|
|
||||||
else:
|
|
||||||
is_invalid = True
|
|
||||||
elif wanted == 'int':
|
|
||||||
if not isinstance(value, int):
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
self.params[k] = int(value)
|
|
||||||
else:
|
|
||||||
is_invalid = True
|
|
||||||
else:
|
|
||||||
self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
|
|
||||||
|
|
||||||
if is_invalid:
|
|
||||||
self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
|
|
||||||
|
|
||||||
def _set_defaults(self, pre=True):
|
|
||||||
for (k,v) in self.argument_spec.iteritems():
|
|
||||||
default = v.get('default', None)
|
|
||||||
if pre == True:
|
|
||||||
# this prevents setting defaults on required items
|
|
||||||
if default is not None and k not in self.params:
|
|
||||||
self.params[k] = default
|
|
||||||
else:
|
|
||||||
# make sure things without a default still get set None
|
|
||||||
if k not in self.params:
|
|
||||||
self.params[k] = default
|
|
||||||
|
|
||||||
def _load_params(self):
|
|
||||||
''' read the input and return a dictionary and the arguments string '''
|
|
||||||
args = MODULE_ARGS
|
|
||||||
items = shlex.split(args)
|
|
||||||
params = {}
|
|
||||||
for x in items:
|
|
||||||
try:
|
|
||||||
(k, v) = x.split("=",1)
|
|
||||||
except Exception, e:
|
|
||||||
self.fail_json(msg="this module requires key=value arguments (%s)" % items)
|
|
||||||
params[k] = v
|
|
||||||
params2 = json.loads(MODULE_COMPLEX_ARGS)
|
|
||||||
params2.update(params)
|
|
||||||
return (params2, args)
|
|
||||||
|
|
||||||
def _log_invocation(self):
    ''' log that ansible ran the module '''
    # TODO: generalize a separate log function and make log_invocation use it
    # Sanitize possible password argument when logging.
    log_args = dict()
    passwd_keys = ['password', 'login_password']

    for param in self.params:
        canon = self.aliases.get(param, param)
        arg_opts = self.argument_spec.get(canon, {})
        no_log = arg_opts.get('no_log', False)

        if no_log:
            log_args[param] = 'NOT_LOGGING_PARAMETER'
        elif param in passwd_keys:
            log_args[param] = 'NOT_LOGGING_PASSWORD'
        else:
            log_args[param] = self.params[param]

    module = 'ansible-%s' % os.path.basename(__file__)
    msg = ''
    for arg in log_args:
        msg = msg + arg + '=' + str(log_args[arg]) + ' '
    if msg:
        msg = 'Invoked with %s' % msg
    else:
        msg = 'Invoked'

    # prefer systemd journal when the bindings imported successfully
    if has_journal:
        journal_args = ["MESSAGE=%s %s" % (module, msg)]
        journal_args.append("MODULE=%s" % os.path.basename(__file__))
        for arg in log_args:
            journal_args.append(arg.upper() + "=" + str(log_args[arg]))
        try:
            journal.sendv(*journal_args)
        except IOError:
            # fall back to syslog since logging to journal failed
            # (exception value was unused; py2-only ', e' binding dropped)
            syslog.openlog(module, 0, syslog.LOG_USER)
            syslog.syslog(syslog.LOG_NOTICE, msg)
    else:
        syslog.openlog(module, 0, syslog.LOG_USER)
        syslog.syslog(syslog.LOG_NOTICE, msg)
||||||
def get_bin_path(self, arg, required=False, opt_dirs=None):
    '''
    find system executable in PATH.
    Optional arguments:
    - required:  if executable is not found and required is true, fail_json
    - opt_dirs:  optional list of directories to search in addition to PATH
    if found return full path; otherwise return None
    '''
    # opt_dirs defaults to None rather than a mutable [] so one shared
    # default list can never be mutated across calls (classic pitfall);
    # None behaves exactly like an empty list did.
    if opt_dirs is None:
        opt_dirs = []
    sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
    paths = []
    for d in opt_dirs:
        if d is not None and os.path.exists(d):
            paths.append(d)
    paths += os.environ.get('PATH', '').split(os.pathsep)
    bin_path = None
    # mangle PATH to include /sbin dirs
    for p in sbin_paths:
        if p not in paths and os.path.exists(p):
            paths.append(p)
    for d in paths:
        path = os.path.join(d, arg)
        if os.path.exists(path) and self.is_executable(path):
            bin_path = path
            break
    if required and bin_path is None:
        self.fail_json(msg='Failed to find required executable %s' % arg)
    return bin_path
|
||||||
def boolean(self, arg):
    ''' return a bool for the arg '''
    # bools and None pass straight through unchanged
    if arg is None or type(arg) == bool:
        return arg
    # lowercase strings before matching ('True', 'YES', ...)
    # NOTE: types.StringTypes is Python-2-only, kept for fidelity
    if type(arg) in types.StringTypes:
        arg = arg.lower()
    if arg in BOOLEANS_TRUE:
        return True
    if arg in BOOLEANS_FALSE:
        return False
    # fail_json exits the module, so no fall-through
    self.fail_json(msg='Boolean %s not in either boolean list' % arg)
|
|
||||||
def jsonify(self, data):
    '''Serialize data to its JSON text representation.'''
    encoded = json.dumps(data)
    return encoded
|
||||||
def from_json(self, data):
    '''Parse a JSON string into the corresponding Python value.'''
    decoded = json.loads(data)
    return decoded
|
||||||
def exit_json(self, **kwargs):
    ''' return from the module, without error '''
    self.add_path_info(kwargs)
    # callers that report nothing still get an explicit changed flag
    if 'changed' not in kwargs:
        kwargs['changed'] = False
    # single-argument print() call: identical output under Python 2's
    # print statement, and valid Python 3
    print(self.jsonify(kwargs))
    sys.exit(0)
|
||||||
def fail_json(self, **kwargs):
    ''' return from the module, with an error message '''
    self.add_path_info(kwargs)
    # internal invariant (not user-input validation): callers must supply msg
    assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
    kwargs['failed'] = True
    # single-argument print() call: identical output under Python 2's
    # print statement, and valid Python 3
    print(self.jsonify(kwargs))
    sys.exit(1)
|
||||||
def is_executable(self, path):
    '''is the given path executable?'''
    # one stat() call instead of three; the original stat'ed per bit test
    mode = os.stat(path)[stat.ST_MODE]
    # truthy if any of the user/group/other execute bits is set
    return (stat.S_IXUSR & mode
            or stat.S_IXGRP & mode
            or stat.S_IXOTH & mode)
|
||||||
def digest_from_file(self, filename, digest_method):
    ''' Return hex digest of local file for a given digest_method, or None if file is not present. '''
    if not os.path.exists(filename):
        return None
    if os.path.isdir(filename):
        self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
    digest = digest_method
    blocksize = 64 * 1024
    infile = open(filename, 'rb')
    # try/finally (rather than the original unconditional close) so the
    # file handle is not leaked if read() raises mid-loop
    try:
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
    finally:
        infile.close()
    return digest.hexdigest()
|
|
||||||
def md5(self, filename):
    ''' Return MD5 hex digest of local file using digest_from_file(). '''
    # _md5 is the module-level digest constructor chosen at import time
    hasher = _md5()
    return self.digest_from_file(filename, hasher)
|
||||||
def sha256(self, filename):
    ''' Return SHA-256 hex digest of local file using digest_from_file(). '''
    # sha256 is only available through hashlib (Python >= 2.5)
    if not HAVE_HASHLIB:
        self.fail_json(msg="SHA-256 checksums require hashlib, which is available in Python 2.5 and higher")
    hasher = _sha256()
    return self.digest_from_file(filename, hasher)
|
|
||||||
def backup_local(self, fn):
    '''
    Make a date-marked backup copy of the specified file.

    Returns the path of the backup on success (the original docstring
    claimed True/False, but the function has always returned the path);
    calls fail_json if the copy fails.
    '''
    # backups named basename-YYYY-MM-DD@HH:MM~
    ext = time.strftime("%Y-%m-%d@%H:%M~", time.localtime(time.time()))
    backupdest = '%s.%s' % (fn, ext)

    try:
        shutil.copy2(fn, backupdest)
    except shutil.Error as e:
        # 'as' form of except: valid since Python 2.6, required by Python 3
        self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, e))
    return backupdest
|
|
||||||
def cleanup(self, tmpfile):
    '''Best-effort removal of tmpfile; reports to stderr instead of raising on failure.'''
    if os.path.exists(tmpfile):
        try:
            os.unlink(tmpfile)
        except OSError as e:
            # deliberate best effort: a failed cleanup must not abort the
            # module, so only report it ('as' form replaces py2-only ', e')
            sys.stderr.write("could not cleanup %s: %s" % (tmpfile, e))
|
|
||||||
def atomic_move(self, src, dest):
    '''atomically move src to dest, copying attributes from dest, returns true on success
    it uses os.rename to ensure this as it is an atomic operation, rest of the function is
    to work around limitations, corner cases and ensure selinux context is saved if possible'''
    context = None
    if os.path.exists(dest):
        try:
            st = os.stat(dest)
            # 0o7777 keeps all permission bits incl. setuid/setgid/sticky;
            # the 0o literal form is valid Python 2.6+ (the original 07777
            # spelling is a syntax error under Python 3), same value
            os.chmod(src, st.st_mode & 0o7777)
            os.chown(src, st.st_uid, st.st_gid)
        except OSError as e:
            if e.errno != errno.EPERM:
                raise
        if self.selinux_enabled():
            context = self.selinux_context(dest)
    else:
        if self.selinux_enabled():
            context = self.selinux_default_context(dest)

    try:
        # Optimistically try a rename, solves some corner cases and can avoid useless work.
        os.rename(src, dest)
    except (IOError, OSError) as e:
        # only try workarounds for errno 18 (cross device), 1 (not permited) and 13 (permission denied)
        if e.errno != errno.EPERM and e.errno != errno.EXDEV and e.errno != errno.EACCES:
            self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))

        dest_dir = os.path.dirname(dest)
        dest_file = os.path.basename(dest)
        tmp_dest = "%s/.%s.%s.%s" % (dest_dir, dest_file, os.getpid(), time.time())

        try:  # leaves tmp file behind when sudo and not root
            if os.getenv("SUDO_USER") and os.getuid() != 0:
                # cleanup will happen by 'rm' of tempdir
                shutil.copy(src, tmp_dest)
            else:
                shutil.move(src, tmp_dest)
            if self.selinux_enabled():
                self.set_context_if_different(tmp_dest, context, False)
            os.rename(tmp_dest, dest)
        except (shutil.Error, OSError, IOError) as e:
            self.cleanup(tmp_dest)
            self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))

    if self.selinux_enabled():
        # rename might not preserve context
        self.set_context_if_different(dest, context, False)
|
||||||
def run_command(self, args, check_rc=False, close_fds=False, executable=None, data=None, binary_data=False):
    '''
    Execute a command, returns rc, stdout, and stderr.
    args is the command to run
    If args is a list, the command will be run with shell=False.
    Otherwise, the command will be run with shell=True when args is a string.
    Other arguments:
    - check_rc (boolean)  Whether to call fail_json in case of
                          non zero RC.  Default is False.
    - close_fds (boolean) See documentation for subprocess.Popen().
                          Default is False.
    - executable (string) See documentation for subprocess.Popen().
                          Default is None.
    '''
    if isinstance(args, list):
        shell = False
    elif isinstance(args, basestring):
        shell = True
    else:
        msg = "Argument 'args' to run_command must be list or string"
        self.fail_json(rc=257, cmd=args, msg=msg)
    rc = 0
    msg = None
    st_in = None
    if data:
        # only open a stdin pipe when we actually have data to feed
        st_in = subprocess.PIPE
    try:
        cmd = subprocess.Popen(args,
                               executable=executable,
                               shell=shell,
                               close_fds=close_fds,
                               stdin=st_in,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
        if data:
            if not binary_data:
                data += '\n'
        out, err = cmd.communicate(input=data)
        rc = cmd.returncode
    except (OSError, IOError) as e:
        self.fail_json(rc=e.errno, msg=str(e), cmd=args)
    except Exception:
        # 'except Exception' instead of the original bare except, so
        # KeyboardInterrupt/SystemExit are no longer swallowed
        self.fail_json(rc=257, msg=traceback.format_exc(), cmd=args)
    if rc != 0 and check_rc:
        msg = err.rstrip()
        self.fail_json(cmd=args, rc=rc, stdout=out, stderr=err, msg=msg)
    return (rc, out, err)
||||||
|
|
||||||
def pretty_bytes(self, size):
    '''Format a byte count as a human readable string, e.g. 1536 -> "1.50 KB".'''
    # plain ints auto-promote to long in Python 2, so the original's 'L'
    # suffixes were unnecessary (and are a syntax error in Python 3);
    # values are identical
    ranges = (
        (1 << 70, 'ZB'),
        (1 << 60, 'EB'),
        (1 << 50, 'PB'),
        (1 << 40, 'TB'),
        (1 << 30, 'GB'),
        (1 << 20, 'MB'),
        (1 << 10, 'KB'),
        (1, 'Bytes')
    )
    # find the largest unit not exceeding size; sizes < 1 fall through to Bytes
    for limit, suffix in ranges:
        if size >= limit:
            break
    return '%.2f %s' % (float(size) / limit, suffix)
|
@ -353,7 +353,7 @@ class PlayBook(object):
|
||||||
else:
|
else:
|
||||||
name = task.name
|
name = task.name
|
||||||
|
|
||||||
self.callbacks.on_task_start(template(play.basedir, name, task.module_vars, lookup_fatal=False, filter_fatal=False, lookups=False), is_handler)
|
self.callbacks.on_task_start(template(play.basedir, name, task.module_vars, lookup_fatal=False, filter_fatal=False), is_handler)
|
||||||
if hasattr(self.callbacks, 'skip_task') and self.callbacks.skip_task:
|
if hasattr(self.callbacks, 'skip_task') and self.callbacks.skip_task:
|
||||||
ansible.callbacks.set_task(self.callbacks, None)
|
ansible.callbacks.set_task(self.callbacks, None)
|
||||||
ansible.callbacks.set_task(self.runner_callbacks, None)
|
ansible.callbacks.set_task(self.runner_callbacks, None)
|
||||||
|
|
|
@ -40,7 +40,6 @@ from ansible.utils import template
|
||||||
from ansible.utils import check_conditional
|
from ansible.utils import check_conditional
|
||||||
from ansible import errors
|
from ansible import errors
|
||||||
from ansible import module_common
|
from ansible import module_common
|
||||||
from ansible.module_common import ModuleReplacer
|
|
||||||
import poller
|
import poller
|
||||||
import connection
|
import connection
|
||||||
from return_data import ReturnData
|
from return_data import ReturnData
|
||||||
|
@ -52,7 +51,6 @@ try:
|
||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_ATFORK=False
|
HAS_ATFORK=False
|
||||||
|
|
||||||
module_replacer = ModuleReplacer(strip_comments=False)
|
|
||||||
multiprocessing_runner = None
|
multiprocessing_runner = None
|
||||||
|
|
||||||
OUTPUT_LOCKFILE = tempfile.TemporaryFile()
|
OUTPUT_LOCKFILE = tempfile.TemporaryFile()
|
||||||
|
@ -73,6 +71,11 @@ def _executor_hook(job_queue, result_queue, new_stdin):
|
||||||
host = job_queue.get(block=False)
|
host = job_queue.get(block=False)
|
||||||
return_data = multiprocessing_runner._executor(host, new_stdin)
|
return_data = multiprocessing_runner._executor(host, new_stdin)
|
||||||
result_queue.put(return_data)
|
result_queue.put(return_data)
|
||||||
|
|
||||||
|
if 'LEGACY_TEMPLATE_WARNING' in return_data.flags:
|
||||||
|
# pass data back up across the multiprocessing fork boundary
|
||||||
|
template.Flags.LEGACY_TEMPLATE_WARNING = True
|
||||||
|
|
||||||
except Queue.Empty:
|
except Queue.Empty:
|
||||||
pass
|
pass
|
||||||
except:
|
except:
|
||||||
|
@ -368,6 +371,15 @@ class Runner(object):
|
||||||
def _executor(self, host, new_stdin):
|
def _executor(self, host, new_stdin):
|
||||||
''' handler for multiprocessing library '''
|
''' handler for multiprocessing library '''
|
||||||
|
|
||||||
|
def get_flags():
|
||||||
|
# flags are a way of passing arbitrary event information
|
||||||
|
# back up the chain, since multiprocessing forks and doesn't
|
||||||
|
# allow state exchange
|
||||||
|
flags = []
|
||||||
|
if template.Flags.LEGACY_TEMPLATE_WARNING:
|
||||||
|
flags.append('LEGACY_TEMPLATE_WARNING')
|
||||||
|
return flags
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if not new_stdin:
|
if not new_stdin:
|
||||||
self._new_stdin = os.fdopen(os.dup(sys.stdin.fileno()))
|
self._new_stdin = os.fdopen(os.dup(sys.stdin.fileno()))
|
||||||
|
@ -377,6 +389,7 @@ class Runner(object):
|
||||||
exec_rc = self._executor_internal(host, new_stdin)
|
exec_rc = self._executor_internal(host, new_stdin)
|
||||||
if type(exec_rc) != ReturnData:
|
if type(exec_rc) != ReturnData:
|
||||||
raise Exception("unexpected return type: %s" % type(exec_rc))
|
raise Exception("unexpected return type: %s" % type(exec_rc))
|
||||||
|
exec_rc.flags = get_flags()
|
||||||
# redundant, right?
|
# redundant, right?
|
||||||
if not exec_rc.comm_ok:
|
if not exec_rc.comm_ok:
|
||||||
self.callbacks.on_unreachable(host, exec_rc.result)
|
self.callbacks.on_unreachable(host, exec_rc.result)
|
||||||
|
@ -384,11 +397,11 @@ class Runner(object):
|
||||||
except errors.AnsibleError, ae:
|
except errors.AnsibleError, ae:
|
||||||
msg = str(ae)
|
msg = str(ae)
|
||||||
self.callbacks.on_unreachable(host, msg)
|
self.callbacks.on_unreachable(host, msg)
|
||||||
return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
|
return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg), flags=get_flags())
|
||||||
except Exception:
|
except Exception:
|
||||||
msg = traceback.format_exc()
|
msg = traceback.format_exc()
|
||||||
self.callbacks.on_unreachable(host, msg)
|
self.callbacks.on_unreachable(host, msg)
|
||||||
return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
|
return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg), flags=get_flags())
|
||||||
|
|
||||||
# *****************************************************
|
# *****************************************************
|
||||||
|
|
||||||
|
@ -825,19 +838,60 @@ class Runner(object):
|
||||||
def _copy_module(self, conn, tmp, module_name, module_args, inject, complex_args=None):
|
def _copy_module(self, conn, tmp, module_name, module_args, inject, complex_args=None):
|
||||||
''' transfer a module over SFTP, does not run it '''
|
''' transfer a module over SFTP, does not run it '''
|
||||||
|
|
||||||
|
# FIXME if complex args is none, set to {}
|
||||||
|
|
||||||
|
if module_name.startswith("/"):
|
||||||
|
raise errors.AnsibleFileNotFound("%s is not a module" % module_name)
|
||||||
|
|
||||||
# Search module path(s) for named module.
|
# Search module path(s) for named module.
|
||||||
in_path = utils.plugins.module_finder.find_plugin(module_name)
|
in_path = utils.plugins.module_finder.find_plugin(module_name)
|
||||||
if in_path is None:
|
if in_path is None:
|
||||||
raise errors.AnsibleFileNotFound("module %s not found in %s" % (module_name, utils.plugins.module_finder.print_paths()))
|
raise errors.AnsibleFileNotFound("module %s not found in %s" % (module_name, utils.plugins.module_finder.print_paths()))
|
||||||
|
|
||||||
out_path = os.path.join(tmp, module_name)
|
out_path = os.path.join(tmp, module_name)
|
||||||
|
|
||||||
# insert shared code and arguments into the module
|
module_data = ""
|
||||||
(module_data, module_style, shebang) = module_replacer.modify_module(
|
module_style = 'old'
|
||||||
in_path, complex_args, module_args, inject
|
|
||||||
)
|
with open(in_path) as f:
|
||||||
|
module_data = f.read()
|
||||||
|
if module_common.REPLACER in module_data:
|
||||||
|
module_style = 'new'
|
||||||
|
if 'WANT_JSON' in module_data:
|
||||||
|
module_style = 'non_native_want_json'
|
||||||
|
|
||||||
|
complex_args_json = utils.jsonify(complex_args)
|
||||||
|
# We force conversion of module_args to str because module_common calls shlex.split,
|
||||||
|
# a standard library function that incorrectly handles Unicode input before Python 2.7.3.
|
||||||
|
encoded_args = repr(module_args.encode('utf-8'))
|
||||||
|
encoded_lang = repr(C.DEFAULT_MODULE_LANG)
|
||||||
|
encoded_complex = repr(complex_args_json)
|
||||||
|
|
||||||
|
module_data = module_data.replace(module_common.REPLACER, module_common.MODULE_COMMON)
|
||||||
|
module_data = module_data.replace(module_common.REPLACER_ARGS, encoded_args)
|
||||||
|
module_data = module_data.replace(module_common.REPLACER_LANG, encoded_lang)
|
||||||
|
module_data = module_data.replace(module_common.REPLACER_COMPLEX, encoded_complex)
|
||||||
|
|
||||||
|
if module_style == 'new':
|
||||||
|
facility = C.DEFAULT_SYSLOG_FACILITY
|
||||||
|
if 'ansible_syslog_facility' in inject:
|
||||||
|
facility = inject['ansible_syslog_facility']
|
||||||
|
module_data = module_data.replace('syslog.LOG_USER', "syslog.%s" % facility)
|
||||||
|
|
||||||
|
lines = module_data.split("\n")
|
||||||
|
shebang = None
|
||||||
|
if lines[0].startswith("#!"):
|
||||||
|
shebang = lines[0].strip()
|
||||||
|
args = shlex.split(str(shebang[2:]))
|
||||||
|
interpreter = args[0]
|
||||||
|
interpreter_config = 'ansible_%s_interpreter' % os.path.basename(interpreter)
|
||||||
|
|
||||||
|
if interpreter_config in inject:
|
||||||
|
lines[0] = shebang = "#!%s %s" % (inject[interpreter_config], " ".join(args[1:]))
|
||||||
|
module_data = "\n".join(lines)
|
||||||
|
|
||||||
# ship the module
|
|
||||||
self._transfer_str(conn, tmp, module_name, module_data)
|
self._transfer_str(conn, tmp, module_name, module_data)
|
||||||
|
|
||||||
return (out_path, module_style, shebang)
|
return (out_path, module_style, shebang)
|
||||||
|
|
||||||
# *****************************************************
|
# *****************************************************
|
||||||
|
|
|
@ -23,6 +23,12 @@ import yaml
|
||||||
import copy
|
import copy
|
||||||
import optparse
|
import optparse
|
||||||
import operator
|
import operator
|
||||||
|
from ansible import errors
|
||||||
|
from ansible import __version__
|
||||||
|
from ansible.utils.plugins import *
|
||||||
|
from ansible.utils import template
|
||||||
|
from ansible.callbacks import display
|
||||||
|
import ansible.constants as C
|
||||||
import time
|
import time
|
||||||
import StringIO
|
import StringIO
|
||||||
import stat
|
import stat
|
||||||
|
@ -37,15 +43,6 @@ import getpass
|
||||||
import sys
|
import sys
|
||||||
import textwrap
|
import textwrap
|
||||||
|
|
||||||
# ansible
|
|
||||||
from ansible import errors
|
|
||||||
from ansible import __version__
|
|
||||||
from ansible.utils.plugins import *
|
|
||||||
from ansible.utils import template
|
|
||||||
from ansible.callbacks import display
|
|
||||||
import ansible.constants as C
|
|
||||||
import filesystem
|
|
||||||
|
|
||||||
VERBOSITY=0
|
VERBOSITY=0
|
||||||
|
|
||||||
# list of all deprecation messages to prevent duplicate display
|
# list of all deprecation messages to prevent duplicate display
|
||||||
|
@ -227,7 +224,7 @@ def unfrackpath(path):
|
||||||
example:
|
example:
|
||||||
'$HOME/../../var/mail' becomes '/var/spool/mail'
|
'$HOME/../../var/mail' becomes '/var/spool/mail'
|
||||||
'''
|
'''
|
||||||
return filesystem.unfrackpath(path)
|
return os.path.normpath(os.path.realpath(os.path.expandvars(os.path.expanduser(path))))
|
||||||
|
|
||||||
def prepare_writeable_dir(tree,mode=0777):
|
def prepare_writeable_dir(tree,mode=0777):
|
||||||
''' make sure a directory exists and is writeable '''
|
''' make sure a directory exists and is writeable '''
|
||||||
|
@ -253,11 +250,33 @@ def path_dwim(basedir, given):
|
||||||
'''
|
'''
|
||||||
make relative paths work like folks expect.
|
make relative paths work like folks expect.
|
||||||
'''
|
'''
|
||||||
return filesystem.path_dwim(basedir, given)
|
|
||||||
|
if given.startswith("/"):
|
||||||
|
return os.path.abspath(given)
|
||||||
|
elif given.startswith("~"):
|
||||||
|
return os.path.abspath(os.path.expanduser(given))
|
||||||
|
else:
|
||||||
|
return os.path.abspath(os.path.join(basedir, given))
|
||||||
|
|
||||||
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
|
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
|
||||||
''' find one file in a directory one level up in a dir named dirname relative to current '''
|
''' find one file in a directory one level up in a dir named dirname relative to current '''
|
||||||
return filesystem.path_dwim_relative(original, dirname, source, playbook_base, check=True)
|
# (used by roles code)
|
||||||
|
|
||||||
|
basedir = os.path.dirname(original)
|
||||||
|
if os.path.islink(basedir):
|
||||||
|
basedir = unfrackpath(basedir)
|
||||||
|
template2 = os.path.join(basedir, dirname, source)
|
||||||
|
else:
|
||||||
|
template2 = os.path.join(basedir, '..', dirname, source)
|
||||||
|
source2 = path_dwim(basedir, template2)
|
||||||
|
if os.path.exists(source2):
|
||||||
|
return source2
|
||||||
|
obvious_local_path = path_dwim(playbook_base, source)
|
||||||
|
if os.path.exists(obvious_local_path):
|
||||||
|
return obvious_local_path
|
||||||
|
if check:
|
||||||
|
raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
|
||||||
|
return source2 # which does not exist
|
||||||
|
|
||||||
def json_loads(data):
|
def json_loads(data):
|
||||||
''' parse a JSON string and return a data structure '''
|
''' parse a JSON string and return a data structure '''
|
||||||
|
|
|
@ -1,64 +0,0 @@
|
||||||
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
|
||||||
#
|
|
||||||
# This file is part of Ansible
|
|
||||||
#
|
|
||||||
# Ansible is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# Ansible is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
from ansible import errors
|
|
||||||
from ansible import __version__
|
|
||||||
import ansible.constants as C
|
|
||||||
|
|
||||||
def path_dwim(basedir, given):
    '''
    make relative paths work like folks expect.
    '''
    # already absolute: normalize and return as-is
    if given.startswith("/"):
        return os.path.abspath(given)
    # home-relative ('~', '~user/...')
    if given.startswith("~"):
        return os.path.abspath(os.path.expanduser(given))
    # otherwise resolve against the supplied base directory
    return os.path.abspath(os.path.join(basedir, given))
|
|
||||||
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''
    # (used by roles code)

    basedir = os.path.dirname(original)
    if os.path.islink(basedir):
        # symlinked role layout: resolve the link, then look beside it
        basedir = unfrackpath(basedir)
        candidate = os.path.join(basedir, dirname, source)
    else:
        candidate = os.path.join(basedir, '..', dirname, source)
    source2 = path_dwim(basedir, candidate)
    if os.path.exists(source2):
        return source2
    # fall back to a path relative to the playbook itself
    obvious_local_path = path_dwim(playbook_base, source)
    if os.path.exists(obvious_local_path):
        return obvious_local_path
    if check:
        raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
    return source2 # which does not exist
|
|
||||||
def unfrackpath(path):
    '''
    returns a path that is free of symlinks, environment
    variables, relative path traversals and symbols (~)
    example:
    '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    # expand ~ first, then $VARS, then collapse symlinks and '..' segments
    expanded = os.path.expandvars(os.path.expanduser(path))
    return os.path.normpath(os.path.realpath(expanded))
|
|
||||||
|
|
|
@ -21,18 +21,15 @@ import codecs
|
||||||
import jinja2
|
import jinja2
|
||||||
from jinja2.runtime import StrictUndefined
|
from jinja2.runtime import StrictUndefined
|
||||||
from jinja2.exceptions import TemplateSyntaxError
|
from jinja2.exceptions import TemplateSyntaxError
|
||||||
|
import yaml
|
||||||
|
import json
|
||||||
from ansible import errors
|
from ansible import errors
|
||||||
import ansible.constants as C
|
import ansible.constants as C
|
||||||
import time
|
import time
|
||||||
|
import subprocess
|
||||||
import datetime
|
import datetime
|
||||||
import pwd
|
import pwd
|
||||||
import ast
|
import ast
|
||||||
import plugins
|
|
||||||
import filesystem
|
|
||||||
|
|
||||||
FILTER_PLUGINS = None
|
|
||||||
_LISTRE = re.compile(r"(\w+)\[(\d+)\]")
|
|
||||||
JINJA2_OVERRIDE='#jinja2:'
|
|
||||||
|
|
||||||
class Globals(object):
|
class Globals(object):
|
||||||
|
|
||||||
|
@ -47,9 +44,10 @@ def _get_filters():
|
||||||
if Globals.FILTERS is not None:
|
if Globals.FILTERS is not None:
|
||||||
return Globals.FILTERS
|
return Globals.FILTERS
|
||||||
|
|
||||||
my_plugins = [ x for x in plugins.filter_loader.all()]
|
from ansible import utils
|
||||||
|
plugins = [ x for x in utils.plugins.filter_loader.all()]
|
||||||
filters = {}
|
filters = {}
|
||||||
for fp in my_plugins:
|
for fp in plugins:
|
||||||
filters.update(fp.filters())
|
filters.update(fp.filters())
|
||||||
Globals.FILTERS = filters
|
Globals.FILTERS = filters
|
||||||
|
|
||||||
|
@ -57,17 +55,33 @@ def _get_filters():
|
||||||
|
|
||||||
def _get_extensions():
|
def _get_extensions():
|
||||||
''' return jinja2 extensions to load '''
|
''' return jinja2 extensions to load '''
|
||||||
# if some extensions are set via jinja_extensions in ansible.cfg, we try
|
|
||||||
# to load them with the jinja environment
|
'''
|
||||||
|
if some extensions are set via jinja_extensions in ansible.cfg, we try
|
||||||
|
to load them with the jinja environment
|
||||||
|
'''
|
||||||
jinja_exts = []
|
jinja_exts = []
|
||||||
if C.DEFAULT_JINJA2_EXTENSIONS:
|
if C.DEFAULT_JINJA2_EXTENSIONS:
|
||||||
# Let's make sure the configuration directive doesn't contain spaces
|
'''
|
||||||
# and split extensions in an array
|
Let's make sure the configuration directive doesn't contain spaces
|
||||||
|
and split extensions in an array
|
||||||
|
'''
|
||||||
jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
|
jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
|
||||||
|
|
||||||
return jinja_exts
|
return jinja_exts
|
||||||
|
|
||||||
|
class Flags:
|
||||||
|
LEGACY_TEMPLATE_WARNING = False
|
||||||
|
|
||||||
|
# TODO: refactor this file
|
||||||
|
|
||||||
|
FILTER_PLUGINS = None
|
||||||
|
_LISTRE = re.compile(r"(\w+)\[(\d+)\]")
|
||||||
|
JINJA2_OVERRIDE='#jinja2:'
|
||||||
|
|
||||||
def lookup(name, *args, **kwargs):
|
def lookup(name, *args, **kwargs):
|
||||||
instance = plugins.lookup_loader.get(name.lower(), basedir=kwargs.get('basedir',None))
|
from ansible import utils
|
||||||
|
instance = utils.plugins.lookup_loader.get(name.lower(), basedir=kwargs.get('basedir',None))
|
||||||
vars = kwargs.get('vars', None)
|
vars = kwargs.get('vars', None)
|
||||||
|
|
||||||
if instance is not None:
|
if instance is not None:
|
||||||
|
@ -81,15 +95,8 @@ def _legacy_varFindLimitSpace(basedir, vars, space, part, lookup_fatal, depth, e
|
||||||
|
|
||||||
basically does space.get(part, None), but with
|
basically does space.get(part, None), but with
|
||||||
templating for part and a few more things
|
templating for part and a few more things
|
||||||
|
|
||||||
DEPRECATED
|
|
||||||
LEGACY VARIABLES ARE SLATED FOR REMOVAL IN ANSIBLE 1.5
|
|
||||||
use {{ foo }} INSTEAD
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
if not C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES:
|
|
||||||
raise Exception("we should not be here")
|
|
||||||
|
|
||||||
# Previous part couldn't be found, nothing to limit to
|
# Previous part couldn't be found, nothing to limit to
|
||||||
if space is None:
|
if space is None:
|
||||||
return space
|
return space
|
||||||
|
@ -140,16 +147,12 @@ def _legacy_varFind(basedir, text, vars, lookup_fatal, depth, expand_lists):
|
||||||
end=<index into text where the variable ends>)
|
end=<index into text where the variable ends>)
|
||||||
or None if no variable could be found in text. If replacement is None, it should be replaced with the
|
or None if no variable could be found in text. If replacement is None, it should be replaced with the
|
||||||
original data in the caller.
|
original data in the caller.
|
||||||
|
|
||||||
DEPRECATED
|
|
||||||
LEGACY VARIABLES ARE SLATED FOR REMOVAL IN ANSIBLE 1.5
|
|
||||||
use {{ foo }} INSTEAD
|
|
||||||
'''
|
'''
|
||||||
|
|
||||||
# short circuit this whole function if we have specified we don't want
|
# short circuit this whole function if we have specified we don't want
|
||||||
# legacy var replacement
|
# legacy var replacement
|
||||||
if not C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES:
|
if C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES == False:
|
||||||
raise Exception("we should not be here")
|
return None
|
||||||
|
|
||||||
start = text.find("$")
|
start = text.find("$")
|
||||||
if start == -1:
|
if start == -1:
|
||||||
|
@ -221,6 +224,7 @@ def _legacy_varFind(basedir, text, vars, lookup_fatal, depth, expand_lists):
|
||||||
if basedir is None:
|
if basedir is None:
|
||||||
return {'replacement': None, 'start': start, 'end': end}
|
return {'replacement': None, 'start': start, 'end': end}
|
||||||
var_end -= 1
|
var_end -= 1
|
||||||
|
from ansible import utils
|
||||||
args = text[part_start:var_end]
|
args = text[part_start:var_end]
|
||||||
if lookup_plugin_name == 'LOOKUP':
|
if lookup_plugin_name == 'LOOKUP':
|
||||||
lookup_plugin_name, args = args.split(",", 1)
|
lookup_plugin_name, args = args.split(",", 1)
|
||||||
|
@ -232,7 +236,7 @@ def _legacy_varFind(basedir, text, vars, lookup_fatal, depth, expand_lists):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
instance = plugins.lookup_loader.get(lookup_plugin_name.lower(), basedir=basedir)
|
instance = utils.plugins.lookup_loader.get(lookup_plugin_name.lower(), basedir=basedir)
|
||||||
if instance is not None:
|
if instance is not None:
|
||||||
try:
|
try:
|
||||||
replacement = instance.run(args, inject=vars)
|
replacement = instance.run(args, inject=vars)
|
||||||
|
@ -256,25 +260,16 @@ def _legacy_varFind(basedir, text, vars, lookup_fatal, depth, expand_lists):
|
||||||
return dict(replacement=space, start=start, end=end)
|
return dict(replacement=space, start=start, end=end)
|
||||||
|
|
||||||
def legacy_varReplace(basedir, raw, vars, lookup_fatal=True, depth=0, expand_lists=False):
|
def legacy_varReplace(basedir, raw, vars, lookup_fatal=True, depth=0, expand_lists=False):
|
||||||
''' Perform variable replacement of $variables in string raw using vars dictionary
|
''' Perform variable replacement of $variables in string raw using vars dictionary '''
|
||||||
|
# this code originally from yum
|
||||||
DEPRECATED
|
|
||||||
LEGACY VARIABLES ARE SLATED FOR REMOVAL IN ANSIBLE 1.5
|
|
||||||
use {{ foo }} INSTEAD
|
|
||||||
'''
|
|
||||||
|
|
||||||
if not C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES:
|
|
||||||
raise Exception("we should not be here")
|
|
||||||
|
|
||||||
# this code originally from yum (and later modified a lot)
|
|
||||||
|
|
||||||
orig = raw
|
orig = raw
|
||||||
|
|
||||||
if not isinstance(raw, unicode):
|
if not isinstance(raw, unicode):
|
||||||
raw = raw.decode("utf-8")
|
raw = raw.decode("utf-8")
|
||||||
|
|
||||||
#if (depth > 20):
|
if (depth > 20):
|
||||||
# raise errors.AnsibleError("template recursion depth exceeded")
|
raise errors.AnsibleError("template recursion depth exceeded")
|
||||||
|
|
||||||
done = [] # Completed chunks to return
|
done = [] # Completed chunks to return
|
||||||
|
|
||||||
|
@ -302,64 +297,12 @@ def legacy_varReplace(basedir, raw, vars, lookup_fatal=True, depth=0, expand_lis
|
||||||
|
|
||||||
result = ''.join(done)
|
result = ''.join(done)
|
||||||
|
|
||||||
previous_old_style_vars = orig.count('$')
|
if result != orig:
|
||||||
new_old_style_vars = result.count('$')
|
|
||||||
if previous_old_style_vars != new_old_style_vars:
|
|
||||||
from ansible import utils
|
from ansible import utils
|
||||||
utils.deprecated("Legacy variable substitution, such as using ${foo} or $foo instead of {{ foo }} is currently valid but will be phased out and has been out of favor since version 1.2. This is the last of legacy features on our deprecation list. You may continue to use this if you have specific needs for now","1.5")
|
utils.deprecated("Legacy variable subsitution, such as using ${foo} or $foo instead of {{ foo }} is currently valid but will be phased out and has been out of favor since version 1.2. This is the last of legacy features on our deprecation list. You may continue to use this if you have specific needs for now","1.6")
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def fix_ds(basedir, vars, original, depth=0):
|
def template(basedir, varname, vars, lookup_fatal=True, depth=0, expand_lists=True, convert_bare=False, fail_on_undefined=False, filter_fatal=True):
|
||||||
''' used to massage the input dictionary to avoid surprises later and minimize more complex recursive problems '''
|
|
||||||
while (depth < 20):
|
|
||||||
depth = depth + 1
|
|
||||||
vars2 = _fix_ds(basedir, vars, original, depth=depth)
|
|
||||||
if vars2 == vars:
|
|
||||||
return vars
|
|
||||||
vars = vars2
|
|
||||||
return vars
|
|
||||||
|
|
||||||
def _fix_ds(basedir, vars, original, depth=0):
|
|
||||||
if isinstance(vars, dict):
|
|
||||||
return dict([ (k, fix_ds(basedir, v, original, depth=depth+1)) for (k,v) in vars.iteritems() ])
|
|
||||||
if isinstance(vars, (dict, tuple)):
|
|
||||||
return [ fix_ds(basedir, x,original, depth=depth+1) for x in vars ]
|
|
||||||
if isinstance(vars, basestring) and "{{" in vars and not "|" in vars and not "lookup(" in vars:
|
|
||||||
return lightweight_var_template(basedir, vars, original)
|
|
||||||
return vars
|
|
||||||
|
|
||||||
def lightweight_var_template(basedir, input, vars):
|
|
||||||
return template_from_string(basedir, input, vars, fail_on_undefined=False, lookups=True, filters=True)
|
|
||||||
|
|
||||||
def template(basedir, input_value, vars, lookup_fatal=True, depth=-1, expand_lists=True, convert_bare=False, fail_on_undefined=False, filter_fatal=True, lookups=True):
|
|
||||||
|
|
||||||
vars = fix_ds(basedir, vars, vars.copy())
|
|
||||||
|
|
||||||
last_time = input_value
|
|
||||||
result = None
|
|
||||||
changed = True
|
|
||||||
while changed:
|
|
||||||
result = _template(
|
|
||||||
basedir,
|
|
||||||
last_time,
|
|
||||||
vars,
|
|
||||||
lookup_fatal=lookup_fatal,
|
|
||||||
depth=depth,
|
|
||||||
expand_lists=expand_lists,
|
|
||||||
convert_bare=convert_bare,
|
|
||||||
fail_on_undefined=fail_on_undefined,
|
|
||||||
filter_fatal=filter_fatal,
|
|
||||||
lookups=lookups,
|
|
||||||
)
|
|
||||||
if last_time == result:
|
|
||||||
changed = False
|
|
||||||
last_time = result
|
|
||||||
depth = depth + 1
|
|
||||||
if depth > 20:
|
|
||||||
raise errors.AnsibleError("template recursion depth exceeded")
|
|
||||||
return result
|
|
||||||
|
|
||||||
def _template(basedir, varname, vars, lookup_fatal=True, depth=0, expand_lists=True, convert_bare=False, fail_on_undefined=False, filter_fatal=True, lookups=True):
|
|
||||||
''' templates a data structure by traversing it and substituting for other data structures '''
|
''' templates a data structure by traversing it and substituting for other data structures '''
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@ -370,47 +313,109 @@ def _template(basedir, varname, vars, lookup_fatal=True, depth=0, expand_lists=T
|
||||||
|
|
||||||
if isinstance(varname, basestring):
|
if isinstance(varname, basestring):
|
||||||
if '{{' in varname or '{%' in varname:
|
if '{{' in varname or '{%' in varname:
|
||||||
varname = template_from_string(basedir, varname, vars, fail_on_undefined, lookups=lookups)
|
varname = template_from_string(basedir, varname, vars, fail_on_undefined)
|
||||||
|
|
||||||
if not C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES:
|
|
||||||
return varname
|
|
||||||
|
|
||||||
if not '$' in varname:
|
if not '$' in varname:
|
||||||
return varname
|
return varname
|
||||||
|
|
||||||
m = _legacy_varFind(basedir, varname, vars, lookup_fatal, depth, expand_lists)
|
m = _legacy_varFind(basedir, varname, vars, lookup_fatal, depth, expand_lists)
|
||||||
if not m:
|
if not m:
|
||||||
return varname
|
return varname
|
||||||
if m['start'] == 0 and m['end'] == len(varname):
|
if m['start'] == 0 and m['end'] == len(varname):
|
||||||
if m['replacement'] is not None:
|
if m['replacement'] is not None:
|
||||||
|
Flags.LEGACY_TEMPLATE_WARNING = True
|
||||||
return template(basedir, m['replacement'], vars, lookup_fatal, depth, expand_lists)
|
return template(basedir, m['replacement'], vars, lookup_fatal, depth, expand_lists)
|
||||||
else:
|
else:
|
||||||
return varname
|
return varname
|
||||||
else:
|
else:
|
||||||
|
Flags.LEGACY_TEMPLATE_WARNING = True
|
||||||
return legacy_varReplace(basedir, varname, vars, lookup_fatal, depth, expand_lists)
|
return legacy_varReplace(basedir, varname, vars, lookup_fatal, depth, expand_lists)
|
||||||
|
|
||||||
elif isinstance(varname, (list, tuple)):
|
elif isinstance(varname, (list, tuple)):
|
||||||
return [ template(basedir, v, vars, lookup_fatal, depth, expand_lists, fail_on_undefined=fail_on_undefined) for v in varname]
|
return [template(basedir, v, vars, lookup_fatal, depth, expand_lists, fail_on_undefined=fail_on_undefined) for v in varname]
|
||||||
elif isinstance(varname, dict):
|
elif isinstance(varname, dict):
|
||||||
return dict([
|
d = {}
|
||||||
(k, template(
|
for (k, v) in varname.iteritems():
|
||||||
basedir, v, vars, lookup_fatal, depth, expand_lists, fail_on_undefined=fail_on_undefined)
|
d[k] = template(basedir, v, vars, lookup_fatal, depth, expand_lists, fail_on_undefined=fail_on_undefined)
|
||||||
) for (k,v) in varname.iteritems()
|
return d
|
||||||
])
|
|
||||||
else:
|
else:
|
||||||
return varname
|
return varname
|
||||||
|
|
||||||
except errors.AnsibleFilterError:
|
except errors.AnsibleFilterError:
|
||||||
if filter_fatal:
|
if filter_fatal:
|
||||||
raise
|
raise
|
||||||
else:
|
else:
|
||||||
return varname
|
return varname
|
||||||
|
|
||||||
|
|
||||||
|
class _jinja2_vars(object):
|
||||||
|
'''
|
||||||
|
Helper class to template all variable content before jinja2 sees it.
|
||||||
|
This is done by hijacking the variable storage that jinja2 uses, and
|
||||||
|
overriding __contains__ and __getitem__ to look like a dict. Added bonus
|
||||||
|
is avoiding duplicating the large hashes that inject tends to be.
|
||||||
|
To facilitate using builtin jinja2 things like range, globals are handled
|
||||||
|
here.
|
||||||
|
extras is a list of locals to also search for variables.
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, basedir, vars, globals, fail_on_undefined, *extras):
|
||||||
|
self.basedir = basedir
|
||||||
|
self.vars = vars
|
||||||
|
self.globals = globals
|
||||||
|
self.fail_on_undefined = fail_on_undefined
|
||||||
|
self.extras = extras
|
||||||
|
|
||||||
|
def __contains__(self, k):
|
||||||
|
if k in self.vars:
|
||||||
|
return True
|
||||||
|
for i in self.extras:
|
||||||
|
if k in i:
|
||||||
|
return True
|
||||||
|
if k in self.globals:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def __getitem__(self, varname):
|
||||||
|
if varname not in self.vars:
|
||||||
|
for i in self.extras:
|
||||||
|
if varname in i:
|
||||||
|
return i[varname]
|
||||||
|
if varname in self.globals:
|
||||||
|
return self.globals[varname]
|
||||||
|
else:
|
||||||
|
raise KeyError("undefined variable: %s" % varname)
|
||||||
|
var = self.vars[varname]
|
||||||
|
# HostVars is special, return it as-is
|
||||||
|
if isinstance(var, dict) and type(var) != dict:
|
||||||
|
return var
|
||||||
|
else:
|
||||||
|
return template(self.basedir, var, self.vars, fail_on_undefined=self.fail_on_undefined)
|
||||||
|
|
||||||
|
def add_locals(self, locals):
|
||||||
|
'''
|
||||||
|
If locals are provided, create a copy of self containing those
|
||||||
|
locals in addition to what is already in this variable proxy.
|
||||||
|
'''
|
||||||
|
if locals is None:
|
||||||
|
return self
|
||||||
|
return _jinja2_vars(self.basedir, self.vars, self.globals, self.fail_on_undefined, locals, *self.extras)
|
||||||
|
|
||||||
|
class J2Template(jinja2.environment.Template):
|
||||||
|
'''
|
||||||
|
This class prevents Jinja2 from running _jinja2_vars through dict()
|
||||||
|
Without this, {% include %} and similar will create new contexts unlike
|
||||||
|
the special one created in template_from_file. This ensures they are all
|
||||||
|
alike, with the exception of potential locals.
|
||||||
|
'''
|
||||||
|
def new_context(self, vars=None, shared=False, locals=None):
|
||||||
|
return jinja2.runtime.Context(self.environment, vars.add_locals(locals), self.name, self.blocks)
|
||||||
|
|
||||||
def template_from_file(basedir, path, vars):
|
def template_from_file(basedir, path, vars):
|
||||||
''' run a file through the templating engine '''
|
''' run a file through the templating engine '''
|
||||||
|
|
||||||
fail_on_undefined = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
|
fail_on_undefined = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
|
||||||
|
|
||||||
realpath = filesystem.path_dwim(basedir, path)
|
from ansible import utils
|
||||||
|
realpath = utils.path_dwim(basedir, path)
|
||||||
loader=jinja2.FileSystemLoader([basedir,os.path.dirname(realpath)])
|
loader=jinja2.FileSystemLoader([basedir,os.path.dirname(realpath)])
|
||||||
|
|
||||||
def my_lookup(*args, **kwargs):
|
def my_lookup(*args, **kwargs):
|
||||||
|
@ -440,20 +445,27 @@ def template_from_file(basedir, path, vars):
|
||||||
(key,val) = pair.split(':')
|
(key,val) = pair.split(':')
|
||||||
setattr(environment,key.strip(),ast.literal_eval(val.strip()))
|
setattr(environment,key.strip(),ast.literal_eval(val.strip()))
|
||||||
|
|
||||||
|
environment.template_class = J2Template
|
||||||
|
try:
|
||||||
|
t = environment.from_string(data)
|
||||||
|
except TemplateSyntaxError, e:
|
||||||
|
# Throw an exception which includes a more user friendly error message
|
||||||
|
values = {'name': realpath, 'lineno': e.lineno, 'error': str(e)}
|
||||||
|
msg = 'file: %(name)s, line number: %(lineno)s, error: %(error)s' % \
|
||||||
|
values
|
||||||
|
error = errors.AnsibleError(msg)
|
||||||
|
raise error
|
||||||
vars = vars.copy()
|
vars = vars.copy()
|
||||||
try:
|
try:
|
||||||
template_uid = pwd.getpwuid(os.stat(realpath).st_uid).pw_name
|
template_uid = pwd.getpwuid(os.stat(realpath).st_uid).pw_name
|
||||||
except:
|
except:
|
||||||
template_uid = os.stat(realpath).st_uid
|
template_uid = os.stat(realpath).st_uid
|
||||||
|
vars['template_host'] = os.uname()[1]
|
||||||
vars.update(dict(
|
vars['template_path'] = realpath
|
||||||
template_host = os.uname()[1],
|
vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(realpath))
|
||||||
template_path = realpath,
|
vars['template_uid'] = template_uid
|
||||||
template_mtime = datetime.datetime.fromtimestamp(os.path.getmtime(realpath)),
|
vars['template_fullpath'] = os.path.abspath(realpath)
|
||||||
template_uid = template_uid,
|
vars['template_run_date'] = datetime.datetime.now()
|
||||||
template_fullpath = os.path.abspath(realpath),
|
|
||||||
template_run_date = datetime.datetime.now(),
|
|
||||||
))
|
|
||||||
|
|
||||||
managed_default = C.DEFAULT_MANAGED_STR
|
managed_default = C.DEFAULT_MANAGED_STR
|
||||||
managed_str = managed_default.format(
|
managed_str = managed_default.format(
|
||||||
|
@ -466,75 +478,60 @@ def template_from_file(basedir, path, vars):
|
||||||
time.localtime(os.path.getmtime(realpath))
|
time.localtime(os.path.getmtime(realpath))
|
||||||
)
|
)
|
||||||
|
|
||||||
# this double template pass is here to detect errors while we still have context
|
# This line performs deep Jinja2 magic that uses the _jinja2_vars object for vars
|
||||||
# actual recursion is handled by the mainline template function further down
|
# Ideally, this could use some API where setting shared=True and the object won't get
|
||||||
|
# passed through dict(o), but I have not found that yet.
|
||||||
try:
|
try:
|
||||||
t = environment.from_string(data)
|
res = jinja2.utils.concat(t.root_render_func(t.new_context(_jinja2_vars(basedir, vars, t.globals, fail_on_undefined), shared=True)))
|
||||||
res = t.render(vars)
|
|
||||||
except jinja2.exceptions.UndefinedError, e:
|
except jinja2.exceptions.UndefinedError, e:
|
||||||
raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
|
raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
|
||||||
except TemplateSyntaxError, e:
|
|
||||||
# Throw an exception which includes a more user friendly error message
|
|
||||||
values = dict(name=realpath, lineno=e.lineno, error=str(e))
|
|
||||||
msg = 'file: %(name)s, line number: %(lineno)s, error: %(error)s' % values
|
|
||||||
error = errors.AnsibleError(msg)
|
|
||||||
raise error
|
|
||||||
|
|
||||||
if data.endswith('\n') and not res.endswith('\n'):
|
if data.endswith('\n') and not res.endswith('\n'):
|
||||||
res = res + '\n'
|
res = res + '\n'
|
||||||
return template(basedir, res, vars)
|
return template(basedir, res, vars)
|
||||||
|
|
||||||
def template_from_string(basedir, data, vars, fail_on_undefined=False, lookups=True, filters=True):
|
def template_from_string(basedir, data, vars, fail_on_undefined=False):
|
||||||
''' run a string through the (Jinja2) templating engine '''
|
''' run a string through the (Jinja2) templating engine '''
|
||||||
|
|
||||||
def my_lookup(*args, **kwargs):
|
|
||||||
kwargs['vars'] = vars
|
|
||||||
return lookup(*args, basedir=basedir, **kwargs)
|
|
||||||
|
|
||||||
if type(data) == str:
|
|
||||||
data = unicode(data, 'utf-8')
|
|
||||||
environment = jinja2.Environment(trim_blocks=True, undefined=StrictUndefined, extensions=_get_extensions())
|
|
||||||
|
|
||||||
if filters:
|
|
||||||
environment.filters.update(_get_filters())
|
|
||||||
|
|
||||||
if '_original_file' in vars:
|
|
||||||
basedir = os.path.dirname(vars['_original_file'])
|
|
||||||
filesdir = os.path.abspath(os.path.join(basedir, '..', 'files'))
|
|
||||||
if os.path.exists(filesdir):
|
|
||||||
basedir = filesdir
|
|
||||||
|
|
||||||
# TODO: may need some way of using lookup plugins here seeing we aren't calling
|
|
||||||
# the legacy engine, lookup() as a function, perhaps?
|
|
||||||
|
|
||||||
if type(data) == str:
|
|
||||||
data = unicode(data, 'utf-8')
|
|
||||||
environment = jinja2.Environment(trim_blocks=True, undefined=StrictUndefined, extensions=_get_extensions())
|
|
||||||
environment.filters.update(_get_filters())
|
|
||||||
|
|
||||||
if '_original_file' in vars:
|
|
||||||
basedir = os.path.dirname(vars['_original_file'])
|
|
||||||
filesdir = os.path.abspath(os.path.join(basedir, '..', 'files'))
|
|
||||||
if os.path.exists(filesdir):
|
|
||||||
basedir = filesdir
|
|
||||||
|
|
||||||
# TODO: may need some way of using lookup plugins here seeing we aren't calling
|
|
||||||
# the legacy engine, lookup() as a function, perhaps?
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
t = environment.from_string(data.decode('utf-8'))
|
if type(data) == str:
|
||||||
except Exception, e:
|
data = unicode(data, 'utf-8')
|
||||||
if 'recursion' in str(e):
|
environment = jinja2.Environment(trim_blocks=True, undefined=StrictUndefined, extensions=_get_extensions())
|
||||||
raise errors.AnsibleError("recursive loop detected in template string: %s" % data)
|
environment.filters.update(_get_filters())
|
||||||
else:
|
environment.template_class = J2Template
|
||||||
return data
|
|
||||||
|
if '_original_file' in vars:
|
||||||
|
basedir = os.path.dirname(vars['_original_file'])
|
||||||
|
filesdir = os.path.abspath(os.path.join(basedir, '..', 'files'))
|
||||||
|
if os.path.exists(filesdir):
|
||||||
|
basedir = filesdir
|
||||||
|
|
||||||
|
# TODO: may need some way of using lookup plugins here seeing we aren't calling
|
||||||
|
# the legacy engine, lookup() as a function, perhaps?
|
||||||
|
|
||||||
|
data = data.decode('utf-8')
|
||||||
|
try:
|
||||||
|
t = environment.from_string(data)
|
||||||
|
except Exception, e:
|
||||||
|
if 'recursion' in str(e):
|
||||||
|
raise errors.AnsibleError("recursive loop detected in template string: %s" % data)
|
||||||
|
else:
|
||||||
|
return data
|
||||||
|
|
||||||
|
def my_lookup(*args, **kwargs):
|
||||||
|
kwargs['vars'] = vars
|
||||||
|
return lookup(*args, basedir=basedir, **kwargs)
|
||||||
|
|
||||||
if lookups:
|
|
||||||
t.globals['lookup'] = my_lookup
|
t.globals['lookup'] = my_lookup
|
||||||
|
|
||||||
try:
|
|
||||||
return t.render(vars)
|
|
||||||
except jinja2.exceptions.UndefinedError:
|
jvars =_jinja2_vars(basedir, vars, t.globals, fail_on_undefined)
|
||||||
|
new_context = t.new_context(jvars, shared=True)
|
||||||
|
rf = t.root_render_func(new_context)
|
||||||
|
res = jinja2.utils.concat(rf)
|
||||||
|
return res
|
||||||
|
except (jinja2.exceptions.UndefinedError, errors.AnsibleUndefinedVariable):
|
||||||
if fail_on_undefined:
|
if fail_on_undefined:
|
||||||
raise
|
raise
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -48,9 +48,7 @@ def main():
|
||||||
result['ping'] = module.params['data']
|
result['ping'] = module.params['data']
|
||||||
module.exit_json(**result)
|
module.exit_json(**result)
|
||||||
|
|
||||||
### boilerplate: import common module snippets here
|
# this is magic, see lib/ansible/module_common.py
|
||||||
from ansible.module_utils.basic import *
|
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
|
||||||
|
|
||||||
### invoke the module
|
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|
|
@ -1205,11 +1205,7 @@ def main():
|
||||||
|
|
||||||
module.exit_json(**result)
|
module.exit_json(**result)
|
||||||
|
|
||||||
### boilerplate: import common module snippets here
|
# this is magic, see lib/ansible/module_common.py
|
||||||
from ansible.module_utils.basic import *
|
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>
|
||||||
|
|
||||||
### invoke the module
|
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -9,7 +9,6 @@ from nose.plugins.skip import SkipTest
|
||||||
|
|
||||||
import ansible.utils
|
import ansible.utils
|
||||||
import ansible.utils.template as template2
|
import ansible.utils.template as template2
|
||||||
import ansible.constants as C
|
|
||||||
|
|
||||||
class TestUtils(unittest.TestCase):
|
class TestUtils(unittest.TestCase):
|
||||||
|
|
||||||
|
@ -17,10 +16,6 @@ class TestUtils(unittest.TestCase):
|
||||||
### varReplace function tests
|
### varReplace function tests
|
||||||
|
|
||||||
def test_varReplace_var_complex_var(self):
|
def test_varReplace_var_complex_var(self):
|
||||||
|
|
||||||
old_setting = C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES
|
|
||||||
C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES = True
|
|
||||||
|
|
||||||
vars = {
|
vars = {
|
||||||
'x': '$y',
|
'x': '$y',
|
||||||
'y': {
|
'y': {
|
||||||
|
@ -31,8 +26,6 @@ class TestUtils(unittest.TestCase):
|
||||||
res = template2.template(None, template, vars)
|
res = template2.template(None, template, vars)
|
||||||
assert res == 'result'
|
assert res == 'result'
|
||||||
|
|
||||||
C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES = old_setting
|
|
||||||
|
|
||||||
#####################################
|
#####################################
|
||||||
### template_ds function tests
|
### template_ds function tests
|
||||||
|
|
||||||
|
@ -60,9 +53,6 @@ class TestUtils(unittest.TestCase):
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
old_setting = C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES
|
|
||||||
C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES = True
|
|
||||||
|
|
||||||
template = '${data.var}'
|
template = '${data.var}'
|
||||||
res = template2.template(None, template, vars)
|
res = template2.template(None, template, vars)
|
||||||
assert sorted(res) == sorted(vars['data']['var'])
|
assert sorted(res) == sorted(vars['data']['var'])
|
||||||
|
@ -79,8 +69,6 @@ class TestUtils(unittest.TestCase):
|
||||||
res = template2.template(None, template, vars)
|
res = template2.template(None, template, vars)
|
||||||
assert res == template
|
assert res == template
|
||||||
|
|
||||||
C.DEFAULT_LEGACY_PLAYBOOK_VARIABLES = old_setting
|
|
||||||
|
|
||||||
#####################################
|
#####################################
|
||||||
### Template function tests
|
### Template function tests
|
||||||
|
|
||||||
|
@ -93,6 +81,15 @@ class TestUtils(unittest.TestCase):
|
||||||
|
|
||||||
assert res == 'hello world'
|
assert res == 'hello world'
|
||||||
|
|
||||||
|
def test_template_whitespace(self):
|
||||||
|
vars = {
|
||||||
|
'who': 'world',
|
||||||
|
}
|
||||||
|
|
||||||
|
res = template2.template_from_file("test", "template-whitespace", vars)
|
||||||
|
|
||||||
|
assert res == 'hello world\n'
|
||||||
|
|
||||||
def test_template_unicode(self):
|
def test_template_unicode(self):
|
||||||
vars = {
|
vars = {
|
||||||
'who': u'wórld',
|
'who': u'wórld',
|
||||||
|
|
Loading…
Reference in a new issue