# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ast
import random
import uuid

from collections import MutableMapping
from json import dumps

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native, to_text
from ansible.parsing.splitter import parse_kv


_MAXSIZE = 2 ** 32
cur_id = 0
node_mac = ("%012x" % uuid.getnode())[:12]
random_int = ("%08x" % random.randint(0, _MAXSIZE))[:8]


def get_unique_id():
    global cur_id
    cur_id += 1
    return "-".join([
        node_mac[0:8],
        node_mac[8:12],
        random_int[0:4],
        random_int[4:8],
        ("%012x" % cur_id)[:12],
    ])
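# Illustrative note (not in the original source): the IDs are UUID-like strings
# built from the node MAC, a per-process random prefix and an incrementing
# counter, so only the last field changes between calls, e.g. (values made up):
#
#   get_unique_id()  # -> '0242ac11-0002-3f2b-1a2c-000000000001'
#   get_unique_id()  # -> '0242ac11-0002-3f2b-1a2c-000000000002'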


def _validate_mutable_mappings(a, b):
    """
    Internal convenience function to ensure arguments are MutableMappings

    This checks that all arguments are MutableMappings and raises an error otherwise

    :raises AnsibleError: if one of the arguments is not a MutableMapping
    """

    # If this becomes generally needed, change the signature to operate on
    # a variable number of arguments instead.

    if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
        myvars = []
        for x in [a, b]:
            try:
                myvars.append(dumps(x))
            except Exception:
                myvars.append(to_native(x))
        raise AnsibleError("failed to combine variables, expected dicts but got a '{0}' and a '{1}': \n{2}\n{3}".format(
            a.__class__.__name__, b.__class__.__name__, myvars[0], myvars[1])
        )
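# Illustrative note (not in the original source): passing anything that is not
# a mapping raises AnsibleError, with both values rendered for context, e.g.:
#
#   _validate_mutable_mappings({'a': 1}, ['not', 'a', 'dict'])
#   # AnsibleError: failed to combine variables, expected dicts but got a 'dict' and a 'list': ...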


def combine_vars(a, b):
    """
    Return a combined copy of the two variable dictionaries, honouring the
    configured hash behaviour ('replace', the default, or 'merge')
    """

    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
        return merge_hash(a, b)
    else:
        # HASH_BEHAVIOUR == 'replace'
        _validate_mutable_mappings(a, b)
        result = a.copy()
        result.update(b)
        return result
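# Illustrative note (not in the original source): with the default 'replace'
# behaviour a top-level key in b completely overrides the same key in a, while
# 'merge' combines nested mappings via merge_hash(), e.g.:
#
#   a = {'x': {'one': 1, 'two': 2}, 'y': 1}
#   b = {'x': {'two': 22}}
#   combine_vars(a, b)  # 'replace' -> {'x': {'two': 22}, 'y': 1}
#   combine_vars(a, b)  # 'merge'   -> {'x': {'one': 1, 'two': 22}, 'y': 1}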


def merge_hash(a, b):
    """
    Recursively merges hash b into a so that keys from b take precedence over keys from a
    """

    _validate_mutable_mappings(a, b)

    # if a is empty or equal to b, return b
    if a == {} or a == b:
        return b.copy()

    # if b is empty the below unfolds quickly
    result = a.copy()

    # next, iterate over b keys and values
    for k, v in iteritems(b):
        # if there's already such a key in a
        # and that key contains a MutableMapping
        if k in result and isinstance(result[k], MutableMapping) and isinstance(v, MutableMapping):
            # merge those dicts recursively
            result[k] = merge_hash(result[k], v)
        else:
            # otherwise, just copy the value from b to a
            result[k] = v

    return result
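# Illustrative note (not in the original source): nested mappings merge
# recursively, while values that are not both mappings are simply replaced, e.g.:
#
#   merge_hash({'a': {'b': {'c': 1}}, 'list': [1, 2]},
#              {'a': {'b': {'d': 2}}, 'list': [3]})
#   # -> {'a': {'b': {'c': 1, 'd': 2}}, 'list': [3]}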


def load_extra_vars(loader, options):
    extra_vars = {}
    if hasattr(options, 'extra_vars'):
        for extra_vars_opt in options.extra_vars:
            data = None
            extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
            if extra_vars_opt.startswith(u"@"):
                # Argument is a YAML file (JSON is a subset of YAML)
                data = loader.load_from_file(extra_vars_opt[1:])
            elif extra_vars_opt and extra_vars_opt[0] in u'[{':
                # Arguments as YAML
                data = loader.load(extra_vars_opt)
            else:
                # Arguments as Key-value
                data = parse_kv(extra_vars_opt)

            if isinstance(data, MutableMapping):
                extra_vars = combine_vars(extra_vars, data)
            else:
                raise AnsibleOptionsError("Invalid extra vars data supplied. '%s' could not be made into a dictionary" % extra_vars_opt)

    return extra_vars
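# Illustrative note (not in the original source): this mirrors how multiple
# --extra-vars/-e options are folded into a single dict, assuming a loader that
# provides load()/load_from_file() (e.g. DataLoader):
#
#   ansible-playbook site.yml -e '@vars.yml' -e '{"port": 8080}' -e 'user=deploy'
#
#   options.extra_vars == [u'@vars.yml', u'{"port": 8080}', u'user=deploy']
#   load_extra_vars(loader, options)
#   # -> vars.yml contents combined with {'port': 8080} and {'user': u'deploy'},
#   #    with later -e options taking precedence via combine_vars()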


def load_options_vars(options, version):

    options_vars = {'ansible_version': version}
    aliases = {'check': 'check_mode',
               'diff': 'diff_mode',
               'inventory': 'inventory_sources',
               'subset': 'limit',
               'tags': 'run_tags'}

    for attr in ('check', 'diff', 'forks', 'inventory', 'skip_tags', 'subset', 'tags'):
        opt = getattr(options, attr, None)
        if opt is not None:
            options_vars['ansible_%s' % aliases.get(attr, attr)] = opt

    return options_vars
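# Illustrative note (not in the original source): with a typical ansible-playbook
# invocation the resulting magic variables look roughly like the dict below
# (values are made up; which keys appear depends on the parsed CLI options):
#
#   {'ansible_version': '2.4.0.0',
#    'ansible_check_mode': True,
#    'ansible_diff_mode': False,
#    'ansible_forks': 5,
#    'ansible_inventory_sources': [u'/etc/ansible/hosts'],
#    'ansible_run_tags': [u'all'],
#    'ansible_skip_tags': []}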


def isidentifier(ident):
    """
    Determines whether a string is a valid Python identifier, using the ast module.
    Originally posted at: http://stackoverflow.com/a/29586366
    """

    if not isinstance(ident, string_types):
        return False

    try:
        root = ast.parse(ident)
    except SyntaxError:
        return False

    if not isinstance(root, ast.Module):
        return False

    if len(root.body) != 1:
        return False

    if not isinstance(root.body[0], ast.Expr):
        return False

    if not isinstance(root.body[0].value, ast.Name):
        return False

    if root.body[0].value.id != ident:
        return False

    return True
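# Illustrative note (not in the original source): the ast-based check accepts
# plain names and rejects anything that does not parse to a single bare Name
# expression, e.g.:
#
#   isidentifier('foo')        # True
#   isidentifier('foo.bar')    # False (attribute access, not a bare name)
#   isidentifier('1foo')       # False (does not parse)
#   isidentifier('foo bar')    # False (does not parse)
#   isidentifier(42)           # False (not a string)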