2013-10-31 21:52:37 +01:00
|
|
|
# This code is part of Ansible, but is an independent component.
|
|
|
|
# This particular file snippet, and this file snippet only, is BSD licensed.
|
|
|
|
# Modules you write using this snippet, which is embedded dynamically by Ansible
|
|
|
|
# still belong to the author of the module, and may assign their own license
|
|
|
|
# to the complete work.
|
|
|
|
#
|
|
|
|
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
|
|
|
|
# All rights reserved.
|
|
|
|
#
|
|
|
|
# Redistribution and use in source and binary forms, with or without modification,
|
|
|
|
# are permitted provided that the following conditions are met:
|
|
|
|
#
|
|
|
|
# * Redistributions of source code must retain the above copyright
|
|
|
|
# notice, this list of conditions and the following disclaimer.
|
|
|
|
# * Redistributions in binary form must reproduce the above copyright notice,
|
|
|
|
# this list of conditions and the following disclaimer in the documentation
|
|
|
|
# and/or other materials provided with the distribution.
|
|
|
|
#
|
|
|
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
|
|
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
|
|
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
|
|
|
|
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
|
|
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
|
|
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
|
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
|
|
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
|
|
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
#
|
|
|
|
|
|
|
|
# == BEGIN DYNAMICALLY INSERTED CODE ==
|
|
|
|
|
|
|
|
# Placeholders replaced by Ansible when this snippet is embedded into a module.
# key=value argument string injected by the runner:
MODULE_ARGS = "<<INCLUDE_ANSIBLE_MODULE_ARGS>>"
# JSON-encoded complex arguments injected by the runner:
MODULE_COMPLEX_ARGS = "<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>"

# Accepted spellings for boolean module parameters (see AnsibleModule.boolean).
BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
|
|
|
|
|
|
|
|
# ansible modules can be written in any language. To simplify
|
|
|
|
# development of Python modules, the functions available here
|
|
|
|
# can be inserted in any module source automatically by including
|
|
|
|
# #<<INCLUDE_ANSIBLE_MODULE_COMMON>> on a blank line by itself inside
|
|
|
|
# of an ansible module. The source of this common code lives
|
|
|
|
# in lib/ansible/module_common.py
|
|
|
|
|
2014-05-19 17:26:06 +02:00
|
|
|
import locale
|
2013-10-31 21:52:37 +01:00
|
|
|
import os
|
|
|
|
import re
|
2014-03-06 20:33:18 +01:00
|
|
|
import pipes
|
2013-10-31 21:52:37 +01:00
|
|
|
import shlex
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import syslog
|
|
|
|
import types
|
|
|
|
import time
|
|
|
|
import shutil
|
|
|
|
import stat
|
2014-03-20 11:12:58 +01:00
|
|
|
import tempfile
|
2013-10-31 21:52:37 +01:00
|
|
|
import traceback
|
|
|
|
import grp
|
|
|
|
import pwd
|
|
|
|
import platform
|
|
|
|
import errno
|
2014-03-10 22:06:52 +01:00
|
|
|
import tempfile
|
2013-10-31 21:52:37 +01:00
|
|
|
|
|
|
|
# json is stdlib from python 2.6; fall back to simplejson on older
# interpreters, and exit cleanly when neither can be used.
try:
    import json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        sys.stderr.write('Error: ansible requires a json module, none found!')
        sys.exit(1)
except SyntaxError:
    # e.g. a python3-only json backport on a python2 interpreter
    sys.stderr.write('SyntaxError: probably due to json and python being for different versions')
    sys.exit(1)
|
|
|
|
|
|
|
|
# Optional: python selinux bindings (libselinux-python); selinux-related
# methods below degrade gracefully when absent.
HAVE_SELINUX=False
try:
    import selinux
    HAVE_SELINUX=True
except ImportError:
    pass
|
|
|
|
|
|
|
|
# hashlib appeared in python 2.5; fall back to the legacy md5 module.
HAVE_HASHLIB=False
try:
    from hashlib import md5 as _md5
    HAVE_HASHLIB=True
except ImportError:
    from md5 import md5 as _md5

# sha256 may be unavailable on older interpreters; callers must cope
# with _sha256 being undefined.
try:
    from hashlib import sha256 as _sha256
except ImportError:
    pass
|
|
|
|
|
|
|
|
try:
|
2013-11-01 00:47:05 +01:00
|
|
|
from systemd import journal
|
|
|
|
has_journal = True
|
2013-10-31 21:52:37 +01:00
|
|
|
except ImportError:
|
2013-11-01 00:47:05 +01:00
|
|
|
import syslog
|
|
|
|
has_journal = False
|
2013-10-31 21:52:37 +01:00
|
|
|
|
2014-04-08 16:05:07 +02:00
|
|
|
# ast.literal_eval exists from python 2.6; provide an equivalent for
# python 2.4 built on the old 'compiler' package.
try:
    from ast import literal_eval as _literal_eval
except ImportError:
    # a replacement for literal_eval that works with python 2.4. from:
    # https://mail.python.org/pipermail/python-list/2009-September/551880.html
    # which is essentially a cut/past from an earlier (2.6) version of python's
    # ast.py
    from compiler import parse
    from compiler.ast import *
    def _literal_eval(node_or_string):
        """
        Safely evaluate an expression node or a string containing a Python
        expression. The string or node provided may only consist of the following
        Python literal structures: strings, numbers, tuples, lists, dicts, booleans,
        and None.
        """
        _safe_names = {'None': None, 'True': True, 'False': False}
        if isinstance(node_or_string, basestring):
            node_or_string = parse(node_or_string, mode='eval')
        if isinstance(node_or_string, Expression):
            node_or_string = node_or_string.node
        def _convert(node):
            # recursively reduce the compiler AST to plain python values;
            # anything outside the literal subset raises ValueError
            if isinstance(node, Const) and isinstance(node.value, (basestring, int, float, long, complex)):
                return node.value
            elif isinstance(node, Tuple):
                return tuple(map(_convert, node.nodes))
            elif isinstance(node, List):
                return list(map(_convert, node.nodes))
            elif isinstance(node, Dict):
                return dict((_convert(k), _convert(v)) for k, v in node.items)
            elif isinstance(node, Name):
                if node.name in _safe_names:
                    return _safe_names[node.name]
            elif isinstance(node, UnarySub):
                # negative number literal
                return -_convert(node.expr)
            raise ValueError('malformed string')
        return _convert(node_or_string)
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
# Options shared by all file-handling modules; merged into a module's
# argument_spec when AnsibleModule(..., add_file_common_args=True).
FILE_COMMON_ARGUMENTS=dict(
    src = dict(),
    mode = dict(),
    owner = dict(),
    group = dict(),
    # selinux context fields; '_default' asks for the system default
    seuser = dict(),
    serole = dict(),
    selevel = dict(),
    setype = dict(),
    # not taken by the file module, but other modules call file so it must ignore them.
    content = dict(),
    backup = dict(),
    force = dict(),
    remote_src = dict(), # used by assemble
    delimiter = dict(), # used by assemble
    directory_mode = dict(), # used by copy
)
|
|
|
|
|
2014-03-10 22:06:52 +01:00
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
def get_platform():
    """Name of the running platform, e.g. 'Linux', per platform.system()."""
    system_name = platform.system()
    return system_name
|
|
|
|
|
|
|
|
def get_distribution():
    ''' return the distribution name '''
    # Only meaningful on Linux; any other platform returns None.
    if platform.system() == 'Linux':
        try:
            # NOTE: platform.linux_distribution is python2-era API
            # (removed in python 3.8); this snippet targets python 2.
            distribution = platform.linux_distribution()[0].capitalize()
            if not distribution and os.path.isfile('/etc/system-release'):
                # distros such as Amazon Linux only ship /etc/system-release
                distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
                if 'Amazon' in distribution:
                    distribution = 'Amazon'
                else:
                    distribution = 'OtherLinux'
        except:
            # FIXME: MethodMissing, I assume?
            distribution = platform.dist()[0].capitalize()
    else:
        distribution = None
    return distribution
|
|
|
|
|
2014-06-16 15:06:50 +02:00
|
|
|
def get_distribution_version():
    ''' return the distribution version '''
    # Mirrors get_distribution(): Linux-only, None elsewhere.
    if platform.system() == 'Linux':
        try:
            distribution_version = platform.linux_distribution()[1]
            if not distribution_version and os.path.isfile('/etc/system-release'):
                # distros such as Amazon Linux only ship /etc/system-release
                distribution_version = platform.linux_distribution(supported_dists=['system'])[1]
        except:
            # FIXME: MethodMissing, I assume?
            distribution_version = platform.dist()[1]
    else:
        distribution_version = None
    return distribution_version
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
def load_platform_subclass(cls, *args, **kwargs):
    '''
    used by modules like User to have different implementations based on detected platform. See User
    module for an example.
    '''

    this_platform = get_platform()
    distribution = get_distribution()

    # Prefer an exact (platform, distribution) match, then a
    # platform-only match, finally the base class itself.
    chosen = None
    if distribution is not None:
        for candidate in cls.__subclasses__():
            if candidate.distribution is not None and candidate.distribution == distribution and candidate.platform == this_platform:
                chosen = candidate
    if chosen is None:
        for candidate in cls.__subclasses__():
            if candidate.platform == this_platform and candidate.distribution is None:
                chosen = candidate
    if chosen is None:
        chosen = cls

    return super(cls, chosen).__new__(chosen)
|
|
|
|
|
|
|
|
|
|
|
|
class AnsibleModule(object):
|
|
|
|
|
|
|
|
    def __init__(self, argument_spec, bypass_checks=False, no_log=False,
        check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
        required_one_of=None, add_file_common_args=False, supports_check_mode=False):

        '''
        common code for quickly building an ansible module in Python
        (although you can write modules in anything that can return JSON)
        see library/* for examples
        '''

        self.argument_spec = argument_spec
        self.supports_check_mode = supports_check_mode
        self.check_mode = False
        self.no_log = no_log
        self.cleanup_files = []

        self.aliases = {}

        # merge in the shared file options without clobbering any the
        # module declared itself
        if add_file_common_args:
            for k, v in FILE_COMMON_ARGUMENTS.iteritems():
                if k not in self.argument_spec:
                    self.argument_spec[k] = v

        # check the locale as set by the current environment, and
        # reset to LANG=C if it's an invalid/unavailable locale
        self._check_locale()

        (self.params, self.args) = self._load_params()

        # runner pseudo-parameters are always legal inputs
        self._legal_inputs = ['CHECKMODE', 'NO_LOG']

        self.aliases = self._handle_aliases()

        if check_invalid_arguments:
            self._check_invalid_arguments()
        self._check_for_check_mode()
        self._check_for_no_log()

        # check exclusive early
        if not bypass_checks:
            self._check_mutually_exclusive(mutually_exclusive)

        # first defaults pass: only non-None defaults, so required-checks
        # below still catch missing required options
        self._set_defaults(pre=True)

        if not bypass_checks:
            self._check_required_arguments()
            self._check_argument_values()
            self._check_argument_types()
            self._check_required_together(required_together)
            self._check_required_one_of(required_one_of)

        # second pass: every declared option ends up in self.params
        self._set_defaults(pre=False)
        if not self.no_log:
            self._log_invocation()

        # finally, make sure we're in a sane working dir
        self._set_cwd()
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
    def load_file_common_arguments(self, params):
        '''
        many modules deal with files, this encapsulates common
        options that the file module accepts such that it is directly
        available to all modules and they can share code.
        '''

        # 'path' wins over 'dest'; no path at all means nothing to do
        path = params.get('path', params.get('dest', None))
        if path is None:
            return {}
        else:
            path = os.path.expanduser(path)

        mode = params.get('mode', None)
        owner = params.get('owner', None)
        group = params.get('group', None)

        # selinux related options
        seuser = params.get('seuser', None)
        serole = params.get('serole', None)
        setype = params.get('setype', None)
        selevel = params.get('selevel', None)
        secontext = [seuser, serole, setype]

        if self.selinux_mls_enabled():
            secontext.append(selevel)

        # replace '_default' placeholders with the system default context
        default_secontext = self.selinux_default_context(path)
        for i in range(len(default_secontext)):
            # NOTE(review): 'i is not None' is always true (i is an int
            # index); presumably a guard on secontext[i] was intended --
            # TODO confirm.  Behavior is unaffected since the '_default'
            # comparison already excludes None.
            if i is not None and secontext[i] == '_default':
                secontext[i] = default_secontext[i]

        return dict(
            path=path, mode=mode, owner=owner, group=group,
            seuser=seuser, serole=serole, setype=setype,
            selevel=selevel, secontext=secontext,
        )
|
|
|
|
|
|
|
|
|
|
|
|
# Detect whether using selinux that is MLS-aware.
|
|
|
|
# While this means you can set the level/range with
|
|
|
|
# selinux.lsetfilecon(), it may or may not mean that you
|
|
|
|
# will get the selevel as part of the context returned
|
|
|
|
# by selinux.lgetfilecon().
|
|
|
|
|
|
|
|
def selinux_mls_enabled(self):
|
|
|
|
if not HAVE_SELINUX:
|
|
|
|
return False
|
|
|
|
if selinux.is_selinux_mls_enabled() == 1:
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|
|
|
|
|
|
|
|
def selinux_enabled(self):
|
|
|
|
if not HAVE_SELINUX:
|
|
|
|
seenabled = self.get_bin_path('selinuxenabled')
|
|
|
|
if seenabled is not None:
|
|
|
|
(rc,out,err) = self.run_command(seenabled)
|
|
|
|
if rc == 0:
|
|
|
|
self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
|
|
|
|
return False
|
|
|
|
if selinux.is_selinux_enabled() == 1:
|
|
|
|
return True
|
|
|
|
else:
|
|
|
|
return False
|
|
|
|
|
|
|
|
# Determine whether we need a placeholder for selevel/mls
|
|
|
|
def selinux_initial_context(self):
|
|
|
|
context = [None, None, None]
|
|
|
|
if self.selinux_mls_enabled():
|
|
|
|
context.append(None)
|
|
|
|
return context
|
|
|
|
|
|
|
|
    def _to_filesystem_str(self, path):
        '''Returns filesystem path as a str, if it wasn't already.

        Used in selinux interactions because it cannot accept unicode
        instances, and specifying complex args in a playbook leaves
        you with unicode instances. This method currently assumes
        that your filesystem encoding is UTF-8.

        '''
        # 'unicode' is the python2 text type; bytes pass through untouched
        if isinstance(path, unicode):
            path = path.encode("utf-8")
        return path
|
|
|
|
|
|
|
|
    # If selinux fails to find a default, return an array of None
    def selinux_default_context(self, path, mode=0):
        # default (matchpathcon) context for *path*; a well-formed list of
        # Nones when selinux is unavailable or the lookup fails
        context = self.selinux_initial_context()
        if not HAVE_SELINUX or not self.selinux_enabled():
            return context
        try:
            ret = selinux.matchpathcon(self._to_filesystem_str(path), mode)
        except OSError:
            return context
        if ret[0] == -1:
            return context
        # Limit split to 4 because the selevel, the last in the list,
        # may contain ':' characters
        context = ret[1].split(':', 3)
        return context
|
|
|
|
|
|
|
|
    def selinux_context(self, path):
        # current context of *path* as [user, role, type(, level)];
        # fails the module when the path does not exist
        context = self.selinux_initial_context()
        if not HAVE_SELINUX or not self.selinux_enabled():
            return context
        try:
            ret = selinux.lgetfilecon_raw(self._to_filesystem_str(path))
        except OSError, e:
            if e.errno == errno.ENOENT:
                self.fail_json(path=path, msg='path %s does not exist' % path)
            else:
                self.fail_json(path=path, msg='failed to retrieve selinux context')
        if ret[0] == -1:
            return context
        # Limit split to 4 because the selevel, the last in the list,
        # may contain ':' characters
        context = ret[1].split(':', 3)
        return context
|
|
|
|
|
|
|
|
def user_and_group(self, filename):
|
|
|
|
filename = os.path.expanduser(filename)
|
|
|
|
st = os.lstat(filename)
|
|
|
|
uid = st.st_uid
|
|
|
|
gid = st.st_gid
|
|
|
|
return (uid, gid)
|
|
|
|
|
2014-04-17 23:16:54 +02:00
|
|
|
def find_mount_point(self, path):
|
|
|
|
path = os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
|
|
|
|
while not os.path.ismount(path):
|
|
|
|
path = os.path.dirname(path)
|
|
|
|
return path
|
|
|
|
|
|
|
|
def is_nfs_path(self, path):
|
|
|
|
"""
|
|
|
|
Returns a tuple containing (True, selinux_context) if the given path
|
|
|
|
is on a NFS mount point, otherwise the return will be (False, None).
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
f = open('/proc/mounts', 'r')
|
|
|
|
mount_data = f.readlines()
|
|
|
|
f.close()
|
|
|
|
except:
|
|
|
|
return (False, None)
|
|
|
|
path_mount_point = self.find_mount_point(path)
|
|
|
|
for line in mount_data:
|
|
|
|
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
|
|
|
|
if path_mount_point == mount_point and 'nfs' in fstype:
|
|
|
|
nfs_context = self.selinux_context(path_mount_point)
|
|
|
|
return (True, nfs_context)
|
|
|
|
return (False, None)
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
def set_default_selinux_context(self, path, changed):
|
|
|
|
if not HAVE_SELINUX or not self.selinux_enabled():
|
|
|
|
return changed
|
|
|
|
context = self.selinux_default_context(path)
|
|
|
|
return self.set_context_if_different(path, context, False)
|
|
|
|
|
|
|
|
    def set_context_if_different(self, path, context, changed):
        # Apply *context* (list, None entries mean "keep current") to
        # *path* when it differs; returns the possibly-updated changed flag.

        if not HAVE_SELINUX or not self.selinux_enabled():
            return changed
        cur_context = self.selinux_context(path)
        new_context = list(cur_context)
        # Iterate over the current context instead of the
        # argument context, which may have selevel.

        (is_nfs, nfs_context) = self.is_nfs_path(path)
        if is_nfs:
            # on NFS, use the mount's context rather than per-file labels
            new_context = nfs_context
        else:
            for i in range(len(cur_context)):
                if len(context) > i:
                    if context[i] is not None and context[i] != cur_context[i]:
                        new_context[i] = context[i]
                    if context[i] is None:
                        new_context[i] = cur_context[i]

        if cur_context != new_context:
            try:
                if self.check_mode:
                    return True
                rc = selinux.lsetfilecon(self._to_filesystem_str(path),
                                         str(':'.join(new_context)))
            except OSError:
                self.fail_json(path=path, msg='invalid selinux context', new_context=new_context, cur_context=cur_context, input_was=context)
            if rc != 0:
                self.fail_json(path=path, msg='set selinux context failed')
            changed = True
        return changed
|
|
|
|
|
|
|
|
    def set_owner_if_different(self, path, owner, changed):
        # chown *path* to *owner* (login name or numeric uid) when needed;
        # honors check mode and returns the changed flag
        path = os.path.expanduser(path)
        if owner is None:
            return changed
        orig_uid, orig_gid = self.user_and_group(path)
        try:
            uid = int(owner)
        except ValueError:
            # not numeric: resolve the login name
            try:
                uid = pwd.getpwnam(owner).pw_uid
            except KeyError:
                self.fail_json(path=path, msg='chown failed: failed to look up user %s' % owner)
        if orig_uid != uid:
            if self.check_mode:
                return True
            try:
                # lchown so symlinks themselves are changed, not targets
                os.lchown(path, uid, -1)
            except OSError:
                self.fail_json(path=path, msg='chown failed')
            changed = True
        return changed
|
|
|
|
|
|
|
|
    def set_group_if_different(self, path, group, changed):
        # chgrp *path* to *group* (group name or numeric gid) when needed;
        # honors check mode and returns the changed flag
        path = os.path.expanduser(path)
        if group is None:
            return changed
        orig_uid, orig_gid = self.user_and_group(path)
        try:
            gid = int(group)
        except ValueError:
            # not numeric: resolve the group name
            try:
                gid = grp.getgrnam(group).gr_gid
            except KeyError:
                self.fail_json(path=path, msg='chgrp failed: failed to look up group %s' % group)
        if orig_gid != gid:
            if self.check_mode:
                return True
            try:
                # lchown so symlinks themselves are changed, not targets
                os.lchown(path, -1, gid)
            except OSError:
                self.fail_json(path=path, msg='chgrp failed')
            changed = True
        return changed
|
|
|
|
|
|
|
|
    def set_mode_if_different(self, path, mode, changed):
        # chmod *path* to *mode* (int, or octal string like "0644") when
        # needed; honors check mode and returns the changed flag
        path = os.path.expanduser(path)
        if mode is None:
            return changed
        try:
            # FIXME: support English modes
            if not isinstance(mode, int):
                # string modes are interpreted as octal
                mode = int(mode, 8)
        except Exception, e:
            self.fail_json(path=path, msg='mode needs to be something octalish', details=str(e))

        st = os.lstat(path)
        prev_mode = stat.S_IMODE(st[stat.ST_MODE])

        if prev_mode != mode:
            if self.check_mode:
                return True
            # FIXME: comparison against string above will cause this to be executed
            # every time
            try:
                # lchmod (where available) avoids dereferencing symlinks
                if 'lchmod' in dir(os):
                    os.lchmod(path, mode)
                else:
                    os.chmod(path, mode)
            except OSError, e:
                if os.path.islink(path) and e.errno == errno.EPERM:  # Can't set mode on symbolic links
                    pass
                elif e.errno == errno.ENOENT: # Can't set mode on broken symbolic links
                    pass
                else:
                    raise e
            except Exception, e:
                self.fail_json(path=path, msg='chmod failed', details=str(e))

        # re-stat to observe what actually happened
        st = os.lstat(path)
        new_mode = stat.S_IMODE(st[stat.ST_MODE])

        if new_mode != prev_mode:
            changed = True
        return changed
|
|
|
|
|
2014-03-14 04:07:35 +01:00
|
|
|
def set_fs_attributes_if_different(self, file_args, changed):
|
2013-10-31 21:52:37 +01:00
|
|
|
# set modes owners and context as needed
|
|
|
|
changed = self.set_context_if_different(
|
|
|
|
file_args['path'], file_args['secontext'], changed
|
|
|
|
)
|
|
|
|
changed = self.set_owner_if_different(
|
|
|
|
file_args['path'], file_args['owner'], changed
|
|
|
|
)
|
|
|
|
changed = self.set_group_if_different(
|
|
|
|
file_args['path'], file_args['group'], changed
|
|
|
|
)
|
|
|
|
changed = self.set_mode_if_different(
|
|
|
|
file_args['path'], file_args['mode'], changed
|
|
|
|
)
|
|
|
|
return changed
|
|
|
|
|
|
|
|
    def set_directory_attributes_if_different(self, file_args, changed):
        # backward-compatible alias for set_fs_attributes_if_different
        return self.set_fs_attributes_if_different(file_args, changed)
|
|
|
|
|
|
|
|
    def set_file_attributes_if_different(self, file_args, changed):
        # backward-compatible alias for set_fs_attributes_if_different
        return self.set_fs_attributes_if_different(file_args, changed)
|
2013-10-31 21:52:37 +01:00
|
|
|
|
|
|
|
    def add_path_info(self, kwargs):
        '''
        for results that are files, supplement the info about the file
        in the return path with stats about the file path.
        '''
        path = kwargs.get('path', kwargs.get('dest', None))
        if path is None:
            return kwargs
        if os.path.exists(path):
            (uid, gid) = self.user_and_group(path)
            kwargs['uid'] = uid
            kwargs['gid'] = gid
            # resolve names, falling back to numeric ids as strings
            try:
                user = pwd.getpwuid(uid)[0]
            except KeyError:
                user = str(uid)
            try:
                group = grp.getgrgid(gid)[0]
            except KeyError:
                group = str(gid)
            kwargs['owner'] = user
            kwargs['group'] = group
            st = os.lstat(path)
            kwargs['mode'] = oct(stat.S_IMODE(st[stat.ST_MODE]))
            # secontext not yet supported
            if os.path.islink(path):
                kwargs['state'] = 'link'
            elif os.path.isdir(path):
                kwargs['state'] = 'directory'
            elif os.stat(path).st_nlink > 1:
                # more than one link to a non-directory: a hardlink
                kwargs['state'] = 'hard'
            else:
                kwargs['state'] = 'file'
            if HAVE_SELINUX and self.selinux_enabled():
                kwargs['secontext'] = ':'.join(self.selinux_context(path))
            kwargs['size'] = st[stat.ST_SIZE]
        else:
            kwargs['state'] = 'absent'
        return kwargs
|
|
|
|
|
2014-05-19 17:26:06 +02:00
|
|
|
    def _check_locale(self):
        '''
        Uses the locale module to test the currently set locale
        (per the LANG and LC_CTYPE environment settings)
        '''
        try:
            # setting the locale to '' uses the default locale
            # as it would be returned by locale.getdefaultlocale()
            locale.setlocale(locale.LC_ALL, '')
        except locale.Error, e:
            # fallback to the 'C' locale, which may cause unicode
            # issues but is preferable to simply failing because
            # of an unknown locale
            locale.setlocale(locale.LC_ALL, 'C')
            # also export to the environment so child processes agree
            os.environ['LANG'] = 'C'
            os.environ['LC_CTYPE'] = 'C'
        except Exception, e:
            self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
|
2013-10-31 21:52:37 +01:00
|
|
|
|
|
|
|
    def _handle_aliases(self):
        # Register every option name and alias as a legal input, copy
        # alias-supplied values onto their canonical keys, and return the
        # alias -> canonical-name mapping.
        aliases_results = {} #alias:canon
        for (k,v) in self.argument_spec.iteritems():
            self._legal_inputs.append(k)
            aliases = v.get('aliases', None)
            default = v.get('default', None)
            required = v.get('required', False)
            if default is not None and required:
                # not alias specific but this is a good place to check this
                self.fail_json(msg="internal error: required and default are mutally exclusive for %s" % k)
            if aliases is None:
                continue
            if type(aliases) != list:
                self.fail_json(msg='internal error: aliases must be a list')
            for alias in aliases:
                self._legal_inputs.append(alias)
                aliases_results[alias] = k
                if alias in self.params:
                    self.params[k] = self.params[alias]

        return aliases_results
|
|
|
|
|
|
|
|
    def _check_for_check_mode(self):
        # Honor the CHECKMODE pseudo-parameter injected by the runner;
        # modules without check-mode support exit early as 'skipped'.
        for (k,v) in self.params.iteritems():
            if k == 'CHECKMODE':
                if not self.supports_check_mode:
                    self.exit_json(skipped=True, msg="remote module does not support check mode")
                if self.supports_check_mode:
                    self.check_mode = True
|
|
|
|
|
2014-01-31 23:09:10 +01:00
|
|
|
    def _check_for_no_log(self):
        # Honor the NO_LOG pseudo-parameter injected by the runner.
        for (k,v) in self.params.iteritems():
            if k == 'NO_LOG':
                self.no_log = self.boolean(v)
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
    def _check_invalid_arguments(self):
        # Reject any supplied parameter that is not a declared option,
        # an alias, or a runner pseudo-parameter.
        for (k,v) in self.params.iteritems():
            # these should be in legal inputs already
            #if k in ('CHECKMODE', 'NO_LOG'):
            #    continue
            if k not in self._legal_inputs:
                self.fail_json(msg="unsupported parameter for module: %s" % k)
|
|
|
|
|
|
|
|
def _count_terms(self, check):
|
|
|
|
count = 0
|
|
|
|
for term in check:
|
2013-11-01 00:47:05 +01:00
|
|
|
if term in self.params:
|
|
|
|
count += 1
|
2013-10-31 21:52:37 +01:00
|
|
|
return count
|
|
|
|
|
|
|
|
def _check_mutually_exclusive(self, spec):
|
|
|
|
if spec is None:
|
|
|
|
return
|
|
|
|
for check in spec:
|
|
|
|
count = self._count_terms(check)
|
|
|
|
if count > 1:
|
|
|
|
self.fail_json(msg="parameters are mutually exclusive: %s" % check)
|
|
|
|
|
|
|
|
def _check_required_one_of(self, spec):
|
|
|
|
if spec is None:
|
|
|
|
return
|
|
|
|
for check in spec:
|
|
|
|
count = self._count_terms(check)
|
|
|
|
if count == 0:
|
|
|
|
self.fail_json(msg="one of the following is required: %s" % ','.join(check))
|
|
|
|
|
|
|
|
def _check_required_together(self, spec):
|
|
|
|
if spec is None:
|
|
|
|
return
|
|
|
|
for check in spec:
|
|
|
|
counts = [ self._count_terms([field]) for field in check ]
|
|
|
|
non_zero = [ c for c in counts if c > 0 ]
|
|
|
|
if len(non_zero) > 0:
|
|
|
|
if 0 in counts:
|
|
|
|
self.fail_json(msg="parameters are required together: %s" % check)
|
|
|
|
|
|
|
|
    def _check_required_arguments(self):
        ''' ensure all required arguments are present '''
        missing = []
        for (k,v) in self.argument_spec.iteritems():
            required = v.get('required', False)
            if required and k not in self.params:
                missing.append(k)
        # report all missing options in a single failure
        if len(missing) > 0:
            self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
|
|
|
|
|
|
|
|
    def _check_argument_values(self):
        ''' ensure all arguments have the requested values, and there are no stray arguments '''
        for (k,v) in self.argument_spec.iteritems():
            choices = v.get('choices',None)
            if choices is None:
                continue
            if type(choices) == list:
                if k in self.params:
                    if self.params[k] not in choices:
                        choices_str=",".join([str(c) for c in choices])
                        msg="value of %s must be one of: %s, got: %s" % (k, choices_str, self.params[k])
                        self.fail_json(msg=msg)
            else:
                # 'choices' must be a list; anything else is a spec bug
                self.fail_json(msg="internal error: do not know how to interpret argument_spec")
|
|
|
|
|
2013-10-31 23:44:13 +01:00
|
|
|
    def safe_eval(self, str, locals=None, include_exceptions=False):
        # Evaluate a string as a python literal, refusing method calls and
        # imports.  On failure the input is returned unchanged; with
        # include_exceptions=True a (value, exception) tuple is returned.
        # NOTE(review): the parameters shadow the builtins 'str' and
        # 'locals'; renaming would break keyword callers, so they are kept.

        # do not allow method calls to modules
        if not isinstance(str, basestring):
            # already templated to a datastructure, perhaps?
            if include_exceptions:
                return (str, None)
            return str
        if re.search(r'\w\.\w+\(', str):
            if include_exceptions:
                return (str, None)
            return str
        # do not allow imports
        if re.search(r'import \w+', str):
            if include_exceptions:
                return (str, None)
            return str
        try:
            result = None
            if not locals:
                result = _literal_eval(str)
            else:
                # NOTE(review): ast.literal_eval takes a single argument;
                # this 3-arg call will raise and fall through to the
                # except below -- TODO confirm intent.
                result = _literal_eval(str, None, locals)
            if include_exceptions:
                return (result, None)
            else:
                return result
        except Exception, e:
            if include_exceptions:
                return (str, e)
            return str
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
    def _check_argument_types(self):
        ''' ensure all arguments have the requested type '''
        for (k, v) in self.argument_spec.iteritems():
            wanted = v.get('type', None)
            if wanted is None:
                continue
            if k not in self.params:
                continue

            value = self.params[k]
            is_invalid = False

            if wanted == 'str':
                if not isinstance(value, basestring):
                    self.params[k] = str(value)
            elif wanted == 'list':
                if not isinstance(value, list):
                    if isinstance(value, basestring):
                        # comma-separated string becomes a list
                        self.params[k] = value.split(",")
                    elif isinstance(value, int) or isinstance(value, float):
                        # single scalar becomes a one-element list
                        self.params[k] = [ str(value) ]
                    else:
                        is_invalid = True
            elif wanted == 'dict':
                if not isinstance(value, dict):
                    if isinstance(value, basestring):
                        if value.startswith("{"):
                            # try JSON first, then a python-literal dict
                            try:
                                self.params[k] = json.loads(value)
                            except:
                                (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
                                if exc is not None:
                                    self.fail_json(msg="unable to evaluate dictionary for %s" % k)
                                self.params[k] = result
                        elif '=' in value:
                            # k1=v1,k2=v2 shorthand
                            self.params[k] = dict([x.split("=", 1) for x in value.split(",")])
                        else:
                            self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
                    else:
                        is_invalid = True
            elif wanted == 'bool':
                if not isinstance(value, bool):
                    if isinstance(value, basestring):
                        self.params[k] = self.boolean(value)
                    else:
                        is_invalid = True
            elif wanted == 'int':
                if not isinstance(value, int):
                    if isinstance(value, basestring):
                        self.params[k] = int(value)
                    else:
                        is_invalid = True
            elif wanted == 'float':
                if not isinstance(value, float):
                    if isinstance(value, basestring):
                        self.params[k] = float(value)
                    else:
                        is_invalid = True
            else:
                self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))

            if is_invalid:
                self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
|
|
|
|
|
|
|
|
def _set_defaults(self, pre=True):
|
2013-11-01 00:47:05 +01:00
|
|
|
for (k,v) in self.argument_spec.iteritems():
|
|
|
|
default = v.get('default', None)
|
|
|
|
if pre == True:
|
|
|
|
# this prevents setting defaults on required items
|
|
|
|
if default is not None and k not in self.params:
|
|
|
|
self.params[k] = default
|
|
|
|
else:
|
|
|
|
# make sure things without a default still get set None
|
|
|
|
if k not in self.params:
|
|
|
|
self.params[k] = default
|
2013-10-31 21:52:37 +01:00
|
|
|
|
|
|
|
def _load_params(self):
    ''' read the input and return a dictionary and the arguments string '''
    raw_args = MODULE_ARGS
    tokens = shlex.split(raw_args)
    parsed = {}
    for token in tokens:
        try:
            key, val = token.split("=", 1)
        except Exception:
            self.fail_json(msg="this module requires key=value arguments (%s)" % (tokens))
        parsed[key] = val
    # complex (JSON) args form the base; key=value pairs win on conflict
    combined = json.loads(MODULE_COMPLEX_ARGS)
    combined.update(parsed)
    return (combined, raw_args)
|
|
|
|
|
|
|
|
def _log_invocation(self):
    ''' log that ansible ran the module '''
    # TODO: generalize a separate log function and make log_invocation use it
    # Sanitize possible password argument when logging.
    log_args = dict()
    # parameter names that are always treated as secrets
    passwd_keys = ['password', 'login_password']

    filter_re = [
        # filter out things like user:pass@foo/whatever
        # and http://username:pass@wherever/foo
        re.compile('^(?P<before>.*:)(?P<password>.*)(?P<after>\@.*)$'),
    ]

    # Build log_args: a masked copy of self.params safe for logging.
    for param in self.params:
        # resolve aliases so a no_log declared on the canonical option
        # name also applies when the caller used an alias
        canon = self.aliases.get(param, param)
        arg_opts = self.argument_spec.get(canon, {})
        no_log = arg_opts.get('no_log', False)

        if no_log:
            log_args[param] = 'NOT_LOGGING_PARAMETER'
        elif param in passwd_keys:
            log_args[param] = 'NOT_LOGGING_PASSWORD'
        else:
            found = False
            # NOTE(review): loop variable 'filter' shadows the builtin
            for filter in filter_re:
                # unicode values are matched as-is; everything else is
                # stringified first so the regex can run on it
                if isinstance(self.params[param], unicode):
                    m = filter.match(self.params[param])
                else:
                    m = filter.match(str(self.params[param]))
                if m:
                    d = m.groupdict()
                    # keep the surrounding structure, hide only the secret part
                    log_args[param] = d['before'] + "********" + d['after']
                    found = True
                    break
            if not found:
                log_args[param] = self.params[param]

    module = 'ansible-%s' % os.path.basename(__file__)
    # flatten the masked params into a single "k=v k=v ..." message
    msg = ''
    for arg in log_args:
        if isinstance(log_args[arg], basestring):
            msg = msg + arg + '=' + log_args[arg].decode('utf-8') + ' '
        else:
            msg = msg + arg + '=' + str(log_args[arg]) + ' '
    if msg:
        msg = 'Invoked with %s' % msg
    else:
        msg = 'Invoked'

    # 6655 - allow for accented characters
    try:
        msg = msg.encode('utf8')
    except UnicodeDecodeError, e:
        # best effort only: log the unencoded message rather than fail
        pass

    # prefer systemd's journal when the bindings are available,
    # falling back to syslog when journal logging fails or is absent
    if (has_journal):
        journal_args = ["MESSAGE=%s %s" % (module, msg)]
        journal_args.append("MODULE=%s" % os.path.basename(__file__))
        for arg in log_args:
            journal_args.append(arg.upper() + "=" + str(log_args[arg]))
        try:
            journal.sendv(*journal_args)
        except IOError, e:
            # fall back to syslog since logging to journal failed
            syslog.openlog(str(module), 0, syslog.LOG_USER)
            syslog.syslog(syslog.LOG_NOTICE, msg) #1
    else:
        syslog.openlog(str(module), 0, syslog.LOG_USER)
        syslog.syslog(syslog.LOG_NOTICE, msg) #2
|
2013-10-31 21:52:37 +01:00
|
|
|
|
2014-03-18 16:17:44 +01:00
|
|
|
def _set_cwd(self):
|
|
|
|
try:
|
|
|
|
cwd = os.getcwd()
|
|
|
|
if not os.access(cwd, os.F_OK|os.R_OK):
|
|
|
|
raise
|
|
|
|
return cwd
|
|
|
|
except:
|
|
|
|
# we don't have access to the cwd, probably because of sudo.
|
|
|
|
# Try and move to a neutral location to prevent errors
|
|
|
|
for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
|
|
|
|
try:
|
|
|
|
if os.access(cwd, os.F_OK|os.R_OK):
|
|
|
|
os.chdir(cwd)
|
|
|
|
return cwd
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
# we won't error here, as it may *not* be a problem,
|
|
|
|
# and we don't want to break modules unnecessarily
|
|
|
|
return None
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
def get_bin_path(self, arg, required=False, opt_dirs=None):
    '''
    find system executable in PATH.
    Optional arguments:
       - required:  if executable is not found and required is true, fail_json
       - opt_dirs:  optional list of directories to search in addition to PATH
    if found return full path; otherwise return None
    '''
    # fixed: mutable default argument ([]) replaced by the None sentinel;
    # passing opt_dirs=[] explicitly still behaves exactly as before
    if opt_dirs is None:
        opt_dirs = []
    sbin_paths = ['/sbin', '/usr/sbin', '/usr/local/sbin']
    paths = []
    for d in opt_dirs:
        if d is not None and os.path.exists(d):
            paths.append(d)
    paths += os.environ.get('PATH', '').split(os.pathsep)
    bin_path = None
    # mangle PATH to include /sbin dirs
    for p in sbin_paths:
        if p not in paths and os.path.exists(p):
            paths.append(p)
    for d in paths:
        path = os.path.join(d, arg)
        # candidate must both exist and carry an execute bit
        if os.path.exists(path) and self.is_executable(path):
            bin_path = path
            break
    if required and bin_path is None:
        self.fail_json(msg='Failed to find required executable %s' % arg)
    return bin_path
|
|
|
|
|
|
|
|
def boolean(self, arg):
    ''' return a bool for the arg '''
    # None and genuine bools pass straight through untouched
    if arg is None or type(arg) == bool:
        return arg
    # normalise string input before consulting the truth tables
    if type(arg) in types.StringTypes:
        arg = arg.lower()
    if arg in BOOLEANS_TRUE:
        return True
    if arg in BOOLEANS_FALSE:
        return False
    self.fail_json(msg='Boolean %s not in either boolean list' % arg)
|
|
|
|
|
|
|
|
def jsonify(self, data):
    '''Serialize data to JSON, retrying with fallback text encodings.'''
    candidate_encodings = ("utf-8", "latin-1", "unicode_escape")
    for enc in candidate_encodings:
        try:
            return json.dumps(data, encoding=enc)
        # Old systems using simplejson module does not support encoding keyword.
        except TypeError:
            return json.dumps(data)
        except UnicodeDecodeError:
            # this encoding cannot represent the data; try the next one
            continue
    self.fail_json(msg='Invalid unicode encoding encountered')
|
2013-10-31 21:52:37 +01:00
|
|
|
|
|
|
|
def from_json(self, data):
    '''Deserialize a JSON string into native Python objects.'''
    parsed = json.loads(data)
    return parsed
|
|
|
|
|
2014-05-13 20:52:38 +02:00
|
|
|
def add_cleanup_file(self, path):
    '''Queue path for removal at module exit, ignoring duplicates.'''
    already_queued = path in self.cleanup_files
    if not already_queued:
        self.cleanup_files.append(path)
|
|
|
|
|
|
|
|
def do_cleanup_files(self):
    '''Remove every file previously queued via add_cleanup_file().'''
    for queued in self.cleanup_files:
        self.cleanup(queued)
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
def exit_json(self, **kwargs):
    ''' return from the module, without error '''
    self.add_path_info(kwargs)
    # a result that never reported a change defaults to changed=False
    if 'changed' not in kwargs:
        kwargs['changed'] = False
    self.do_cleanup_files()
    print(self.jsonify(kwargs))
    sys.exit(0)
|
|
|
|
|
|
|
|
def fail_json(self, **kwargs):
    ''' return from the module, with an error message '''
    self.add_path_info(kwargs)
    assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
    kwargs['failed'] = True
    self.do_cleanup_files()
    print(self.jsonify(kwargs))
    sys.exit(1)
|
|
|
|
|
|
|
|
def is_executable(self, path):
    '''is the given path executable?

    True when any execute bit (user, group or other) is set on path.
    '''
    # fixed: a single stat() call replaces the original three; testing
    # the combined mask is equivalent to testing each bit separately
    mode = os.stat(path)[stat.ST_MODE]
    return bool(mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
|
|
|
|
|
|
|
|
def digest_from_file(self, filename, digest_method):
    ''' Return hex digest of local file for a given digest_method, or None if file is not present. '''
    if not os.path.exists(filename):
        return None
    if os.path.isdir(filename):
        self.fail_json(msg="attempted to take checksum of directory: %s" % filename)
    digest = digest_method
    blocksize = 64 * 1024
    infile = open(filename, 'rb')
    # fixed: try/finally ensures the descriptor is closed even if a
    # read() raises partway through (previously it leaked)
    try:
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
    finally:
        infile.close()
    return digest.hexdigest()
|
|
|
|
|
|
|
|
def md5(self, filename):
    '''Hex MD5 digest of filename via digest_from_file(); None when absent.'''
    hasher = _md5()
    return self.digest_from_file(filename, hasher)
|
|
|
|
|
|
|
|
def sha256(self, filename):
    '''Hex SHA-256 digest of filename via digest_from_file(); None when absent.'''
    # SHA-256 is only reachable through hashlib, so bail out early without it
    if not HAVE_HASHLIB:
        self.fail_json(msg="SHA-256 checksums require hashlib, which is available in Python 2.5 and higher")
    hasher = _sha256()
    return self.digest_from_file(filename, hasher)
|
|
|
|
|
|
|
|
def backup_local(self, fn):
    '''make a date-marked backup of the specified file, return the backup path'''
    # backups named basename-YYYY-MM-DD@HH:MM~
    ext = time.strftime("%Y-%m-%d@%H:%M~", time.localtime(time.time()))
    backupdest = '%s.%s' % (fn, ext)

    try:
        shutil.copy2(fn, backupdest)
    # fixed: copy2 raises IOError (not shutil.Error) for unreadable or
    # missing sources; previously that escaped as a raw traceback
    except (shutil.Error, IOError) as e:
        self.fail_json(msg='Could not make backup of %s to %s: %s' % (fn, backupdest, e))
    return backupdest
|
|
|
|
|
2014-05-13 20:52:38 +02:00
|
|
|
def cleanup(self, tmpfile):
    '''Best-effort removal of tmpfile; failures go to stderr, never raise.'''
    if not os.path.exists(tmpfile):
        return
    try:
        os.unlink(tmpfile)
    except OSError as e:
        sys.stderr.write("could not cleanup %s: %s" % (tmpfile, e))
|
|
|
|
|
|
|
|
def atomic_move(self, src, dest):
    '''atomically move src to dest, copying attributes from dest, returns true on success
    it uses os.rename to ensure this as it is an atomic operation, rest of the function is
    to work around limitations, corner cases and ensure selinux context is saved if possible'''
    context = None
    dest_stat = None
    if os.path.exists(dest):
        try:
            # mirror the existing destination's mode and ownership onto
            # src so the rename does not change the file's attributes
            dest_stat = os.stat(dest)
            os.chmod(src, dest_stat.st_mode & 07777)
            os.chown(src, dest_stat.st_uid, dest_stat.st_gid)
        except OSError, e:
            # EPERM just means we are not privileged to chown; anything
            # else is unexpected and propagates
            if e.errno != errno.EPERM:
                raise
        if self.selinux_enabled():
            context = self.selinux_context(dest)
    else:
        if self.selinux_enabled():
            context = self.selinux_default_context(dest)

    # remember whether dest is being created so permissions can be
    # normalised from umask afterwards
    creating = not os.path.exists(dest)

    try:
        login_name = os.getlogin()
    except OSError:
        # not having a tty can cause the above to fail, so
        # just get the LOGNAME environment variable instead
        login_name = os.environ.get('LOGNAME', None)

    # if the original login_name doesn't match the currently
    # logged-in user, or if the SUDO_USER environment variable
    # is set, then this user has switched their credentials
    switched_user = login_name and login_name != pwd.getpwuid(os.getuid())[0] or os.environ.get('SUDO_USER')

    try:
        # Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
        os.rename(src, dest)
    except (IOError,OSError), e:
        # only try workarounds for errno 18 (cross device), 1 (not permited) and 13 (permission denied)
        if e.errno != errno.EPERM and e.errno != errno.EXDEV and e.errno != errno.EACCES:
            self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))

        # fall back to copying src next to dest and renaming within the
        # destination filesystem, which keeps the final step atomic
        dest_dir = os.path.dirname(dest)
        dest_file = os.path.basename(dest)
        tmp_dest = tempfile.NamedTemporaryFile(
            prefix=".ansible_tmp", dir=dest_dir, suffix=dest_file)

        try: # leaves tmp file behind when sudo and not root
            if switched_user and os.getuid() != 0:
                # cleanup will happen by 'rm' of tempdir
                # copy2 will preserve some metadata
                shutil.copy2(src, tmp_dest.name)
            else:
                shutil.move(src, tmp_dest.name)
            if self.selinux_enabled():
                self.set_context_if_different(
                    tmp_dest.name, context, False)
            # restore the original destination's ownership on the temp
            # copy before it replaces dest
            tmp_stat = os.stat(tmp_dest.name)
            if dest_stat and (tmp_stat.st_uid != dest_stat.st_uid or tmp_stat.st_gid != dest_stat.st_gid):
                os.chown(tmp_dest.name, dest_stat.st_uid, dest_stat.st_gid)
            os.rename(tmp_dest.name, dest)
        except (shutil.Error, OSError, IOError), e:
            self.cleanup(tmp_dest.name)
            self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))

    if creating:
        # make sure the file has the correct permissions
        # based on the current value of umask
        umask = os.umask(0)
        os.umask(umask)
        # NOTE(review): '0666 ^ umask' only equals the conventional
        # '0666 & ~umask' when the umask has no bits outside 0666; a
        # umask containing execute bits would be XOR'ed *on* -- confirm
        os.chmod(dest, 0666 ^ umask)
        if switched_user:
            os.chown(dest, os.getuid(), os.getgid())

    if self.selinux_enabled():
        # rename might not preserve context
        self.set_context_if_different(dest, context, False)
|
|
|
|
|
2014-03-10 22:11:24 +01:00
|
|
|
def run_command(self, args, check_rc=False, close_fds=False, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False):
    '''
    Execute a command, returns rc, stdout, and stderr.
    args is the command to run
    If args is a list, the command will be run with shell=False.
    If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
    If args is a string and use_unsafe_shell=True it run with shell=True.
    Other arguments:
    - check_rc (boolean)  Whether to call fail_json in case of
                          non zero RC.  Default is False.
    - close_fds (boolean) See documentation for subprocess.Popen().
                          Default is False.
    - executable (string) See documentation for subprocess.Popen().
                          Default is None.
    - data (string)       Text piped to the child's stdin (a trailing
                          newline is appended unless binary_data is set).
    - path_prefix (string) Directory prepended to PATH for the child.
    - cwd (string)        Working directory to run the command in.
    - use_unsafe_shell (boolean) Run through the shell (shell=True).
    '''

    # decide whether a shell is involved and normalise args accordingly
    shell = False
    if isinstance(args, list):
        if use_unsafe_shell:
            # quote each element so the joined string is shell-safe
            args = " ".join([pipes.quote(x) for x in args])
            shell = True
    elif isinstance(args, basestring) and use_unsafe_shell:
        shell = True
    elif isinstance(args, basestring):
        args = shlex.split(args.encode('utf-8'))
    else:
        msg = "Argument 'args' to run_command must be list or string"
        self.fail_json(rc=257, cmd=args, msg=msg)

    # expand things like $HOME and ~
    if not shell:
        args = [ os.path.expandvars(os.path.expanduser(x)) for x in args ]

    rc = 0
    msg = None
    st_in = None

    # Set a temporary env path if a prefix is passed
    # NOTE(review): env is a reference to os.environ, so the PATH change
    # below leaks into this process, not just the child -- confirm intent
    env=os.environ
    if path_prefix:
        env['PATH']="%s:%s" % (path_prefix, env['PATH'])

    # create a printable version of the command for use
    # in reporting later, which strips out things like
    # passwords from the args list
    if isinstance(args, list):
        clean_args = " ".join(pipes.quote(arg) for arg in args)
    else:
        clean_args = args

    # all clean strings should return two match groups,
    # where the first is the CLI argument and the second
    # is the password/key/phrase that will be hidden
    clean_re_strings = [
        # this removes things like --password, --pass, --pass-wd, etc.
        # optionally followed by an '=' or a space. The password can
        # be quoted or not too, though it does not care about quotes
        # that are not balanced
        # source: http://blog.stevenlevithan.com/archives/match-quoted-string
        r'([-]{0,2}pass[-]?(?:word|wd)?[=\s]?)((?:["\'])?(?:[^\s])*(?:\1)?)',
        # mask user:pass@host style credentials
        r'^(?P<before>.*:)(?P<password>.*)(?P<after>\@.*)$',
        # TODO: add more regex checks here
    ]
    for re_str in clean_re_strings:
        r = re.compile(re_str)
        clean_args = r.sub(r'\1********', clean_args)

    # only open a stdin pipe when there is data to feed the child
    if data:
        st_in = subprocess.PIPE

    kwargs = dict(
        executable=executable,
        shell=shell,
        close_fds=close_fds,
        stdin= st_in,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE
    )

    if path_prefix:
        kwargs['env'] = env
    if cwd and os.path.isdir(cwd):
        kwargs['cwd'] = cwd

    # store the pwd
    prev_dir = os.getcwd()

    # make sure we're in the right working directory
    if cwd and os.path.isdir(cwd):
        try:
            os.chdir(cwd)
        except (OSError, IOError), e:
            self.fail_json(rc=e.errno, msg="Could not open %s , %s" % (cwd, str(e)))

    try:
        cmd = subprocess.Popen(args, **kwargs)

        if data:
            if not binary_data:
                # text payloads get a terminating newline for the child
                data += '\n'
        out, err = cmd.communicate(input=data)
        rc = cmd.returncode
    except (OSError, IOError), e:
        # report the sanitised command, never the raw one
        self.fail_json(rc=e.errno, msg=str(e), cmd=clean_args)
    except:
        self.fail_json(rc=257, msg=traceback.format_exc(), cmd=clean_args)

    if rc != 0 and check_rc:
        msg = err.rstrip()
        self.fail_json(cmd=clean_args, rc=rc, stdout=out, stderr=err, msg=msg)

    # reset the pwd
    os.chdir(prev_dir)

    return (rc, out, err)
|
|
|
|
|
2014-03-12 15:55:54 +01:00
|
|
|
def append_to_file(self, filename, str):
    '''Append the text str to filename (the path may contain ~ and $VARS).

    NOTE: the parameter name 'str' shadows the builtin; it is kept
    unchanged for backward compatibility with keyword callers.
    '''
    filename = os.path.expandvars(os.path.expanduser(filename))
    fh = open(filename, 'a')
    # fixed: try/finally guarantees the handle is closed even when
    # write() raises (previously it leaked)
    try:
        fh.write(str)
    finally:
        fh.close()
|
|
|
|
|
2013-10-31 21:52:37 +01:00
|
|
|
def pretty_bytes(self, size):
    '''Render a byte count as a human-readable string, e.g. 1536 -> "1.50 KB".'''
    # largest-first thresholds; the loop stops at the first one that fits
    # (plain int literals auto-promote to long, so the L suffix is not needed)
    thresholds = (
        (1 << 70, 'ZB'),
        (1 << 60, 'EB'),
        (1 << 50, 'PB'),
        (1 << 40, 'TB'),
        (1 << 30, 'GB'),
        (1 << 20, 'MB'),
        (1 << 10, 'KB'),
        (1, 'Bytes'),
    )
    for limit, suffix in thresholds:
        if size >= limit:
            break
    return '%.2f %s' % (float(size) / limit, suffix)
|
|
|
|
|
2014-03-19 15:30:10 +01:00
|
|
|
def get_module_path():
    '''Absolute directory containing this module file, with symlinks resolved.'''
    resolved = os.path.realpath(__file__)
    return os.path.dirname(resolved)
|