# (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ast
import base64
import datetime
import imp
import json
import os
import shlex
import zipfile
import random
import re

from io import BytesIO

from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.loader import module_utils_loader, ps_module_utils_loader
from ansible.plugins.shell.powershell import async_watchdog, async_wrapper, become_wrapper, leaf_exec, exec_wrapper
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
from ansible.executor import action_write_locks

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()


REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_VERSION = b"\"<<ANSIBLE_VERSION>>\""
REPLACER_COMPLEX = b"\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
REPLACER_WINDOWS = b"# POWERSHELL_COMMON"
REPLACER_JSONARGS = b"<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
REPLACER_SELINUX = b"<<SELINUX_SPECIAL_FILESYSTEMS>>"
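# Illustrative note (not executed): these markers are matched verbatim against
# the module source further down in _find_module_utils(). For example, an
# old-style python module containing the line
#     #<<INCLUDE_ANSIBLE_MODULE_COMMON>>
# is rewritten to "from ansible.module_utils.basic import *" and treated as a
# new-style python module, while "# POWERSHELL_COMMON" marks a powershell module.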

# We could end up writing out parameters with unicode characters so we need to
# specify an encoding for the python source file
ENCODING_STRING = u'# -*- coding: utf-8 -*-'

# module_common is relative to module_utils, so fix the path
_MODULE_UTILS_PATH = os.path.join(os.path.dirname(__file__), '..', 'module_utils')

# ******************************************************************************

ANSIBALLZ_TEMPLATE = u'''%(shebang)s
%(coding)s
ANSIBALLZ_WRAPPER = True  # For test-module script to tell this is a ANSIBALLZ_WRAPPER
# This code is part of Ansible, but is an independent component.
# The code in this particular templatable string, and this templatable string
# only, is BSD licensed. Modules which end up using this snippet, which is
# dynamically combined together by Ansible still belong to the author of the
# module, and they may assign their own license to the complete work.
#
# Copyright (c), James Cammarata, 2016
# Copyright (c), Toshio Kuratomi, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import os.path
import sys
import __main__

# For some distros and python versions we pick up this script in the temporary
# directory. This leads to problems when the ansible module masks a python
# library that another import needs. We have not figured out what about the
# specific distros and python versions causes this to behave differently.
#
# Tested distros:
# Fedora23 with python3.4  Works
# Ubuntu15.10 with python2.7  Works
# Ubuntu15.10 with python3.4  Fails without this
# Ubuntu16.04.1 with python3.5  Fails without this
# To test on another platform:
# * use the copy module (since this shadows the stdlib copy module)
# * Turn off pipelining
# * Make sure that the destination file does not exist
# * ansible ubuntu16-test -m copy -a 'src=/etc/motd dest=/var/tmp/m'
# This will traceback in shutil. Looking at the complete traceback will show
# that shutil is importing copy which finds the ansible module instead of the
# stdlib module
scriptdir = None
try:
    scriptdir = os.path.dirname(os.path.realpath(__main__.__file__))
except (AttributeError, OSError):
    # Some platforms don't set __file__ when reading from stdin
    # OSX raises OSError if using abspath() in a directory we don't have
    # permission to read (realpath calls abspath)
    pass
if scriptdir is not None:
    sys.path = [p for p in sys.path if p != scriptdir]

import base64
import shutil
import zipfile
import tempfile
import subprocess

if sys.version_info < (3,):
    bytes = str
    PY3 = False
else:
    unicode = str
    PY3 = True

try:
    # Python-2.6+
    from io import BytesIO as IOStream
except ImportError:
    # Python < 2.6
    from StringIO import StringIO as IOStream

ZIPDATA = """%(zipdata)s"""


def invoke_module(module, modlib_path, json_params):
    pythonpath = os.environ.get('PYTHONPATH')
    if pythonpath:
        os.environ['PYTHONPATH'] = ':'.join((modlib_path, pythonpath))
    else:
        os.environ['PYTHONPATH'] = modlib_path

    p = subprocess.Popen([%(interpreter)s, module], env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    (stdout, stderr) = p.communicate(json_params)

    if not isinstance(stderr, (bytes, unicode)):
        stderr = stderr.read()
    if not isinstance(stdout, (bytes, unicode)):
        stdout = stdout.read()
    if PY3:
        sys.stderr.buffer.write(stderr)
        sys.stdout.buffer.write(stdout)
    else:
        sys.stderr.write(stderr)
        sys.stdout.write(stdout)
    return p.returncode


def debug(command, zipped_mod, json_params):
    # The code here normally doesn't run. It's only used for debugging on the
    # remote machine.
    #
    # The subcommands in this function make it easier to debug ansiballz
    # modules. Here are the basic steps:
    #
    # Run ansible with the environment variable ANSIBLE_KEEP_REMOTE_FILES=1 and -vvv
    # to save the module file remotely::
    #   $ ANSIBLE_KEEP_REMOTE_FILES=1 ansible host1 -m ping -a 'data=october' -vvv
    #
    # Part of the verbose output will tell you where on the remote machine the
    # module was written to::
    #   [...]
    #   <host1> SSH: EXEC ssh -C -q -o ControlMaster=auto -o ControlPersist=60s -o KbdInteractiveAuthentication=no -o
    #   PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o ConnectTimeout=10 -o
    #   ControlPath=/home/badger/.ansible/cp/ansible-ssh-%%h-%%p-%%r -tt rhel7 '/bin/sh -c '"'"'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8
    #   LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping'"'"''
    #   [...]
    #
    # Login to the remote machine and run the module file from the previous
    # step with the explode subcommand to extract the module payload into
    # source files::
    #   $ ssh host1
    #   $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping explode
    #   Module expanded into:
    #   /home/badger/.ansible/tmp/ansible-tmp-1461173408.08-279692652635227/ansible
    #
    # You can now edit the source files to instrument the code or experiment with
    # different parameter values. When you're ready to run the code you've modified
    # (instead of the code from the actual zipped module), use the execute subcommand like this::
    #   $ /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461173013.93-9076457629738/ping execute

    # Okay to use __file__ here because we're running from a kept file
    basedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'debug_dir')
    args_path = os.path.join(basedir, 'args')
    script_path = os.path.join(basedir, 'ansible_module_%(ansible_module)s.py')

    if command == 'explode':
        # transform the ZIPDATA into an exploded directory of code and then
        # print the path to the code. This is an easy way for people to look
        # at the code on the remote machine for debugging it in that
        # environment
        z = zipfile.ZipFile(zipped_mod)
        for filename in z.namelist():
            if filename.startswith('/'):
                raise Exception('Something wrong with this module zip file: should not contain absolute paths')

            dest_filename = os.path.join(basedir, filename)
            if dest_filename.endswith(os.path.sep) and not os.path.exists(dest_filename):
                os.makedirs(dest_filename)
            else:
                directory = os.path.dirname(dest_filename)
                if not os.path.exists(directory):
                    os.makedirs(directory)
                f = open(dest_filename, 'wb')
                f.write(z.read(filename))
                f.close()

        # write the args file
        f = open(args_path, 'wb')
        f.write(json_params)
        f.close()

        print('Module expanded into:')
        print('%%s' %% basedir)
        exitcode = 0

    elif command == 'execute':
        # Execute the exploded code instead of executing the module from the
        # embedded ZIPDATA. This allows people to easily run their modified
        # code on the remote machine to see how changes will affect it.
        # This differs slightly from default Ansible execution of Python modules
        # as it passes the arguments to the module via a file instead of stdin.

        # Set pythonpath to the debug dir
        pythonpath = os.environ.get('PYTHONPATH')
        if pythonpath:
            os.environ['PYTHONPATH'] = ':'.join((basedir, pythonpath))
        else:
            os.environ['PYTHONPATH'] = basedir

        p = subprocess.Popen([%(interpreter)s, script_path, args_path],
                             env=os.environ, shell=False, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, stdin=subprocess.PIPE)
        (stdout, stderr) = p.communicate()

        if not isinstance(stderr, (bytes, unicode)):
            stderr = stderr.read()
        if not isinstance(stdout, (bytes, unicode)):
            stdout = stdout.read()
        if PY3:
            sys.stderr.buffer.write(stderr)
            sys.stdout.buffer.write(stdout)
        else:
            sys.stderr.write(stderr)
            sys.stdout.write(stdout)
        return p.returncode

    elif command == 'excommunicate':
        # This attempts to run the module in-process (by importing a main
        # function and then calling it). It is not the way ansible generally
        # invokes the module so it won't work in every case. It is here to
        # aid certain debuggers which work better when the code doesn't change
        # from one process to another but there may be problems that occur
        # when using this that are only artifacts of how we're invoking here,
        # not actual bugs (as they don't affect the real way that we invoke
        # ansible modules)

        # stub the args and python path
        sys.argv = ['%(ansible_module)s', args_path]
        sys.path.insert(0, basedir)

        from ansible_module_%(ansible_module)s import main
        main()
        print('WARNING: Module returned to wrapper instead of exiting')
        sys.exit(1)
    else:
        print('WARNING: Unknown debug command. Doing nothing.')
        exitcode = 0

    return exitcode


if __name__ == '__main__':
    #
    # See comments in the debug() method for information on debugging
    #

    ANSIBALLZ_PARAMS = %(params)s
    if PY3:
        ANSIBALLZ_PARAMS = ANSIBALLZ_PARAMS.encode('utf-8')
    try:
        # There's a race condition with the controller removing the
        # remote_tmpdir and this module executing under async. So we cannot
        # store this in remote_tmpdir (use system tempdir instead)
        temp_path = tempfile.mkdtemp(prefix='ansible_')

        zipped_mod = os.path.join(temp_path, 'ansible_modlib.zip')
        modlib = open(zipped_mod, 'wb')
        modlib.write(base64.b64decode(ZIPDATA))
        modlib.close()

        if len(sys.argv) == 2:
            exitcode = debug(sys.argv[1], zipped_mod, ANSIBALLZ_PARAMS)
        else:
            z = zipfile.ZipFile(zipped_mod, mode='r')
            module = os.path.join(temp_path, 'ansible_module_%(ansible_module)s.py')
            f = open(module, 'wb')
            f.write(z.read('ansible_module_%(ansible_module)s.py'))
            f.close()

            # When installed via setuptools (including python setup.py install),
            # ansible may be installed with an easy-install.pth file. That file
            # may load the system-wide install of ansible rather than the one in
            # the module. sitecustomize is the only way to override that setting.
            z = zipfile.ZipFile(zipped_mod, mode='a')

            # py3: zipped_mod will be text, py2: it's bytes. Need bytes at the end
            sitecustomize = u'import sys\\nsys.path.insert(0,"%%s")\\n' %% zipped_mod
            sitecustomize = sitecustomize.encode('utf-8')
            # Use a ZipInfo to work around zipfile limitation on hosts with
            # clocks set to a pre-1980 year (for instance, Raspberry Pi)
            zinfo = zipfile.ZipInfo()
            zinfo.filename = 'sitecustomize.py'
            zinfo.date_time = ( %(year)i, %(month)i, %(day)i, %(hour)i, %(minute)i, %(second)i)
            z.writestr(zinfo, sitecustomize)
            z.close()

            exitcode = invoke_module(module, zipped_mod, ANSIBALLZ_PARAMS)
    finally:
        try:
            shutil.rmtree(temp_path)
        except (NameError, OSError):
            # tempdir creation probably failed
            pass
    sys.exit(exitcode)
'''


def _strip_comments(source):
    # Strip comments and blank lines from the wrapper
    buf = []
    for line in source.splitlines():
        l = line.strip()
        if not l or l.startswith(u'#'):
            continue
        buf.append(line)
    return u'\n'.join(buf)
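# Illustrative example (not executed), assuming a two-line input:
#     _strip_comments(u"# a comment\nreal_code = 1")  ->  u"real_code = 1"
# Only full-line comments and blank lines are dropped; a trailing comment on a
# code line is kept, since the goal is simply a smaller over-the-wire payload.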


if C.DEFAULT_KEEP_REMOTE_FILES:
    # Keep comments when KEEP_REMOTE_FILES is set. That way users will see
    # the comments with some nice usage instructions
    ACTIVE_ANSIBALLZ_TEMPLATE = ANSIBALLZ_TEMPLATE
else:
    # ANSIBALLZ_TEMPLATE stripped of comments for smaller over the wire size
    ACTIVE_ANSIBALLZ_TEMPLATE = _strip_comments(ANSIBALLZ_TEMPLATE)


class ModuleDepFinder(ast.NodeVisitor):
    # Caveats:
    # This code currently does not handle:
    # * relative imports from py2.6+: from . import urls
    IMPORT_PREFIX_SIZE = len('ansible.module_utils.')

    def __init__(self, *args, **kwargs):
        """
        Walk the ast tree for the python module.

        Save submodule[.submoduleN][.identifier] into self.submodules

        self.submodules will end up with tuples like:
          - ('basic',)
          - ('urls', 'fetch_url')
          - ('database', 'postgres')
          - ('database', 'postgres', 'quote')

        It's up to calling code to determine whether the final element of each
        dotted string is a module name or something else (a function, class, or
        variable name)
        """
        super(ModuleDepFinder, self).__init__(*args, **kwargs)
        self.submodules = set()

    def visit_Import(self, node):
        # import ansible.module_utils.MODLIB[.MODLIBn] [as asname]
        for alias in (a for a in node.names if a.name.startswith('ansible.module_utils.')):
            py_mod = alias.name[self.IMPORT_PREFIX_SIZE:]
            py_mod = tuple(py_mod.split('.'))
            self.submodules.add(py_mod)
        self.generic_visit(node)

    def visit_ImportFrom(self, node):
        # Specialcase: six is a special case because of its
        # import logic
        if node.names[0].name == '_six':
            self.submodules.add(('_six',))
        elif node.module.startswith('ansible.module_utils'):
            where_from = node.module[self.IMPORT_PREFIX_SIZE:]
            if where_from:
                # from ansible.module_utils.MODULE1[.MODULEn] import IDENTIFIER [as asname]
                # from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [as asname]
                # from ansible.module_utils.MODULE1[.MODULEn] import MODULEn+1 [,IDENTIFIER] [as asname]
                py_mod = tuple(where_from.split('.'))
                for alias in node.names:
                    self.submodules.add(py_mod + (alias.name,))
            else:
                # from ansible.module_utils import MODLIB [,MODLIB2] [as asname]
                for alias in node.names:
                    self.submodules.add((alias.name,))
        self.generic_visit(node)
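# Illustrative example (not executed): for module source containing
#     from ansible.module_utils.basic import AnsibleModule
#     import ansible.module_utils.facts
# a ModuleDepFinder().visit() pass leaves self.submodules as
#     set([('basic', 'AnsibleModule'), ('facts',)])
# and recursive_finder() below works out which elements are real module names.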


def _slurp(path):
    if not os.path.exists(path):
        raise AnsibleError("imported module support code does not exist at %s" % os.path.abspath(path))
    fd = open(path, 'rb')
    data = fd.read()
    fd.close()
    return data


def _get_shebang(interpreter, task_vars, args=tuple()):
    """
    Note: this is not a great API; it returns None instead of always returning
    a shebang line. Doing it this way allows the caller to decide to use the
    shebang it read from the file rather than trust that we reformatted what
    they already have correctly.
    """
    interpreter_config = u'ansible_%s_interpreter' % os.path.basename(interpreter).strip()

    if interpreter_config not in task_vars:
        return (None, interpreter)

    interpreter = task_vars[interpreter_config].strip()
    shebang = u'#!' + interpreter

    if args:
        shebang = shebang + u' ' + u' '.join(args)

    return (shebang, interpreter)
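# Illustrative example (not executed), assuming a host var is set:
#     task_vars = {'ansible_python_interpreter': u'/usr/bin/python3'}
#     _get_shebang(u'/usr/bin/python', task_vars)
#       -> (u'#!/usr/bin/python3', u'/usr/bin/python3')
# With no matching ansible_*_interpreter variable the shebang comes back as
# None and the caller keeps whatever shebang the module file already had.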


def recursive_finder(name, data, py_module_names, py_module_cache, zf):
    """
    Using ModuleDepFinder, make sure we have all of the module_utils files that
    the module and its module_utils files need.
    """
    # Parse the module and find the imports of ansible.module_utils
    tree = ast.parse(data)
    finder = ModuleDepFinder()
    finder.visit(tree)

    #
    # Determine which of the imports we've found are modules (vs class, function,
    # or variable names) for packages
    #

    normalized_modules = set()
    # Loop through the imports that we've found to normalize them
    # Exclude paths that match with paths we've already processed
    # (Have to exclude them a second time once the paths are processed)

    module_utils_paths = [p for p in module_utils_loader._get_paths(subdirs=False) if os.path.isdir(p)]
    module_utils_paths.append(_MODULE_UTILS_PATH)
    for py_module_name in finder.submodules.difference(py_module_names):
        module_info = None

        if py_module_name[0] == 'six':
            # Special case the python six library because it messes up the
            # import process in an incompatible way
            module_info = imp.find_module('six', module_utils_paths)
            py_module_name = ('six',)
            idx = 0
        elif py_module_name[0] == '_six':
            # Special case the python six library because it messes up the
            # import process in an incompatible way
            module_info = imp.find_module('_six', [os.path.join(p, 'six') for p in module_utils_paths])
            py_module_name = ('six', '_six')
            idx = 0
        else:
            # Check whether either the last or the second to last identifier is
            # a module name
            for idx in (1, 2):
                if len(py_module_name) < idx:
                    break
                try:
                    module_info = imp.find_module(py_module_name[-idx],
                                                  [os.path.join(p, *py_module_name[:-idx]) for p in module_utils_paths])
                    break
                except ImportError:
                    continue

        # Could not find the module. Construct a helpful error message.
        if module_info is None:
            msg = ['Could not find imported module support code for %s. Looked for' % (name,)]
            if idx == 2:
                msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
            else:
                msg.append(py_module_name[-1])
            raise AnsibleError(' '.join(msg))

        # Found a byte compiled file rather than source. We cannot send byte
        # compiled over the wire as the python version might be different.
        # imp.find_module seems to prefer to return source packages so we just
        # error out if imp.find_module returns byte compiled files (This is
        # fragile as it depends on undocumented imp.find_module behaviour)
        if module_info[2][2] not in (imp.PY_SOURCE, imp.PKG_DIRECTORY):
            msg = ['Could not find python source for imported module support code for %s. Looked for' % name]
            if idx == 2:
                msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
            else:
                msg.append(py_module_name[-1])
            raise AnsibleError(' '.join(msg))

        if idx == 2:
            # We've determined that the last portion was an identifier and
            # thus, not part of the module name
            py_module_name = py_module_name[:-1]

        # If not already processed then we've got work to do
        if py_module_name not in py_module_names:
            # If not in the cache, then read the file into the cache
            # We already have a file handle for the module open so it makes
            # sense to read it now
            if py_module_name not in py_module_cache:
                if module_info[2][2] == imp.PKG_DIRECTORY:
                    # Read the __init__.py instead of the module file as this is
                    # a python package
                    normalized_name = py_module_name + ('__init__',)
                    normalized_path = os.path.join(os.path.join(module_info[1], '__init__.py'))
                    normalized_data = _slurp(normalized_path)
                else:
                    normalized_name = py_module_name
                    normalized_path = module_info[1]
                    normalized_data = module_info[0].read()
                    module_info[0].close()

                py_module_cache[normalized_name] = (normalized_data, normalized_path)
                normalized_modules.add(normalized_name)

            # Make sure that all the packages that this module is a part of
            # are also added
            for i in range(1, len(py_module_name)):
                py_pkg_name = py_module_name[:-i] + ('__init__',)
                if py_pkg_name not in py_module_names:
                    pkg_dir_info = imp.find_module(py_pkg_name[-1],
                                                   [os.path.join(p, *py_pkg_name[:-1]) for p in module_utils_paths])
                    normalized_modules.add(py_pkg_name)
                    py_module_cache[py_pkg_name] = (_slurp(pkg_dir_info[1]), pkg_dir_info[1])

    #
    # iterate through all of the ansible.module_utils* imports that we haven't
    # already checked for new imports
    #

    # set of modules that we haven't added to the zipfile
    unprocessed_py_module_names = normalized_modules.difference(py_module_names)

    for py_module_name in unprocessed_py_module_names:
        py_module_path = os.path.join(*py_module_name)
        py_module_file_name = '%s.py' % py_module_path

        zf.writestr(os.path.join("ansible/module_utils",
                    py_module_file_name), py_module_cache[py_module_name][0])
        display.vvvvv("Using module_utils file %s" % py_module_cache[py_module_name][1])

    # Add the names of the files we're scheduling to examine in the loop to
    # py_module_names so that we don't re-examine them in the next pass
    # through recursive_finder()
    py_module_names.update(unprocessed_py_module_names)

    for py_module_file in unprocessed_py_module_names:
        recursive_finder(py_module_file, py_module_cache[py_module_file][0], py_module_names, py_module_cache, zf)
        # Save memory; the file won't have to be read again for this ansible module.
        del py_module_cache[py_module_file]
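# Illustrative note (not executed): after recursive_finder() returns, the
# zipfile handed in as ``zf`` contains entries such as
#     ansible/module_utils/basic.py
#     ansible/module_utils/six/__init__.py
# alongside the ansible/__init__.py, ansible/module_utils/__init__.py and
# ansible_module_<name>.py entries written by _find_module_utils() below.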


def _is_binary(b_module_data):
    textchars = bytearray(set([7, 8, 9, 10, 12, 13, 27]) | set(range(0x20, 0x100)) - set([0x7f]))
    start = b_module_data[:1024]
    return bool(start.translate(None, textchars))
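# Illustrative example (not executed): only the first 1024 bytes are examined,
# and any byte outside the text ranges above marks the module as binary:
#     _is_binary(b'#!/usr/bin/python\nprint("hi")\n')  ->  False
#     _is_binary(b'\x7fELF\x02\x01\x01\x00')           ->  True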


def _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, module_compression, async_timeout, become,
                       become_method, become_user, become_password, environment):
    """
    Given the source of the module, convert it to a Jinja2 template to insert
    module code and return whether it's a new or old style module.
    """
    module_substyle = module_style = 'old'

    # module_style is something important to calling code (ActionBase). It
    # determines how arguments are formatted (json vs k=v) and whether
    # a separate arguments file needs to be sent over the wire.
    # module_substyle is extra information that's useful internally. It tells
    # us what we have to look for to substitute in the module files and whether
    # we're using module replacer or ansiballz to format the module itself.
    if _is_binary(b_module_data):
        module_substyle = module_style = 'binary'
    elif REPLACER in b_module_data:
        # Do REPLACER before from ansible.module_utils because we need to make sure
        # we substitute "from ansible.module_utils.basic" for REPLACER
        module_style = 'new'
        module_substyle = 'python'
        b_module_data = b_module_data.replace(REPLACER, b'from ansible.module_utils.basic import *')
    elif b'from ansible.module_utils.' in b_module_data:
        module_style = 'new'
        module_substyle = 'python'
    elif REPLACER_WINDOWS in b_module_data:
        module_style = 'new'
        module_substyle = 'powershell'
        b_module_data = b_module_data.replace(REPLACER_WINDOWS, b'#Requires -Module Ansible.ModuleUtils.Legacy')
    elif re.search(b'#Requires -Module', b_module_data, re.IGNORECASE) \
            or re.search(b'#Requires -Version', b_module_data, re.IGNORECASE)\
            or re.search(b'#AnsibleRequires -OSVersion', b_module_data, re.IGNORECASE):
        module_style = 'new'
        module_substyle = 'powershell'
    elif REPLACER_JSONARGS in b_module_data:
        module_style = 'new'
        module_substyle = 'jsonargs'
    elif b'WANT_JSON' in b_module_data:
        module_substyle = module_style = 'non_native_want_json'
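    # Illustrative summary (not executed) of the detection above: the possible
    # (module_style, module_substyle) pairs are ('binary', 'binary'),
    # ('new', 'python'), ('new', 'powershell'), ('new', 'jsonargs'),
    # ('non_native_want_json', 'non_native_want_json'), and the default
    # ('old', 'old') when no marker matches.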

    shebang = None
    # Neither old-style, non_native_want_json nor binary modules should be modified
    # except for the shebang line (Done by modify_module)
    if module_style in ('old', 'non_native_want_json', 'binary'):
        return b_module_data, module_style, shebang

    output = BytesIO()
    py_module_names = set()

    if module_substyle == 'python':
        params = dict(ANSIBLE_MODULE_ARGS=module_args,)
        python_repred_params = repr(json.dumps(params))

        try:
            compression_method = getattr(zipfile, module_compression)
        except AttributeError:
            display.warning(u'Bad module compression string specified: %s. Using ZIP_STORED (no compression)' % module_compression)
            compression_method = zipfile.ZIP_STORED

        lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache')
        cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression))

        zipdata = None
        # Optimization -- don't lock if the module has already been cached
        if os.path.exists(cached_module_filename):
            display.debug('ANSIBALLZ: using cached module: %s' % cached_module_filename)
            zipdata = open(cached_module_filename, 'rb').read()
        else:
            if module_name in action_write_locks.action_write_locks:
                display.debug('ANSIBALLZ: Using lock for %s' % module_name)
                lock = action_write_locks.action_write_locks[module_name]
            else:
                # If the action plugin directly invokes the module (instead of
                # going through a strategy) then we don't have a cross-process
                # Lock specifically for this module. Use the "unexpected
                # module" lock instead
                display.debug('ANSIBALLZ: Using generic lock for %s' % module_name)
                lock = action_write_locks.action_write_locks[None]

            display.debug('ANSIBALLZ: Acquiring lock')
            with lock:
                display.debug('ANSIBALLZ: Lock acquired: %s' % id(lock))
                # Check that no other process has created this while we were
                # waiting for the lock
                if not os.path.exists(cached_module_filename):
                    display.debug('ANSIBALLZ: Creating module')
                    # Create the module zip data
                    zipoutput = BytesIO()
                    zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
                    # Note: If we need to import from release.py first,
                    # remember to catch all exceptions: https://github.com/ansible/ansible/issues/16523
                    zf.writestr('ansible/__init__.py',
                                b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n__version__="' +
                                to_bytes(__version__) + b'"\n__author__="' +
                                to_bytes(__author__) + b'"\n')
                    zf.writestr('ansible/module_utils/__init__.py', b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n')

                    zf.writestr('ansible_module_%s.py' % module_name, b_module_data)

                    py_module_cache = {('__init__',): (b'', '[builtin]')}
                    recursive_finder(module_name, b_module_data, py_module_names, py_module_cache, zf)
                    zf.close()
                    zipdata = base64.b64encode(zipoutput.getvalue())

                    # Write the assembled module to a temp file (write to temp
                    # so that no one looking for the file reads a partially
                    # written file)
                    if not os.path.exists(lookup_path):
                        # Note -- if we have a global function to setup, that would
                        # be a better place to run this
                        os.makedirs(lookup_path)
                    display.debug('ANSIBALLZ: Writing module')
                    with open(cached_module_filename + '-part', 'wb') as f:
                        f.write(zipdata)

                    # Rename the file into its final position in the cache so
                    # future users of this module can read it off the
                    # filesystem instead of constructing from scratch.
                    display.debug('ANSIBALLZ: Renaming module')
                    os.rename(cached_module_filename + '-part', cached_module_filename)
                    display.debug('ANSIBALLZ: Done creating module')

            if zipdata is None:
                display.debug('ANSIBALLZ: Reading module after lock')
                # Another process wrote the file while we were waiting for
                # the write lock. Go ahead and read the data from disk
                # instead of re-creating it.
                try:
                    zipdata = open(cached_module_filename, 'rb').read()
                except IOError:
                    raise AnsibleError('A different worker process failed to create module file. '
                                       'Look at traceback for that process for debugging information.')
        zipdata = to_text(zipdata, errors='surrogate_or_strict')

        shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars)
        if shebang is None:
            shebang = u'#!/usr/bin/python'

        # Enclose the parts of the interpreter in quotes because we're
        # substituting it into the template as a Python string
        interpreter_parts = interpreter.split(u' ')
        interpreter = u"'{0}'".format(u"', '".join(interpreter_parts))

        now = datetime.datetime.utcnow()
        output.write(to_bytes(ACTIVE_ANSIBALLZ_TEMPLATE % dict(
            zipdata=zipdata,
            ansible_module=module_name,
            params=python_repred_params,
            shebang=shebang,
            interpreter=interpreter,
            coding=ENCODING_STRING,
            year=now.year,
            month=now.month,
            day=now.day,
            hour=now.hour,
            minute=now.minute,
            second=now.second,
        )))
        b_module_data = output.getvalue()

    elif module_substyle == 'powershell':
        # Powershell/winrm don't actually make use of shebang so we can
        # safely set this here. If we let the fallback code handle this
        # it can fail in the presence of the UTF8 BOM commonly added by
        # Windows text editors
        shebang = u'#!powershell'

        exec_manifest = dict(
            module_entry=to_text(base64.b64encode(b_module_data)),
            powershell_modules=dict(),
            module_args=module_args,
            actions=['exec'],
            environment=environment
        )

        exec_manifest['exec'] = to_text(base64.b64encode(to_bytes(leaf_exec)))

        if async_timeout > 0:
            exec_manifest["actions"].insert(0, 'async_watchdog')
            exec_manifest["async_watchdog"] = to_text(base64.b64encode(to_bytes(async_watchdog)))
            exec_manifest["actions"].insert(0, 'async_wrapper')
            exec_manifest["async_wrapper"] = to_text(base64.b64encode(to_bytes(async_wrapper)))
            exec_manifest["async_jid"] = str(random.randint(0, 999999999999))
            exec_manifest["async_timeout_sec"] = async_timeout

        if become and become_method == 'runas':
            exec_manifest["actions"].insert(0, 'become')
            exec_manifest["become_user"] = become_user
            exec_manifest["become_password"] = become_password
            exec_manifest["become"] = to_text(base64.b64encode(to_bytes(become_wrapper)))

        lines = b_module_data.split(b'\n')
        module_names = set()
        become_required = False
        min_os_version = None
        min_ps_version = None

        requires_module_list = re.compile(to_bytes(r'(?i)^#\s*requires\s+\-module(?:s?)\s*(Ansible\.ModuleUtils\..+)'))
        requires_ps_version = re.compile(to_bytes(r'(?i)^#requires\s+\-version\s+([0-9]+(\.[0-9]+){0,3})$'))
        requires_os_version = re.compile(to_bytes(r'(?i)^#ansiblerequires\s+\-osversion\s+([0-9]+(\.[0-9]+){0,3})$'))
        requires_become = re.compile(to_bytes(r'(?i)^#ansiblerequires\s+\-become$'))
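        # Illustrative example (not executed): a module whose source contains
        #     #Requires -Module Ansible.ModuleUtils.Legacy
        #     #Requires -Version 3.0
        #     #AnsibleRequires -OSVersion 6.2
        #     #AnsibleRequires -Become
        # ends up with Ansible.ModuleUtils.Legacy in module_names,
        # min_ps_version '3.0', min_os_version '6.2' and become_required True
        # after the scan below.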

        for line in lines:
            module_util_line_match = requires_module_list.match(line)
            if module_util_line_match:
                module_names.add(module_util_line_match.group(1))

            requires_ps_version_match = requires_ps_version.match(line)
            if requires_ps_version_match:
                min_ps_version = to_text(requires_ps_version_match.group(1))
                # Powershell cannot cast a string of "1" to version, it must
                # have at least the major.minor for it to work so we append 0
                if requires_ps_version_match.group(2) is None:
                    min_ps_version = "%s.0" % min_ps_version

            requires_os_version_match = requires_os_version.match(line)
            if requires_os_version_match:
                min_os_version = to_text(requires_os_version_match.group(1))
                if requires_os_version_match.group(2) is None:
                    min_os_version = "%s.0" % min_os_version

            requires_become_match = requires_become.match(line)
            if requires_become_match:
                become_required = True

        for m in set(module_names):
            m = to_text(m)
            mu_path = ps_module_utils_loader.find_plugin(m, ".psm1")
            if not mu_path:
                raise AnsibleError('Could not find imported module support code for \'%s\'.' % m)
            exec_manifest["powershell_modules"][m] = to_text(
                base64.b64encode(
                    to_bytes(
                        _slurp(mu_path)
                    )
                )
            )

        exec_manifest['min_ps_version'] = min_ps_version
        exec_manifest['min_os_version'] = min_os_version
        if become_required and 'become' not in exec_manifest["actions"]:
            exec_manifest["actions"].insert(0, 'become')
            exec_manifest["become_user"] = "SYSTEM"
            exec_manifest["become_password"] = None
            exec_manifest["become"] = to_text(base64.b64encode(to_bytes(become_wrapper)))

        # FUTURE: smuggle this back as a dict instead of serializing here; the connection plugin may need to modify it
        module_json = json.dumps(exec_manifest)

        b_module_data = exec_wrapper.replace(b"$json_raw = ''", b"$json_raw = @'\r\n%s\r\n'@" % to_bytes(module_json))

    elif module_substyle == 'jsonargs':
        module_args_json = to_bytes(json.dumps(module_args))

        # these strings could be included in a third-party module but
        # officially they were included in the 'basic' snippet for new-style
        # python modules (which has been replaced with something else in
        # ansiballz). If we remove them from jsonargs-style module replacer
        # then we can remove them everywhere.
        python_repred_args = to_bytes(repr(module_args_json))
        b_module_data = b_module_data.replace(REPLACER_VERSION, to_bytes(repr(__version__)))
        b_module_data = b_module_data.replace(REPLACER_COMPLEX, python_repred_args)
        b_module_data = b_module_data.replace(REPLACER_SELINUX, to_bytes(','.join(C.DEFAULT_SELINUX_SPECIAL_FS)))

        # The main event -- substitute the JSON args string into the module
        b_module_data = b_module_data.replace(REPLACER_JSONARGS, module_args_json)

    facility = b'syslog.' + to_bytes(task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY), errors='surrogate_or_strict')
    b_module_data = b_module_data.replace(b'syslog.LOG_USER', facility)

    return (b_module_data, module_style, shebang)


def modify_module(module_name, module_path, module_args, task_vars=None, module_compression='ZIP_STORED', async_timeout=0, become=False,
                  become_method=None, become_user=None, become_password=None, environment=None):
    """
    Used to insert chunks of code into modules before transfer rather than
    doing regular python imports. This allows for more efficient transfer in
    a non-bootstrapping scenario by not moving extra files over the wire and
    also takes care of embedding arguments in the transferred modules.

    This version is done in such a way that local imports can still be
    used in the module code, so IDEs don't have to be aware of what is going on.

    Example:

    from ansible.module_utils.basic import *

       ... will result in the insertion of basic.py into the module
       from the module_utils/ directory in the source tree.

    For powershell, this code effectively no-ops, as the exec wrapper requires access to a number of
    properties not available here.
    """
    task_vars = {} if task_vars is None else task_vars
    environment = {} if environment is None else environment

    with open(module_path, 'rb') as f:

        # read in the module source
        b_module_data = f.read()

    (b_module_data, module_style, shebang) = _find_module_utils(module_name, b_module_data, module_path, module_args, task_vars, module_compression,
                                                                async_timeout=async_timeout, become=become, become_method=become_method,
                                                                become_user=become_user, become_password=become_password,
                                                                environment=environment)

    if module_style == 'binary':
        return (b_module_data, module_style, to_text(shebang, nonstring='passthru'))
    elif shebang is None:
        lines = b_module_data.split(b"\n", 1)
        if lines[0].startswith(b"#!"):
            shebang = lines[0].strip()
            args = shlex.split(str(shebang[2:]))
            interpreter = args[0]
            interpreter = to_bytes(interpreter)

            new_shebang = to_bytes(_get_shebang(interpreter, task_vars, args[1:])[0], errors='surrogate_or_strict', nonstring='passthru')
            if new_shebang:
                lines[0] = shebang = new_shebang

            if os.path.basename(interpreter).startswith(b'python'):
                lines.insert(1, to_bytes(ENCODING_STRING))
        else:
            # No shebang, assume a binary module?
            pass

        b_module_data = b"\n".join(lines)
    else:
        shebang = to_bytes(shebang, errors='surrogate_or_strict')

    return (b_module_data, module_style, to_text(shebang, nonstring='passthru'))