2016-11-30 06:21:53 +01:00
|
|
|
"""Miscellaneous utility functions and classes."""
|
|
|
|
|
|
|
|
from __future__ import absolute_import, print_function
|
|
|
|
|
2017-10-26 09:21:46 +02:00
|
|
|
import atexit
|
2018-05-09 18:24:39 +02:00
|
|
|
import contextlib
|
2016-11-30 06:21:53 +01:00
|
|
|
import errno
|
2018-02-16 07:56:05 +01:00
|
|
|
import fcntl
|
2017-08-19 02:21:11 +02:00
|
|
|
import inspect
|
2017-10-26 09:21:46 +02:00
|
|
|
import json
|
2016-11-30 06:21:53 +01:00
|
|
|
import os
|
|
|
|
import pipes
|
2017-05-05 10:23:00 +02:00
|
|
|
import pkgutil
|
2017-10-26 09:21:46 +02:00
|
|
|
import random
|
|
|
|
import re
|
2016-11-30 06:21:53 +01:00
|
|
|
import shutil
|
2018-05-09 18:24:39 +02:00
|
|
|
import socket
|
2017-10-26 09:21:46 +02:00
|
|
|
import stat
|
|
|
|
import string
|
2016-11-30 06:21:53 +01:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2017-10-26 09:21:46 +02:00
|
|
|
import tempfile
|
2016-11-30 06:21:53 +01:00
|
|
|
import time
|
|
|
|
|
2018-02-16 07:56:05 +01:00
|
|
|
from struct import unpack, pack
|
|
|
|
from termios import TIOCGWINSZ
|
|
|
|
|
2017-10-17 21:49:10 +02:00
|
|
|
try:
    from abc import ABC
except ImportError:
    # Python 2 has no abc.ABC convenience class; build the equivalent base class from ABCMeta.
    from abc import ABCMeta
    ABC = ABCMeta('ABC', (), {})


try:
    # noinspection PyCompatibility
    from ConfigParser import SafeConfigParser as ConfigParser
except ImportError:
    # the ConfigParser module was renamed to configparser in Python 3
    # noinspection PyCompatibility
    from configparser import ConfigParser


# cache of docker image completion entries, populated on first use by get_docker_completion()
DOCKER_COMPLETION = {}

# temporary coverage injector directories, keyed by interpreter path (see get_coverage_path)
COVERAGE_PATHS = {}  # type: dict[str, str]


try:
    MAXFD = subprocess.MAXFD
except AttributeError:
    # subprocess.MAXFD is not available on Python 3
    MAXFD = -1
|
|
|
|
|
2017-10-26 09:21:46 +02:00
|
|
|
|
|
|
|
def get_docker_completion():
    """Return the docker completion config, loading and caching it on first use.

    :rtype: dict[str, str]
    """
    if not DOCKER_COMPLETION:
        lines = read_lines_without_comments('test/runner/completion/docker.txt', remove_blank_lines=True)
        parsed = (parse_docker_completion(line) for line in lines)
        DOCKER_COMPLETION.update(dict(entry for entry in parsed if entry))

    return DOCKER_COMPLETION
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2018-08-17 06:16:15 +02:00
|
|
|
def parse_docker_completion(value):
    """Parse one completion line into a (name, settings) pair.

    :type value: str
    :rtype: tuple[str, dict[str, str]] | None
    """
    parts = value.split()

    if not parts:
        return None

    settings = {}

    # everything after the name is a key=value setting; a bare key gets an empty value
    for item in parts[1:]:
        key, _sep, val = item.partition('=')
        settings[key] = val

    return parts[0], settings
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
def is_shippable():
    """Return True when running under the Shippable CI environment.

    :rtype: bool
    """
    shippable = os.environ.get('SHIPPABLE')

    return shippable == 'true'
|
|
|
|
|
|
|
|
|
|
|
|
def remove_file(path):
    """Remove the given file if it exists; missing paths and non-files are ignored.

    :type path: str
    """
    if not os.path.isfile(path):
        return

    os.remove(path)
|
|
|
|
|
|
|
|
|
2018-09-20 08:20:27 +02:00
|
|
|
def read_lines_without_comments(path, remove_blank_lines=False):
    """Read the given file and return its lines with trailing '#' comments stripped.

    :type path: str
    :type remove_blank_lines: bool
    :rtype: list[str]
    """
    with open(path, 'r') as path_fd:
        raw_lines = path_fd.read().splitlines()

    stripped = [re.sub(r' *#.*$', '', line) for line in raw_lines]

    if not remove_blank_lines:
        return stripped

    return [line for line in stripped if line]
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
def find_executable(executable, cwd=None, path=None, required=True):
    """Locate an executable, either relative to cwd or by searching the given PATH.

    :type executable: str
    :type cwd: str
    :type path: str
    :type required: bool | str
    :rtype: str | None
    """
    found = None
    real_cwd = os.getcwd()

    if not cwd:
        cwd = real_cwd

    access_mode = os.F_OK | os.X_OK

    if os.path.dirname(executable):
        # the name contains a path component; check only that location relative to cwd
        target = os.path.join(cwd, executable)

        if os.path.exists(target) and os.access(target, access_mode):
            found = executable
    else:
        if path is None:
            path = os.environ.get('PATH', os.path.defpath)

        directories = path.split(os.path.pathsep) if path else []
        searched = set()

        for directory in directories:
            if directory in searched:
                continue

            searched.add(directory)

            # substitute the requested cwd for PATH entries matching the real cwd
            if os.path.abspath(directory) == real_cwd:
                directory = cwd

            candidate = os.path.join(directory, executable)

            if os.path.exists(candidate) and os.access(candidate, access_mode):
                found = candidate
                break

    if not found and required:
        message = 'Required program "%s" not found.' % executable

        if required == 'warning':
            display.warning(message)
        else:
            raise ApplicationError(message)

    return found
|
|
|
|
|
|
|
|
|
2018-03-14 19:35:59 +01:00
|
|
|
def find_python(version, path=None):
    """Return the path to a Python interpreter matching the requested version.

    :type version: str
    :type path: str | None
    :rtype: str
    """
    version_info = tuple(int(part) for part in version.split('.'))

    # prefer the current interpreter when it matches and no explicit path was given
    if not path and sys.version_info[:len(version_info)] == version_info:
        return sys.executable

    return find_executable('python%s' % version, path=path)
|
|
|
|
|
|
|
|
|
|
|
|
def generate_pip_command(python):
    """Return the command used to invoke pip via the given Python interpreter.

    :type python: str
    :rtype: list[str]
    """
    # invoking pip as a module avoids depending on a pip script being on PATH
    pip_module = 'pip.__main__'

    return [python, '-m', pip_module]
|
|
|
|
|
|
|
|
|
2019-03-07 00:33:03 +01:00
|
|
|
def intercept_command(args, cmd, target_name, capture=False, env=None, data=None, cwd=None, python_version=None, path=None, coverage=None):
    """Run a command with the ansible-test coverage injector placed first on PATH.

    :type args: TestConfig
    :type cmd: collections.Iterable[str]
    :type target_name: str
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type python_version: str | None
    :type path: str | None
    :type coverage: bool | None
    :rtype: str | None, str | None
    """
    if not env:
        env = common_environment()

    if coverage is None:
        coverage = args.coverage

    cmd = list(cmd)
    version = python_version or args.python_version
    interpreter = find_python(version, path)
    inject_path = get_coverage_path(args, interpreter)
    config_path = os.path.join(inject_path, 'injector.json')

    # the coverage file name encodes command, target, label and interpreter so results can be merged later
    coverage_label = args.coverage_label or 'local-%s' % version
    coverage_name = '%s=%s=%s=%s=coverage' % (args.command, target_name, coverage_label, 'python-%s' % version)
    coverage_file = os.path.abspath(os.path.join(inject_path, '..', 'output', coverage_name))

    if args.coverage_check:
        # coverage checks only verify the tooling works; discard any collected data
        coverage_file = ''

    # placing the injector first on PATH lets it intercept interpreter invocations
    env['PATH'] = inject_path + os.path.pathsep + env['PATH']
    env['ANSIBLE_TEST_PYTHON_VERSION'] = version
    env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter

    if coverage:
        env['_ANSIBLE_COVERAGE_CONFIG'] = os.path.join(inject_path, '.coveragerc')
        env['_ANSIBLE_COVERAGE_OUTPUT'] = coverage_file

    config = dict(
        python_interpreter=interpreter,
        coverage_file=coverage_file if coverage else None,
    )

    if not args.explain:
        with open(config_path, 'w') as config_fd:
            json.dump(config, config_fd, indent=4, sort_keys=True)

    return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
|
|
|
|
|
|
|
|
|
2018-11-14 21:45:16 +01:00
|
|
|
def get_coverage_path(args, interpreter):
    """Create (or reuse) the temporary coverage injector directory for an interpreter.

    :type args: TestConfig
    :type interpreter: str
    :rtype: str
    """
    coverage_path = COVERAGE_PATHS.get(interpreter)

    if coverage_path:
        # already prepared for this interpreter; reuse it
        return os.path.join(coverage_path, 'coverage')

    prefix = 'ansible-test-coverage-'
    tmp_dir = '/tmp'

    if args.explain:
        # dry run: report a placeholder path without touching the filesystem
        return os.path.join(tmp_dir, '%stmp' % prefix, 'coverage')

    src = os.path.abspath(os.path.join(os.getcwd(), 'test/runner/injector/'))

    # readable/executable by everyone so the injector works for any user the tests run as
    shared_mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH

    coverage_path = tempfile.mkdtemp('', prefix, dir=tmp_dir)
    os.chmod(coverage_path, shared_mode)

    shutil.copytree(src, os.path.join(coverage_path, 'coverage'))
    shutil.copy('.coveragerc', os.path.join(coverage_path, 'coverage', '.coveragerc'))

    for root, dir_names, file_names in os.walk(coverage_path):
        for name in dir_names + file_names:
            os.chmod(os.path.join(root, name), shared_mode)

    # output and logs must be writable by any user running under the injector
    for directory in 'output', 'logs':
        os.mkdir(os.path.join(coverage_path, directory))
        os.chmod(os.path.join(coverage_path, directory), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)

    os.symlink(interpreter, os.path.join(coverage_path, 'coverage', 'python'))

    if not COVERAGE_PATHS:
        # first temporary directory created; register cleanup before recording it
        atexit.register(cleanup_coverage_dirs)

    COVERAGE_PATHS[interpreter] = coverage_path

    return os.path.join(coverage_path, 'coverage')
|
|
|
|
|
|
|
|
|
2018-11-14 02:19:25 +01:00
|
|
|
def cleanup_coverage_dirs():
    """Clean up all coverage directories."""
    for coverage_path in COVERAGE_PATHS.values():
        display.info('Cleaning up coverage directory: %s' % coverage_path, verbosity=2)
        cleanup_coverage_dir(coverage_path)
|
|
|
|
|
|
|
|
|
|
|
|
def cleanup_coverage_dir(coverage_path):
    """Copy over coverage data from temporary directory and purge temporary directory.

    :type coverage_path: str
    """
    results_root = os.path.join(os.getcwd(), 'test', 'results')

    output_dir = os.path.join(coverage_path, 'output')

    for name in os.listdir(output_dir):
        shutil.copy(os.path.join(output_dir, name), os.path.join(results_root, 'coverage'))

    logs_dir = os.path.join(coverage_path, 'logs')

    for name in os.listdir(logs_dir):
        # add a random suffix so logs from separate runs never collide in the results directory
        suffix = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
        log_name = '%s.%s.log' % (os.path.splitext(os.path.basename(name))[0], suffix)
        shutil.copy(os.path.join(logs_dir, name), os.path.join(results_root, 'logs', log_name))

    shutil.rmtree(coverage_path)
|
|
|
|
|
|
|
|
|
2017-01-26 22:07:10 +01:00
|
|
|
def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None,
                cmd_verbosity=1, str_errors='strict'):
    """Run a command, honoring the configured explain (dry run) mode unless always is set.

    :type args: CommonConfig
    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type always: bool
    :type stdin: file | None
    :type stdout: file | None
    :type cmd_verbosity: int
    :type str_errors: str
    :rtype: str | None, str | None
    """
    # always forces execution even when the caller requested a dry run
    return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd,
                       explain=args.explain and not always,
                       stdin=stdin, stdout=stdout,
                       cmd_verbosity=cmd_verbosity, str_errors=str_errors)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
2017-01-26 22:07:10 +01:00
|
|
|
def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None,
                cmd_verbosity=1, str_errors='strict'):
    """Run the given command, returning its output and raising SubprocessError on failure.

    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type explain: bool
    :type stdin: file | None
    :type stdout: file | None
    :type cmd_verbosity: int
    :type str_errors: str
    :rtype: str | None, str | None
    """
    if not cwd:
        cwd = os.getcwd()

    if not env:
        env = common_environment()

    cmd = list(cmd)

    escaped_cmd = ' '.join(pipes.quote(c) for c in cmd)

    display.info('Run command: %s' % escaped_cmd, verbosity=cmd_verbosity, truncate=True)
    display.info('Working directory: %s' % cwd, verbosity=2)

    # 'warning' makes a missing program non-fatal here; Popen reports the real failure below
    program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning')

    if program:
        display.info('Program found: %s' % program, verbosity=2)

    for key in sorted(env.keys()):
        display.info('%s=%s' % (key, env[key]), verbosity=2)

    if explain:
        # dry run requested; nothing was executed so there is no output
        return None, None

    communicate = False

    if stdin is not None:
        # a caller-supplied stdin stream takes precedence over any data string
        data = None
        communicate = True
    elif data is not None:
        stdin = subprocess.PIPE
        communicate = True

    if stdout:
        communicate = True

    if capture:
        stdout = stdout or subprocess.PIPE
        stderr = subprocess.PIPE
        communicate = True
    else:
        stderr = None

    start = time.time()

    process = None

    try:
        try:
            process = subprocess.Popen(cmd, env=env, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)
        except OSError as ex:
            if ex.errno == errno.ENOENT:
                raise ApplicationError('Required program "%s" not found.' % cmd[0])
            raise

        if communicate:
            encoding = 'utf-8'
            # surrogateescape round-trips bytes in data which are not valid utf-8
            data_bytes = data.encode(encoding, 'surrogateescape') if data else None
            stdout_bytes, stderr_bytes = process.communicate(data_bytes)
            stdout_text = stdout_bytes.decode(encoding, str_errors) if stdout_bytes else u''
            stderr_text = stderr_bytes.decode(encoding, str_errors) if stderr_bytes else u''
        else:
            process.wait()
            stdout_text, stderr_text = None, None
    finally:
        # on an unexpected exception the child may still be running; kill it rather than orphan it
        if process and process.returncode is None:
            process.kill()
            display.info('')  # the process we're interrupting may have completed a partial line of output
            display.notice('Killed command to avoid an orphaned child process during handling of an unexpected exception.')

    status = process.returncode
    runtime = time.time() - start

    display.info('Command exited with status %s after %s seconds.' % (status, runtime), verbosity=4)

    if status == 0:
        return stdout_text, stderr_text

    raise SubprocessError(cmd, status, stdout_text, stderr_text, runtime)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
def common_environment():
    """Common environment used for executing all programs."""
    env = dict(
        LC_ALL='en_US.UTF-8',
        PATH=os.environ.get('PATH', os.path.defpath),
    )

    # variables which must be present in the environment
    required = (
        'HOME',
    )

    # variables passed through only when present
    optional = (
        'HTTPTESTER',
        'LD_LIBRARY_PATH',
        'SSH_AUTH_SOCK',
        # MacOS High Sierra Compatibility
        # http://sealiesoftware.com/blog/archive/2017/6/5/Objective-C_and_fork_in_macOS_1013.html
        'OBJC_DISABLE_INITIALIZE_FORK_SAFETY',
        'ANSIBLE_KEEP_REMOTE_FILES',
    )

    env.update(pass_vars(required=required, optional=optional))

    return env
|
|
|
|
|
|
|
|
|
2017-05-18 19:37:53 +02:00
|
|
|
def pass_vars(required, optional):
    """Collect environment variables to pass through to child processes.

    :type required: collections.Iterable[str]
    :type optional: collections.Iterable[str]
    :rtype: dict[str, str]
    :raises MissingEnvironmentVariable: when a required variable is not set
    """
    env = {}

    for name in required:
        if name not in os.environ:
            raise MissingEnvironmentVariable(name)

        env[name] = os.environ[name]

    # optional variables are silently skipped when absent
    env.update((name, os.environ[name]) for name in optional if name in os.environ)

    return env
|
|
|
|
|
|
|
|
|
|
|
|
def deepest_path(path_a, path_b):
    """Return the deepest of two paths, or None if the paths are unrelated.

    Containment is decided on whole path components, so 'foo/bar' is under
    'foo' but 'foobar' is not (a plain prefix match would wrongly accept it).

    :type path_a: str
    :type path_b: str
    :rtype: str | None
    """
    # treat the current directory as the empty relative root
    if path_a == '.':
        path_a = ''

    if path_b == '.':
        path_b = ''

    if _is_sub_path(path_a, path_b):
        return path_a or '.'

    if _is_sub_path(path_b, path_a):
        return path_b or '.'

    return None


def _is_sub_path(child, parent):
    """Return True when child equals parent or lies beneath it, component-wise.

    :type child: str
    :type parent: str
    :rtype: bool
    """
    if not parent:
        return True  # everything is under the relative root

    if not child.startswith(parent):
        return False

    # require a component boundary to reject false prefix matches like 'foobar' vs 'foo'
    return len(child) == len(parent) or child[len(parent)] == '/'
|
|
|
|
|
|
|
|
|
|
|
|
def remove_tree(path):
    """Remove a directory tree; a missing path is ignored.

    :type path: str
    """
    try:
        shutil.rmtree(path)
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            return

        raise
|
|
|
|
|
|
|
|
|
|
|
|
def make_dirs(path):
    """Create a directory and any missing parents; an existing path is ignored.

    :type path: str
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST:
            return

        raise
|
|
|
|
|
|
|
|
|
2017-03-15 20:17:42 +01:00
|
|
|
def is_binary_file(path):
    """Guess whether a file is binary: by extension first, file contents as a fallback.

    :type path: str
    :rtype: bool
    """
    assume_text = frozenset((
        '.cfg',
        '.conf',
        '.crt',
        '.cs',
        '.css',
        '.html',
        '.ini',
        '.j2',
        '.js',
        '.json',
        '.md',
        '.pem',
        '.ps1',
        '.psm1',
        '.py',
        '.rst',
        '.sh',
        '.txt',
        '.xml',
        '.yaml',
        '.yml',
    ))

    assume_binary = frozenset((
        '.bin',
        '.eot',
        '.gz',
        '.ico',
        '.iso',
        '.jpg',
        '.otf',
        '.p12',
        '.png',
        '.pyc',
        '.rpm',
        '.ttf',
        '.woff',
        '.woff2',
        '.zip',
    ))

    extension = os.path.splitext(path)[1]

    if extension in assume_text:
        return False

    if extension in assume_binary:
        return True

    # unknown extension: a NUL byte in the first 1 KiB marks the file as binary
    with open(path, 'rb') as path_fd:
        return b'\0' in path_fd.read(1024)
|
|
|
|
|
|
|
|
|
2018-03-07 23:02:31 +01:00
|
|
|
def generate_password():
    """Generate a random password.

    :rtype: str
    """
    pools = [
        string.ascii_letters,
        string.digits,
        string.ascii_letters,
        string.digits,
        '-',
    ] * 4

    # draw one character per pool, skipping the final pool so the password never ends with '-'
    password = ''.join(random.choice(pool) for pool in pools[:-1])

    # register the value so it can be redacted from any console output
    display.sensitive.add(password)

    return password
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
class Display(object):
    """Manages color console output."""
    # ANSI escape sequences used for colorized output
    clear = '\033[0m'
    red = '\033[31m'
    green = '\033[32m'
    yellow = '\033[33m'
    blue = '\033[34m'
    purple = '\033[35m'
    cyan = '\033[36m'

    # color applied to info() messages by verbosity level; unknown levels fall back to yellow
    verbosity_colors = {
        0: None,
        1: green,
        2: blue,
        3: cyan,
    }

    def __init__(self):
        self.verbosity = 0  # messages above this verbosity are suppressed
        self.color = True  # set False to disable ANSI color output
        self.warnings = []  # every warning issued, replayed by review_warnings()
        self.warnings_unique = set()  # warnings issued with unique=True, used to suppress repeats
        self.info_stderr = False  # set True to route info() output to stderr instead of stdout
        self.rows = 0  # terminal height; remains 0 when stdin is not a tty
        self.columns = 0  # terminal width; remains 0 when stdin is not a tty
        self.truncate = 0  # truncation column for print_message(truncate=True); 0 disables it
        self.redact = False  # set True to mask values in self.sensitive when printing
        self.sensitive = set()  # values (such as generated passwords) to mask when redacting

        if os.isatty(0):
            # query the terminal size via ioctl on stdin; rows and columns are the first two fields
            self.rows, self.columns = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[:2]

    def __warning(self, message):
        """Print a warning message to stderr without recording it.

        :type message: str
        """
        self.print_message('WARNING: %s' % message, color=self.purple, fd=sys.stderr)

    def review_warnings(self):
        """Review all warnings which previously occurred."""
        if not self.warnings:
            return

        self.__warning('Reviewing previous %d warning(s):' % len(self.warnings))

        for warning in self.warnings:
            self.__warning(warning)

    def warning(self, message, unique=False):
        """Print and record a warning, optionally suppressing duplicates.

        :type message: str
        :type unique: bool
        """
        if unique:
            if message in self.warnings_unique:
                # this exact warning was already issued; skip it
                return

            self.warnings_unique.add(message)

        self.__warning(message)
        self.warnings.append(message)

    def notice(self, message):
        """Print a notice message to stderr.

        :type message: str
        """
        self.print_message('NOTICE: %s' % message, color=self.purple, fd=sys.stderr)

    def error(self, message):
        """Print an error message to stderr.

        :type message: str
        """
        self.print_message('ERROR: %s' % message, color=self.red, fd=sys.stderr)

    def info(self, message, verbosity=0, truncate=False):
        """Print an informational message when the configured verbosity is high enough.

        :type message: str
        :type verbosity: int
        :type truncate: bool
        """
        if self.verbosity >= verbosity:
            color = self.verbosity_colors.get(verbosity, self.yellow)
            self.print_message(message, color=color, fd=sys.stderr if self.info_stderr else sys.stdout, truncate=truncate)

    def print_message(self, message, color=None, fd=sys.stdout, truncate=False):  # pylint: disable=locally-disabled, invalid-name
        """Print a message, applying redaction, truncation and color as configured.

        :type message: str
        :type color: str | None
        :type fd: file
        :type truncate: bool
        """
        if self.redact and self.sensitive:
            # mask sensitive values (such as generated passwords) before any output
            for item in self.sensitive:
                message = message.replace(item, '*' * len(item))

        if truncate:
            # only truncate when a sensible limit is configured (see self.truncate)
            if len(message) > self.truncate > 5:
                message = message[:self.truncate - 5] + ' ...'

        if color and self.color:
            # convert color resets in message to desired color
            message = message.replace(self.clear, color)
            message = '%s%s%s' % (color, message, self.clear)

        if sys.version_info[0] == 2 and isinstance(message, type(u'')):
            # Python 2: encode unicode text before printing
            message = message.encode('utf-8')

        print(message, file=fd)
        fd.flush()
|
|
|
|
|
|
|
|
|
|
|
|
class ApplicationError(Exception):
    """General application error."""
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
class ApplicationWarning(Exception):
    """General application warning which interrupts normal program flow."""
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
class SubprocessError(ApplicationError):
    """Error resulting from failed subprocess execution."""
    def __init__(self, cmd, status=0, stdout=None, stderr=None, runtime=None):
        """
        :type cmd: list[str]
        :type status: int
        :type stdout: str | None
        :type stderr: str | None
        :type runtime: float | None
        """
        quoted_cmd = ' '.join(pipes.quote(c) for c in cmd)
        parts = ['Command "%s" returned exit status %s.\n' % (quoted_cmd, status)]

        # append the color reset after each stream so captured escape sequences do not bleed through
        if stderr:
            parts.append('>>> Standard Error\n')
            parts.append('%s%s\n' % (stderr.strip(), Display.clear))

        if stdout:
            parts.append('>>> Standard Output\n')
            parts.append('%s%s\n' % (stdout.strip(), Display.clear))

        super(SubprocessError, self).__init__(''.join(parts).strip())

        self.cmd = cmd
        self.status = status
        self.stdout = stdout
        self.stderr = stderr
        self.runtime = runtime
|
|
|
|
|
|
|
|
|
|
|
|
class MissingEnvironmentVariable(ApplicationError):
    """Error caused by missing environment variable."""
    def __init__(self, name):
        """
        :type name: str
        """
        message = 'Missing environment variable: %s' % name

        super(MissingEnvironmentVariable, self).__init__(message)

        self.name = name
|
|
|
|
|
|
|
|
|
|
|
|
class CommonConfig(object):
    """Configuration common to all commands."""
    def __init__(self, args, command):
        """
        :type args: any
        :type command: str
        """
        self.command = command  # name of the ansible-test command being run

        self.color = args.color  # type: bool
        self.explain = args.explain  # type: bool
        self.verbosity = args.verbosity  # type: int
        self.debug = args.debug  # type: bool
        self.truncate = args.truncate  # type: int
        self.redact = args.redact  # type: bool

        # always redact sensitive output when running under Shippable CI
        if is_shippable():
            self.redact = True

        self.cache = {}  # general-purpose cache for consumers of this config (usage defined elsewhere)
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2017-01-19 01:31:34 +01:00
|
|
|
def docker_qualify_image(name):
    """Resolve a short docker image name via the completion config.

    :type name: str
    :rtype: str
    """
    completion = get_docker_completion()
    config = completion.get(name, {})

    # fall back to the name as given when no completion entry exists
    return config.get('name', name)
|
2017-01-19 01:31:34 +01:00
|
|
|
|
|
|
|
|
2019-01-10 23:43:21 +01:00
|
|
|
@contextlib.contextmanager
def named_temporary_file(args, prefix, suffix, directory, content):
    """Context manager yielding the path of a temporary file holding the given content.

    :param args: CommonConfig
    :param prefix: str
    :param suffix: str
    :param directory: str
    :param content: str | bytes | unicode
    :rtype: str
    """
    payload = content if isinstance(content, bytes) else content.encode('utf-8')

    if args.explain:
        # dry run: report a plausible path without creating anything
        yield os.path.join(directory, '%stemp%s' % (prefix, suffix))
        return

    with tempfile.NamedTemporaryFile(prefix=prefix, suffix=suffix, dir=directory) as tempfile_fd:
        tempfile_fd.write(payload)
        tempfile_fd.flush()

        yield tempfile_fd.name
|
|
|
|
|
|
|
|
|
2018-09-21 07:20:08 +02:00
|
|
|
def parse_to_list_of_dict(pattern, value):
    """Match the pattern against each line of value and return the named-group dicts.

    :type pattern: str
    :type value: str
    :return: list[dict[str, str]]
    :raises Exception: when any line fails to match the pattern
    """
    matched = []
    unmatched = []

    for line in value.splitlines():
        found = re.search(pattern, line)

        if found:
            matched.append(found.groupdict())
        else:
            unmatched.append(line)

    if unmatched:
        raise Exception('Pattern "%s" did not match values:\n%s' % (pattern, '\n'.join(unmatched)))

    return matched
|
2017-04-13 19:28:52 +02:00
|
|
|
|
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
def get_available_port():
    """Ask the kernel for an unused TCP port and return it.

    :rtype: int
    """
    # this relies on the kernel not reusing previously assigned ports immediately
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    with contextlib.closing(sock):
        sock.bind(('', 0))

        return sock.getsockname()[1]
|
|
|
|
|
|
|
|
|
2017-05-05 10:23:00 +02:00
|
|
|
def get_subclasses(class_type):
    """Return all non-abstract subclasses of the given class, recursively.

    :type class_type: type
    :rtype: set[str]
    """
    found = set()
    pending = [class_type]

    while pending:
        current = pending.pop()

        for subclass in current.__subclasses__():
            if subclass in found:
                continue

            # abstract classes are traversed but not collected
            if not inspect.isabstract(subclass):
                found.add(subclass)

            pending.append(subclass)

    return found
|
|
|
|
|
|
|
|
|
|
|
|
def import_plugins(directory):
    """Import every module found in the given plugin directory.

    :type directory: str
    """
    plugin_path = os.path.join(os.path.dirname(__file__), directory)
    module_prefix = 'lib.%s.' % directory

    for module_info in pkgutil.iter_modules([plugin_path], prefix=module_prefix):
        __import__(module_info[1])
|
|
|
|
|
|
|
|
|
|
|
|
def load_plugins(base_type, database):
    """Register every subclass of base_type in database, keyed by plugin module name.

    :type base_type: type
    :type database: dict[str, type]
    """
    # the plugin name is the third component of the module path: lib.<directory>.<plugin>
    for subclass in get_subclasses(base_type):
        database[subclass.__module__.split('.')[2]] = subclass
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
# shared module-level Display instance used for all console output
display = Display()  # pylint: disable=locally-disabled, invalid-name
|