1
0
Fork 0
mirror of https://github.com/ansible-collections/community.general.git synced 2024-09-14 20:13:21 +02:00

Misc cleanup in the runner module: split some helpers out into utils and broke large functions up into smaller ones.

This commit is contained in:
Michael DeHaan 2012-03-21 23:39:09 -04:00
parent fef73393f0
commit 6a7aac38c5
3 changed files with 259 additions and 337 deletions

View file

@ -295,7 +295,7 @@ class PlayBook(object):
remote_user=remote_user,
setup_cache=SETUP_CACHE,
basedir=self.basedir,
conditionally_execute_if=only_if
conditional=only_if
)
if async_seconds == 0:

View file

@ -33,9 +33,6 @@ import ansible.connection
from ansible import utils
from ansible import errors
# should be True except in debug
CLEANUP_FILES = True
################################################
def _executor_hook(job_queue, result_queue):
@ -62,43 +59,23 @@ class Runner(object):
_external_variable_script = None
def __init__(self,
host_list=C.DEFAULT_HOST_LIST,
module_path=C.DEFAULT_MODULE_PATH,
module_name=C.DEFAULT_MODULE_NAME,
module_args=C.DEFAULT_MODULE_ARGS,
forks=C.DEFAULT_FORKS,
timeout=C.DEFAULT_TIMEOUT,
pattern=C.DEFAULT_PATTERN,
remote_user=C.DEFAULT_REMOTE_USER,
remote_pass=C.DEFAULT_REMOTE_PASS,
background=0,
basedir=None,
setup_cache=None,
transport='paramiko',
conditionally_execute_if='True',
verbose=False):
'''
Constructor
host_list -- file on disk listing hosts to manage, or an array of hostnames
pattern ------ a fnmatch pattern selecting some of the hosts in host_list
module_path -- location of ansible library on disk
module_name -- which module to run
module_args -- arguments to pass to module
forks -------- how parallel should we be? 1 is extra debuggable.
remote_user -- who to login as (default root)
remote_pass -- provide only if you don't want to use keys or ssh-agent
background --- if non 0, run async, failing after X seconds, -1 == infinite
setup_cache -- used only by playbook (complex explanation pending)
'''
def __init__(self, host_list=C.DEFAULT_HOST_LIST, module_path=C.DEFAULT_MODULE_PATH,
module_name=C.DEFAULT_MODULE_NAME, module_args=C.DEFAULT_MODULE_ARGS,
forks=C.DEFAULT_FORKS, timeout=C.DEFAULT_TIMEOUT, pattern=C.DEFAULT_PATTERN,
remote_user=C.DEFAULT_REMOTE_USER, remote_pass=C.DEFAULT_REMOTE_PASS,
background=0, basedir=None, setup_cache=None, transport='paramiko',
conditional='True', verbose=False):
if setup_cache is None:
setup_cache = {}
self.setup_cache = setup_cache
self.conditionally_execute_if = conditionally_execute_if
if basedir is None:
basedir = os.getcwd()
self.generated_jid = str(random.randint(0, 999999999999))
self.connector = ansible.connection.Connection(self, transport)
self.host_list, self.groups = self.parse_hosts(host_list)
self.setup_cache = setup_cache
self.conditional = conditional
self.module_path = module_path
self.module_name = module_name
self.forks = int(forks)
@ -109,44 +86,20 @@ class Runner(object):
self.remote_user = remote_user
self.remote_pass = remote_pass
self.background = background
if basedir is None:
basedir = os.getcwd()
self.basedir = basedir
# hosts in each group name in the inventory file
self._tmp_paths = {}
random.seed()
self.generated_jid = str(random.randint(0, 999999999999))
self.connector = ansible.connection.Connection(self, transport)
# *****************************************************
@classmethod
def parse_hosts(cls, host_list):
'''
parse the host inventory file, returns (hosts, groups)
[groupname]
host1
host2
'''
def parse_hosts_from_regular_file(cls, host_list, results, groups):
''' parse a textual host file '''
if type(host_list) == list:
return (host_list, {})
host_list = os.path.expanduser(host_list)
if not os.path.exists(host_list):
raise errors.AnsibleFileNotFound("inventory file not found: %s" % host_list)
results = []
groups = { 'ungrouped' : [] }
if not os.access(host_list, os.X_OK):
# it's a regular file
lines = file(host_list).read().split("\n")
group_name = 'ungrouped'
results = []
for item in lines:
item = item.lstrip().rstrip()
if item.startswith("#"):
@ -160,11 +113,15 @@ class Runner(object):
# looks like a regular host
groups[group_name].append(item)
results.append(item)
else:
# *****************************************************
@classmethod
def parse_hosts_from_script(cls, host_list, results, groups):
''' evaluate a script that returns list of hosts by groups '''
host_list = os.path.abspath(host_list)
cls._external_variable_script = host_list
# it's a script -- expect a return of a JSON hash with group names keyed
# to lists of hosts
cmd = subprocess.Popen([host_list], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
out, err = cmd.communicate()
try:
@ -176,6 +133,25 @@ class Runner(object):
if host not in results:
results.append(host)
# *****************************************************
@classmethod
def parse_hosts(cls, host_list):
    ''' parse the host inventory file, returns (hosts, groups) '''
    # an already-parsed python list is used as the host list directly;
    # no group information is available in that case
    if type(host_list) == list:
        return (host_list, {})
    host_list = os.path.expanduser(host_list)
    if not os.path.exists(host_list):
        raise errors.AnsibleFileNotFound("inventory file not found: %s" % host_list)
    # both helpers below fill results/groups in place rather than returning them
    results = []
    groups = dict(ungrouped=[])
    if not os.access(host_list, os.X_OK):
        # non-executable: a plain text inventory file
        Runner.parse_hosts_from_regular_file(host_list, results, groups)
    else:
        # executable: a script whose output lists hosts by group
        Runner.parse_hosts_from_script(host_list, results, groups)
    return (results, groups)
# *****************************************************
@ -192,13 +168,9 @@ class Runner(object):
pattern = pattern.replace(";",":")
subpatterns = pattern.split(":")
for subpattern in subpatterns:
# the pattern could be a real glob
if subpattern == 'all':
if subpattern == 'all' or fnmatch.fnmatch(host_name, subpattern):
return True
if fnmatch.fnmatch(host_name, subpattern):
return True
# or it could be a literal group name instead
if subpattern in self.groups:
elif subpattern in self.groups:
if host_name in self.groups[subpattern]:
return True
return False
@ -206,11 +178,7 @@ class Runner(object):
# *****************************************************
def _connect(self, host):
'''
obtains a connection to the host.
on success, returns (True, connection)
on failure, returns (False, traceback str)
'''
''' connects to a host, returns (is_successful, connection_object OR traceback_string) '''
try:
return [ True, self.connector.connect(host) ]
@ -223,14 +191,11 @@ class Runner(object):
''' helper function to handle JSON parsing of results '''
try:
# try to parse the JSON response
result = utils.parse_json(result)
if executed is not None:
result['invocation'] = executed
return [ host, True, result ]
except Exception, e:
# it failed to parse, say so, but return the string anyway so
# it can be debugged
return [ host, False, "%s/%s/%s" % (str(e), result, executed) ]
# *****************************************************
@ -243,7 +208,6 @@ class Runner(object):
for filename in files:
if not filename.startswith('/tmp/'):
raise Exception("not going to happen")
if CLEANUP_FILES:
self._exec_command(conn, "rm -rf %s" % filename)
# *****************************************************
@ -256,10 +220,7 @@ class Runner(object):
# *****************************************************
def _transfer_module(self, conn, tmp, module):
'''
transfers a module file to the remote side to execute it,
but does not execute it yet
'''
''' transfers a module file to the remote side to execute it, but does not execute it yet '''
outpath = self._copy_module(conn, tmp, module)
self._exec_command(conn, "chmod +x %s" % outpath)
@ -268,58 +229,25 @@ class Runner(object):
# *****************************************************
def _transfer_argsfile(self, conn, tmp, args_str):
'''
transfer arguments as a single file to be fed to the module.
this is to avoid various shell things being eaten by SSH
'''
''' transfer arguments as a single file to be fed to the module. '''
args_fd, args_file = tempfile.mkstemp()
args_fo = os.fdopen(args_fd, 'w')
args_fo.write(args_str)
args_fo.flush()
args_fo.close()
args_remote = os.path.join(tmp, 'arguments')
self._transfer_file(conn, args_file, args_remote)
if CLEANUP_FILES:
os.unlink(args_file)
return args_remote
# *****************************************************
def _execute_module(self, conn, tmp, remote_module_path, module_args,
async_jid=None, async_module=None, async_limit=None):
'''
runs a module that has already been transferred, but first
modifies the command using setup_cache variables (see playbook)
'''
def _add_variables_from_script(self, conn, inject):
''' support per system variabes from external variable scripts, see web docs '''
args = module_args
if type(args) == list:
if remote_module_path.endswith('setup'):
# briefly converting arguments to strings before file transfer
# causes some translation errors. This is a workaround only
# needed for the setup module
args = " ".join([ "\"%s\"" % str(x) for x in module_args ])
else:
args = " ".join([ str(x) for x in module_args ])
# by default the args to substitute in the action line are those from the setup cache
inject_vars = self.setup_cache.get(conn.host,{})
# see if we really need to run this or not...
# doubly templated so we can store a conditional expression in a variable!
conditional = utils.template(
utils.template(self.conditionally_execute_if, inject_vars),
inject_vars
)
if not eval(conditional):
return [ utils.smjson(dict(skipped=True)), 'skipped' ]
# if the host file was an external script, execute it with the hostname
# as a first parameter to get the variables to use for the host
inject2 = {}
if Runner._external_variable_script is not None:
host = conn.host
cmd = subprocess.Popen([Runner._external_variable_script, host],
stdout=subprocess.PIPE,
@ -336,25 +264,62 @@ class Runner(object):
host
))
# store injected variables in the templates
inject_vars.update(inject2)
inject.update(inject2)
if self.module_name == 'setup':
for (k,v) in inject_vars.iteritems():
# *****************************************************
def _add_setup_vars(self, inject, args):
''' setup module variables need special handling '''
for (k,v) in inject.iteritems():
if not k.startswith('facter_') and not k.startswith('ohai_'):
if str(v).find(" ") != -1:
v = "\"%s\"" % v
args += " %s=%s" % (k, str(v).replace(" ","~~~"))
# the metadata location for the setup module is transparently managed
# since it's an 'internals' module, kind of a black box. See playbook
# other modules are not allowed to have this kind of handling
if remote_module_path.endswith("/setup") and args.find("metadata=") == -1:
# *****************************************************
def _add_setup_metadata(self, args):
''' automatically determine where to store variables for the setup module '''
if args.find("metadata=") == -1:
if self.remote_user == 'root':
args = "%s metadata=/etc/ansible/setup" % args
else:
args = "%s metadata=~/.ansible/setup" % args
args = utils.template(args, inject_vars)
# *****************************************************
def _coerce_args_to_string(self, args, remote_module_path):
''' final arguments must always be made a string '''
if type(args) == list:
if remote_module_path.endswith('setup'):
# quote long strings so setup module gets them unscathed
args = " ".join([ "\"%s\"" % str(x) for x in args ])
else:
args = " ".join([ str(x) for x in args ])
return args
# *****************************************************
def _execute_module(self, conn, tmp, remote_module_path, args,
async_jid=None, async_module=None, async_limit=None):
''' runs a module that has already been transferred '''
args = self._coerce_args_to_string(args, remote_module_path)
inject = self.setup_cache.get(conn.host,{})
conditional = utils.double_template(self.conditional, inject)
if not eval(conditional):
return [ utils.smjson(dict(skipped=True)), 'skipped' ]
if Runner._external_variable_script is not None:
self._add_variables_from_script(conn, inject)
if self.module_name == 'setup':
self._add_setup_vars(inject, args)
self._add_setup_metadata(args)
args = utils.template(args, inject)
module_name_tail = remote_module_path.split("/")[-1]
client_executed_str = "%s %s" % (module_name_tail, args.strip())
@ -362,34 +327,14 @@ class Runner(object):
if async_jid is None:
cmd = "%s %s" % (remote_module_path, argsfile)
else:
args = [str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]]
cmd = " ".join(args)
result = self._exec_command(conn, cmd)
return [ result, client_executed_str ]
cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
return [ self._exec_command(conn, cmd), client_executed_str ]
# *****************************************************
def _execute_normal_module(self, conn, host, tmp, module_name):
'''
transfer & execute a module that is not 'copy' or 'template'
because those require extra work.
'''
def _add_result_to_setup_cache(self, conn, result):
''' allows discovered variables to be used in templates and action statements '''
# hack to make the 'shell' module keyword really be executed
# by the command module
module_args = self.module_args
if module_name == 'shell':
module_name = 'command'
module_args.append("#USE_SHELL")
module = self._transfer_module(conn, tmp, module_name)
(result, executed) = self._execute_module(conn, tmp, module, module_args)
# when running the setup module, which pushes vars to the host and ALSO
# returns them (+factoids), store the variables that were returned such that commands
# run AFTER setup use these variables for templating when executed
# from playbooks
if module_name == 'setup':
host = conn.host
try:
var_result = utils.parse_json(result)
@ -405,15 +350,28 @@ class Runner(object):
if not k in self.setup_cache[host]:
self.setup_cache[host][k] = v
# *****************************************************
def _execute_normal_module(self, conn, host, tmp, module_name):
''' transfer & execute a module that is not 'copy' or 'template' '''
# shell and command are the same module
if module_name == 'shell':
module_name = 'command'
self.module_args.append("#USE_SHELL")
module = self._transfer_module(conn, tmp, module_name)
(result, executed) = self._execute_module(conn, tmp, module, self.module_args)
if module_name == 'setup':
self._add_result_to_setup_cache(conn, result)
return self._return_from_module(conn, host, result, executed)
# *****************************************************
def _execute_async_module(self, conn, host, tmp, module_name):
'''
transfer the given module name, plus the async module
and then run the async module wrapping the other module
'''
''' transfer the given module name, plus the async module, then run it '''
# hack to make the 'shell' module keyword really be executed
# by the command module
@ -434,30 +392,16 @@ class Runner(object):
# *****************************************************
def _parse_kv(self, args):
# FIXME: move to utils
''' helper function to convert a string of key/value items to a dict '''
options = {}
for x in args:
if x.find("=") != -1:
k, v = x.split("=")
options[k]=v
return options
# *****************************************************
def _execute_copy(self, conn, host, tmp):
''' handler for file transfer operations '''
# load up options
options = self._parse_kv(self.module_args)
options = utils.parse_kv(self.module_args)
source = options['src']
dest = options['dest']
# transfer the file to a remote tmp location
tmp_path = tmp
tmp_src = tmp_path + source.split('/')[-1]
tmp_src = tmp + source.split('/')[-1]
self._transfer_file(conn, utils.path_dwim(self.basedir, source), tmp_src)
# install the copy module
@ -467,26 +411,29 @@ class Runner(object):
# run the copy module
args = [ "src=%s" % tmp_src, "dest=%s" % dest ]
(result1, executed) = self._execute_module(conn, tmp, module, args)
results1 = self._return_from_module(conn, host, result1, executed)
(host, ok, data) = results1
(host, ok, data) = self._return_from_module(conn, host, result1, executed)
# magically chain into the file module
if ok:
# unless failed, run the file module to adjust file aspects
return self._chain_file_module(conn, tmp, data, options, executed)
else:
return results1
# *****************************************************
def _chain_file_module(self, conn, tmp, data, options, executed):
''' handles changing file attribs after copy/template operations '''
old_changed = data.get('changed', False)
module = self._transfer_module(conn, tmp, 'file')
args = [ "%s=%s" % (k,v) for (k,v) in options.items() ]
(result2, executed2) = self._execute_module(conn, tmp, module, args)
results2 = self._return_from_module(conn, host, result2, executed)
results2 = self._return_from_module(conn, conn.host, result2, executed)
(host, ok, data2) = results2
new_changed = data2.get('changed', False)
data.update(data2)
if old_changed or new_changed:
data['changed'] = True
return (host, ok, data)
else:
# copy failed, return orig result without going through 'file' module
return results1
# *****************************************************
@ -494,7 +441,7 @@ class Runner(object):
''' handler for template operations '''
# load up options
options = self._parse_kv(self.module_args)
options = utils.parse_kv(self.module_args)
source = options['src']
dest = options['dest']
metadata = options.get('metadata', None)
@ -506,9 +453,7 @@ class Runner(object):
metadata = '~/.ansible/setup'
# first copy the source template over
tpath = tmp
tempname = os.path.split(source)[-1]
temppath = tpath + tempname
temppath = tmp + os.path.split(source)[-1]
self._transfer_file(conn, utils.path_dwim(self.basedir, source), temppath)
# install the template module
@ -517,41 +462,17 @@ class Runner(object):
# run the template module
args = [ "src=%s" % temppath, "dest=%s" % dest, "metadata=%s" % metadata ]
(result1, executed) = self._execute_module(conn, tmp, template_module, args)
results1 = self._return_from_module(conn, host, result1, executed)
(host, ok, data) = results1
(host, ok, data) = self._return_from_module(conn, host, result1, executed)
# magically chain into the file module
if ok:
# unless failed, run the file module to adjust file aspects
old_changed = data.get('changed', False)
module = self._transfer_module(conn, tmp, 'file')
args = [ "%s=%s" % (k,v) for (k,v) in options.items() ]
(result2, executed2) = self._execute_module(conn, tmp, module, args)
results2 = self._return_from_module(conn, host, result2, executed)
(host, ok, data2) = results2
new_changed = data2.get('changed', False)
data.update(data2)
if old_changed or new_changed:
data['changed'] = True
return (host, ok, data)
return self._chain_file_module(conn, tmp, data, options, executed)
else:
# copy failed, return orig result without going through 'file' module
return results1
# *****************************************************
def _executor(self, host):
'''
callback executed in parallel for each host.
returns (hostname, connected_ok, extra)
where extra is the result of a successful connect
or a traceback string
'''
# depending on whether it's a normal module,
# or a request to use the copy or template
# module, call the appropriate executor function
''' callback executed in parallel for each host. returns (hostname, connected_ok, extra) '''
ok, conn = self._connect(host)
if not ok:
@ -562,24 +483,18 @@ class Runner(object):
tmp = self._get_tmp_path(conn)
result = None
if self.module_name not in [ 'copy', 'template' ]:
if self.module_name == 'copy':
result = self._execute_copy(conn, host, tmp)
elif self.module_name == 'template':
result = self._execute_template(conn, host, tmp)
else:
if self.background == 0:
result = self._execute_normal_module(conn, host, tmp, module_name)
else:
result = self._execute_async_module(conn, host, tmp, module_name)
elif self.module_name == 'copy':
result = self._execute_copy(conn, host, tmp)
elif self.module_name == 'template':
result = self._execute_template(conn, host, tmp)
else:
# this would be a coding error in THIS module
# shouldn't occur
raise Exception("???")
self._delete_remote_files(conn, tmp)
conn.close()
return result
# *****************************************************
@ -619,41 +534,27 @@ class Runner(object):
# *****************************************************
def match_hosts(self, pattern):
def _match_hosts(self, pattern):
''' return all matched hosts fitting a pattern '''
return [ h for h in self.host_list if self._matches(h, pattern) ]
# *****************************************************
def run(self):
''' xfer & run module on all matched hosts '''
def _parallel_exec(self, hosts):
''' handles mulitprocessing when more than 1 fork is required '''
# find hosts that match the pattern
hosts = self.match_hosts(self.pattern)
if len(hosts) == 0:
return {
'contacted' : {},
'dark' : {}
}
# attack pool of hosts in N forks
# _executor_hook does all of the work
hosts = [ (self,x) for x in hosts ]
if self.forks > 1:
job_queue = multiprocessing.Manager().Queue()
result_queue = multiprocessing.Manager().Queue()
for i in hosts:
job_queue.put(i)
[job_queue.put(i) for i in hosts]
workers = []
for i in range(self.forks):
tmp = multiprocessing.Process(target=_executor_hook,
prc = multiprocessing.Process(target=_executor_hook,
args=(job_queue, result_queue))
tmp.start()
workers.append(tmp)
prc.start()
workers.append(prc)
try:
for worker in workers:
@ -666,34 +567,44 @@ class Runner(object):
results = []
while not result_queue.empty():
results.append(result_queue.get(block=False))
return results
else:
results = [ x._executor(h) for (x,h) in hosts ]
# *****************************************************
# sort hosts by ones we successfully contacted
# and ones we did not so that we can return a
# dictionary containing results of everything
def _partition_results(self, results):
''' seperate results by ones we contacted & ones we didn't '''
results2 = {
"contacted" : {},
"dark" : {}
}
hosts_with_results = []
for x in results:
(host, is_ok, result) = x
hosts_with_results.append(host)
if not is_ok:
results2["dark"][host] = result
else:
results2 = dict(contacted={}, dark={})
for result in results:
(host, contacted_ok, result) = result
if contacted_ok:
results2["contacted"][host] = result
# hosts which were contacted but never got a chance
# to return a result before we exited/ctrl-c'd
# perhaps these shouldn't be 'dark' but I'm not sure if they fit
# anywhere else.
for host in self.match_hosts(self.pattern):
if host not in hosts_with_results:
else:
results2["dark"][host] = result
# hosts which were contacted but never got a chance to return
for host in self._match_hosts(self.pattern):
if not (host in results2['dark'] or host in results2['contacted']):
results2["dark"][host] = {}
return results2
# *****************************************************
def run(self):
    ''' xfer & run module on all matched hosts '''
    # find hosts that match the pattern
    hosts = self._match_hosts(self.pattern)
    if len(hosts) == 0:
        return dict(contacted={}, dark={})
    # pair each host with this runner so the executor can unpack both
    hosts = [ (self,x) for x in hosts ]
    if self.forks > 1:
        # BUGFIX: _parallel_exec takes only the host list; 'results' was
        # previously passed as a second argument before it was ever
        # assigned, which raised NameError
        results = self._parallel_exec(hosts)
    else:
        results = [ x._executor(h) for (x,h) in hosts ]
    return self._partition_results(results)

View file

@ -272,6 +272,9 @@ def template(text, vars):
template = jinja2.Template(text)
return template.render(vars)
def double_template(text, vars):
    ''' run text through the templating engine twice, so that a variable may
    itself hold a template expression (used to store conditional
    expressions in variables) '''
    return template(template(text, vars), vars)
def template_from_file(path, vars):
''' run a file through the templating engine '''
data = file(path).read()
@ -287,4 +290,12 @@ def parse_yaml_from_file(path):
raise errors.AnsibleError("file not found: %s" % path)
return parse_yaml(data)
def parse_kv(args):
    ''' convert a list of key=value items to a dict; items with no '=' are skipped '''
    options = {}
    for x in args:
        if x.find("=") != -1:
            # split on the first '=' only, so values may themselves contain
            # '=' characters (e.g. "src=a=b"); splitting on all of them
            # raised ValueError on the tuple unpack
            k, v = x.split("=", 1)
            options[k] = v
    return options