mirror of https://github.com/ansible-collections/community.general.git
Merge branch 'master' into localconnection
Merge the SortedOptParser bits and debug attribute commits into localconnection.

Conflicts:
	bin/ansible
	lib/ansible/playbook.py
	lib/ansible/runner.py
	lib/ansible/utils.py
commit 0675f2511b
14 changed files with 247 additions and 483 deletions
bin/ansible (22 lines changed)

@@ -47,24 +47,12 @@ class Cli(object):
     def parse(self):
         ''' create an options parser for bin/ansible '''

-        options = {
-            '-a' : dict(long='--args', dest='module_args',
-                help="module arguments", default=C.DEFAULT_MODULE_ARGS),
-            '-m' : dict(long='--module-name', dest='module_name',
-                help="module name to execute", default=C.DEFAULT_MODULE_NAME)
-        }
-
-        parser = utils.make_parser(
-            options,
-            usage='ansible <host-pattern> [options]',
-            runas_opts=True,
-            async_opts=True,
-            output_opts=True,
-            connect_opts=True,
-        )
-
+        parser = utils.base_parser(constants=C, runas_opts=True, async_opts=True,
+            output_opts=True, connect_opts=True, usage='%prog <host-pattern> [options]')
+        parser.add_option('-a', '--args', dest='module_args',
+            help="module arguments", default=C.DEFAULT_MODULE_ARGS)
+        parser.add_option('-m', '--module-name', dest='module_name',
+            help="module name to execute", default=C.DEFAULT_MODULE_NAME)
         options, args = parser.parse_args()
         self.callbacks.options = options

bin/ansible-playbook

@@ -32,18 +32,19 @@ def main(args):
     ''' run ansible-playbook operations '''

     # create parser for CLI options
-    usage = "ansible-playbook playbook.yml [options]"
-    options = {
-        '-e' : dict(long='--extra-vars', dest='extra_vars',
-            help='pass in extra key=value variables from outside the playbook'),
-        '-O' : dict(long='--override-hosts', dest="override_hosts", default=None,
-            help="run playbook against only hosts, ignorning the inventory file")
-    }
-    parser = utils.make_parser(options, constants=C, usage=usage)
+    usage = "%prog playbook.yml"
+    parser = utils.base_parser(constants=C, usage=usage)
+    parser.add_option('-e', '--extra-vars', dest='extra_vars',
+        help='arguments to pass to the inventory script')
+    parser.add_option('-O', '--override-hosts', dest="override_hosts", default=None,
+        help="run playbook against these hosts regardless of inventory settings")
     options, args = parser.parse_args(args)

     if len(args) == 0:
-        print >> sys.stderr, "playbook path is a required argument"
+        parser.print_help(file=sys.stderr)
+        #QUESTION for M.D. This would match bin/ansible's behavior. Do we want them consistent?
+        #parser.print_help()
         return 1

     sshpass = None

lib/ansible/callbacks.py

@@ -95,6 +95,9 @@ class DefaultRunnerCallbacks(object):
     def on_unreachable(self, host, res):
         pass

+    def on_no_hosts(self):
+        pass
+
 ########################################################################

 class CliRunnerCallbacks(DefaultRunnerCallbacks):

@@ -121,6 +124,9 @@ class CliRunnerCallbacks(DefaultRunnerCallbacks):
     def on_error(self, host, err):
         print >>sys.stderr, "stderr: [%s] => %s\n" % (host, err)

+    def on_no_hosts(self):
+        print >>sys.stderr, "no hosts matched\n"
+
     def _on_any(self, host, result):
         print utils.host_report_msg(host, self.options.module_name, result, self.options.one_line)
         if self.options.tree:

@@ -159,6 +165,9 @@ class PlaybookRunnerCallbacks(DefaultRunnerCallbacks):
     def on_skipped(self, host):
         print "skipping: [%s]\n" % host

+    def on_no_hosts(self):
+        print "no hosts matched or remaining\n"
+
 ########################################################################

 class PlaybookCallbacks(object):

lib/ansible/connection.py

@@ -18,7 +18,13 @@

 ################################################

-import paramiko
+import warnings
+# prevent paramiko warning noise
+# see http://stackoverflow.com/questions/3920502/
+with warnings.catch_warnings():
+    warnings.simplefilter("ignore")
+    import paramiko
+
 import traceback
 import os
 import time

@@ -142,6 +148,15 @@ class ParamikoConnection(object):
             raise errors.AnsibleError("failed to transfer file to %s" % out_path)
         sftp.close()

+    def fetch_file(self, in_path, out_path):
+        sftp = self.ssh.open_sftp()
+        try:
+            sftp.get(in_path, out_path)
+        except IOError:
+            traceback.print_exc()
+            raise errors.AnsibleError("failed to transfer file from %s" % in_path)
+        sftp.close()
+
     def close(self):
         ''' terminate the connection '''

@@ -184,6 +199,10 @@ class LocalConnection(object):
             traceback.print_exc()
             raise errors.AnsibleError("failed to transfer file to %s" % out_path)

+    def fetch_file(self, in_path, out_path):
+        ''' fetch a file from local to local -- for copatibility '''
+        self.put_file(in_path, out_path)
+
     def close(self):
         ''' terminate the connection; nothing to do here '''

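Note (not part of the commit): the import hunk above silences paramiko's import-time warning noise with warnings.catch_warnings(). A minimal standalone sketch of that pattern, with json standing in for paramiko so the snippet runs anywhere:

    import warnings

    # Filters changed inside catch_warnings() are restored when the block
    # exits, so only the noisy import itself is silenced, not the rest of
    # the program's warning handling.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        import json as quiet_dependency   # stand-in for "import paramiko"
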
lib/ansible/playbook.py

@@ -439,24 +439,17 @@ class PlayBook(object):
         else:
             self.callbacks.on_setup_primary()

-        # first run the setup task on every node, which gets the variables
-        # written to the JSON file and will also bubble facts back up via
-        # magic in Runner()
-        push_var_str=''
-        for (k,v) in vars.iteritems():
-            push_var_str += "%s=\"%s\" " % (k,v)
-
         host_list = [ h for h in self.host_list if not (h in self.stats.failures or h in self.stats.dark) ]

         # push any variables down to the system
         setup_results = ansible.runner.Runner(
             pattern=pattern, groups=self.groups, module_name='setup',
-            module_args=push_var_str, host_list=host_list,
+            module_args=vars, host_list=host_list,
             forks=self.forks, module_path=self.module_path,
             timeout=self.timeout, remote_user=user,
             remote_pass=self.remote_pass, remote_port=self.remote_port,
             setup_cache=SETUP_CACHE,
-            callbacks=self.runner_callbacks, sudo=sudo,
+            callbacks=self.runner_callbacks, sudo=sudo, debug=self.debug,
             transport=transport,
         ).run()
         self.stats.compute(setup_results, setup=True)

lib/ansible/runner.py

@@ -119,8 +119,8 @@ class Runner(object):
         euid = pwd.getpwuid(os.geteuid())[0]
         if self.transport == 'local' and self.remote_user != euid:
             raise Exception("User mismatch: expected %s, but is %s" % (self.remote_user, euid))
-        if type(self.module_args) != str:
-            raise Exception("module_args must be a string: %s" % self.module_args)
+        if type(self.module_args) != str and type(self.module_args) != dict:
+            raise Exception("module_args must be a string or dict: %s" % self.module_args)

         self._tmp_paths = {}
         random.seed()

@@ -277,6 +277,9 @@ class Runner(object):
     def _transfer_str(self, conn, tmp, name, args_str):
         ''' transfer arguments as a single file to be fed to the module. '''

+        if type(args_str) == dict:
+            args_str = utils.smjson(args_str)
+
         args_fd, args_file = tempfile.mkstemp()
         args_fo = os.fdopen(args_fd, 'w')
         args_fo.write(args_str)

@@ -322,11 +325,19 @@ class Runner(object):
     def _add_setup_vars(self, inject, args):
         ''' setup module variables need special handling '''

+        is_dict = False
+        if type(args) == dict:
+            is_dict = True
+
+        # TODO: keep this as a dict through the whole path to simplify this code
         for (k,v) in inject.iteritems():
             if not k.startswith('facter_') and not k.startswith('ohai_'):
+                if not is_dict:
                     if str(v).find(" ") != -1:
                         v = "\"%s\"" % v
                     args += " %s=%s" % (k, str(v).replace(" ","~~~"))
+                else:
+                    args[k]=v
         return args

     # *****************************************************

@@ -334,11 +345,23 @@ class Runner(object):
     def _add_setup_metadata(self, args):
         ''' automatically determine where to store variables for the setup module '''

+        is_dict = False
+        if type(args) == dict:
+            is_dict = True
+
+        # TODO: keep this as a dict through the whole path to simplify this code
+        if not is_dict:
             if args.find("metadata=") == -1:
                 if self.remote_user == 'root':
                     args = "%s metadata=/etc/ansible/setup" % args
                 else:
                     args = "%s metadata=/home/%s/.ansible/setup" % (args, self.remote_user)
+        else:
+            if not 'metadata' in args:
+                if self.remote_user == 'root':
+                    args['metadata'] = '/etc/ansible/setup'
+                else:
+                    args['metadata'] = "/home/%s/.ansible/setup" % (self.remote_user)
         return args

     # *****************************************************

@@ -358,9 +381,11 @@ class Runner(object):
             args = self._add_setup_vars(inject, args)
             args = self._add_setup_metadata(args)

+        if type(args) == dict:
+            args = utils.bigjson(args)
         args = utils.template(args, inject)

         module_name_tail = remote_module_path.split("/")[-1]
-        client_executed_str = "%s %s" % (module_name_tail, args.strip())

         argsfile = self._transfer_str(conn, tmp, 'arguments', args)
         if async_jid is None:

@@ -368,12 +393,8 @@ class Runner(object):
         else:
             cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])

-        # log command as the full command not as the path to args file - helps with debugging
-        msg = '%s: "%s"' % (self.module_name, args)
-        conn.exec_command('/usr/bin/logger -t ansible -p auth.info "%s"' % msg, None)
-
         res, err = self._exec_command(conn, cmd, tmp, sudoable=True)
+        client_executed_str = "%s %s" % (module_name_tail, args.strip())
         return ( res, err, client_executed_str )

     # *****************************************************

@@ -443,8 +464,10 @@ class Runner(object):

         # load up options
         options = utils.parse_kv(self.module_args)
-        source = options['src']
-        dest = options['dest']
+        source = options.get('src', None)
+        dest = options.get('dest', None)
+        if source is None or dest is None:
+            return (host, True, dict(failed=True, msg="src and dest are required"), '')

         # transfer the file to a remote tmp location
         tmp_src = tmp + source.split('/')[-1]

@@ -466,6 +489,42 @@ class Runner(object):

     # *****************************************************

+    def _execute_fetch(self, conn, host, tmp):
+        ''' handler for fetch operations '''
+
+        # load up options
+        options = utils.parse_kv(self.module_args)
+        source = options.get('src', None)
+        dest = options.get('dest', None)
+        if source is None or dest is None:
+            return (host, True, dict(failed=True, msg="src and dest are required"), '')
+
+        # files are saved in dest dir, with a subdir for each host, then the filename
+        filename = os.path.basename(source)
+        dest = "%s/%s/%s" % (utils.path_dwim(self.basedir, dest), host, filename)
+
+        # compare old and new md5 for support of change hooks
+        local_md5 = None
+        if os.path.exists(dest):
+            local_md5 = os.popen("md5sum %s" % dest).read().split()[0]
+        remote_md5 = self._exec_command(conn, "md5sum %s" % source, tmp, True)[0].split()[0]
+
+        if remote_md5 != local_md5:
+            # create the containing directories, if needed
+            os.makedirs(os.path.dirname(dest))
+            # fetch the file and check for changes
+            conn.fetch_file(source, dest)
+            new_md5 = os.popen("md5sum %s" % dest).read().split()[0]
+            changed = (new_md5 != local_md5)
+            if new_md5 != remote_md5:
+                return (host, True, dict(failed=True, msg="md5 mismatch", md5sum=new_md5), '')
+            return (host, True, dict(changed=True, md5sum=new_md5), '')
+        else:
+            return (host, True, dict(changed=False, md5sum=local_md5), '')
+
+    # *****************************************************
+
     def _chain_file_module(self, conn, tmp, data, err, options, executed):
         ''' handles changing file attribs after copy/template operations '''

@@ -488,9 +547,11 @@ class Runner(object):

         # load up options
         options = utils.parse_kv(self.module_args)
-        source = options['src']
-        dest = options['dest']
+        source = options.get('src', None)
+        dest = options.get('dest', None)
         metadata = options.get('metadata', None)
+        if source is None or dest is None:
+            return (host, True, dict(failed=True, msg="src and dest are required"), '')

         if metadata is None:
             if self.remote_user == 'root':

@@ -555,6 +616,8 @@ class Runner(object):

         if self.module_name == 'copy':
             result = self._execute_copy(conn, host, tmp)
+        elif self.module_name == 'fetch':
+            result = self._execute_fetch(conn, host, tmp)
         elif self.module_name == 'template':
             result = self._execute_template(conn, host, tmp)
         else:

@@ -587,10 +650,6 @@ class Runner(object):
     def _exec_command(self, conn, cmd, tmp, sudoable=False):
         ''' execute a command string over SSH, return the output '''

-        msg = '%s: %s' % (self.module_name, cmd)
-        # log remote command execution
-        conn.exec_command('/usr/bin/logger -t ansible -p auth.info "%s"' % msg, None)
-        # now run actual command
         stdin, stdout, stderr = conn.exec_command(cmd, tmp, sudoable=sudoable)

         if type(stderr) != str:

@@ -697,6 +756,7 @@ class Runner(object):
         # find hosts that match the pattern
         hosts = self._match_hosts(self.pattern)
         if len(hosts) == 0:
+            self.callbacks.on_no_hosts()
             return dict(contacted={}, dark={})

         hosts = [ (self,x) for x in hosts ]

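Note (not part of the commit): the new _execute_fetch handler above decides "changed" by comparing md5 checksums before and after the transfer, shelling out to md5sum. A minimal standalone sketch of the same idea using only the standard library (function and path names are illustrative):

    import hashlib
    import os

    def file_md5(path):
        # hash in chunks so large files need not fit in memory
        digest = hashlib.md5()
        with open(path, "rb") as handle:
            for chunk in iter(lambda: handle.read(65536), b""):
                digest.update(chunk)
        return digest.hexdigest()

    def fetch_result(previous_path, fetched_path):
        # "changed" means the freshly fetched copy differs from what was on disk before
        old_md5 = file_md5(previous_path) if os.path.exists(previous_path) else None
        new_md5 = file_md5(fetched_path)
        return dict(changed=(new_md5 != old_md5), md5sum=new_md5)
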
lib/ansible/utils.py

@@ -24,7 +24,7 @@ import re
 import jinja2
 import yaml
 import optparse
-
+from operator import methodcaller
 try:
     import json
 except ImportError:

@@ -273,79 +273,55 @@ def parse_kv(args):
         options[k]=v
     return options

-def make_parser(add_options, constants=C, usage="", output_opts=False, runas_opts=False, async_opts=False, connect_opts=False):
-    ''' create an options parser w/ common options for any ansible program '''
-
-    options = base_parser_options(
-        constants=constants,
-        output_opts=output_opts,
-        runas_opts=runas_opts,
-        async_opts=async_opts,
-        connect_opts=connect_opts
-    )
-    options.update(add_options)
-
-    parser = optparse.OptionParser()
-    names = sorted(options.keys())
-    for n in names:
-        data = options[n].copy()
-        long = data['long']
-        del data['long']
-        parser.add_option(n, long, **data)
-    return parser
-
-def base_parser_options(constants=C, output_opts=False, runas_opts=False, async_opts=False, connect_opts=False):
-    ''' creates common options for ansible programs '''
-
-    options = {
-        '-D': dict(long='--debug', default=False, action="store_true",
-            help='show debug/verbose module output'),
-        '-f': dict(long='--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
-            help='number of parallel processes to use'),
-        '-i': dict(long='--inventory-file', dest='inventory',
-            help='path to inventory host file', default=constants.DEFAULT_HOST_LIST),
-        '-k': dict(long='--ask-pass', default=False, action='store_true',
-            help='ask for SSH password'),
-        '-M': dict(long='--module-path', dest='module_path',
-            help="path to module library directory", default=constants.DEFAULT_MODULE_PATH),
-        '-T': dict(long='--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
-            dest='timeout', help='set the SSH connection timeout in seconds'),
-        '-p': dict(long='--port', default=constants.DEFAULT_REMOTE_PORT, type='int',
-            dest='remote_port', help='use this remote SSH port'),
-    }
+class SortedOptParser(optparse.OptionParser):
+    '''Optparser which sorts the options by opt before outputting --help'''
+    def format_help(self, formatter=None):
+        self.option_list.sort(key=methodcaller('get_opt_string'))
+        return optparse.OptionParser.format_help(self, formatter=None)
+
+def base_parser(constants=C, usage="", output_opts=False, runas_opts=False, async_opts=False, connect_opts=False):
+    ''' create an options parser for any ansible script '''
+
+    parser = SortedOptParser(usage)
+    parser.add_option('-D','--debug', default=False, action="store_true",
+        help='enable standard error debugging of modules.')
+    parser.add_option('-f','--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
+        help='number of parallel processes to use')
+    parser.add_option('-i', '--inventory-file', dest='inventory',
+        help='inventory host file', default=constants.DEFAULT_HOST_LIST)
+    parser.add_option('-k', '--ask-pass', default=False, action='store_true',
+        help='ask for SSH password')
+    parser.add_option('-M', '--module-path', dest='module_path',
+        help="path to module library", default=constants.DEFAULT_MODULE_PATH)
+    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
+        dest='timeout', help='set the SSH timeout in seconds')
+    parser.add_option('-p', '--port', default=constants.DEFAULT_REMOTE_PORT, type='int',
+        dest='remote_port', help='set the remote ssh port')

     if output_opts:
-        options.update({
-            '-o' : dict(long='--one-line', dest='one_line', action='store_true',
-                help='condense output'),
-            '-t' : dict(long='--tree', dest='tree', default=None,
-                help='log results to this directory')
-        })
+        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
+            help='condense output')
+        parser.add_option('-t', '--tree', dest='tree', default=None,
+            help='log output to this directory')

     if runas_opts:
-        options.update({
-            '-s' : dict(long="--sudo", default=False, action="store_true",
-                dest='sudo', help="run operations with sudo (nopasswd)"),
-            '-u' : dict(long='--user', default=constants.DEFAULT_REMOTE_USER,
-                dest='remote_user', help='connect as this user'),
-        })
+        parser.add_option("-s", "--sudo", default=False, action="store_true",
+            dest='sudo', help="run operations with sudo (nopasswd)")
+        parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER,
+            dest='remote_user', help='connect as this user')

     if connect_opts:
-        options.update({
-            '-c' : dict(long='--connection', dest='connection',
+        parser.add_option('-c', '--connection', dest='connection',
             choices=C.DEFAULT_TRANSPORT_OPTS,
             default=C.DEFAULT_TRANSPORT,
             help="connection type to use")
-        })

     if async_opts:
-        options.update({
-            '-P' : dict(long='--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
-                dest='poll_interval', help='set the poll interval if using -B'),
-            '-B' : dict(long='--background', dest='seconds', type='int', default=0,
-                help='run asynchronously, failing after X seconds'),
-        })
+        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
+            dest='poll_interval', help='set the poll interval if using -B')
+        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
+            help='run asynchronously, failing after X seconds')

-    return options
+    return parser

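Note (not part of the commit): SortedOptParser above is a small optparse subclass that sorts options before rendering --help. A self-contained sketch of the same technique (option names here are illustrative):

    import optparse
    from operator import methodcaller

    class SortedOptParser(optparse.OptionParser):
        '''OptionParser that sorts its options by opt string before printing --help'''
        def format_help(self, formatter=None):
            # sort in place on the canonical option string, e.g. "--forks"
            self.option_list.sort(key=methodcaller('get_opt_string'))
            return optparse.OptionParser.format_help(self, formatter)

    parser = SortedOptParser(usage='%prog [options]')
    parser.add_option('-t', '--tree', dest='tree', help='log output to this directory')
    parser.add_option('-f', '--forks', dest='forks', type='int', default=5,
        help='number of parallel processes to use')
    print(parser.format_help())   # options are listed sorted, regardless of add order
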
library/apt

@@ -31,9 +31,7 @@ APT_PATH = "/usr/bin/apt-get"
 APT = "DEBIAN_PRIORITY=critical %s" % APT_PATH

 def debug(msg):
-    # ansible ignores stderr, so it's safe to use for debug
     print >>sys.stderr, msg
-    #pass

 def exit_json(rc=0, **kwargs):
     print json.dumps(kwargs)

@@ -46,7 +44,7 @@ def fail_json(**kwargs):
 try:
     import apt
 except ImportError:
-    fail_json(msg="could not import apt")
+    fail_json(msg="could not import apt, please install the python-apt package on this host")

 def run_apt(command):
     try:

@@ -115,7 +113,7 @@ for x in items:
     params[k] = v

 state = params.get('state','installed')
-package = params.get('pkg', None)
+package = params.get('pkg', params.get('package', params.get('name', None)))
 update_cache = params.get('update-cache', 'no')
 purge = params.get('purge', 'no')

library/fetch (new executable file, 24 lines)

@@ -0,0 +1,24 @@
+#!/usr/bin/python
+
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+#
+
+### THIS FILE IS FOR REFERENCE OR FUTURE USE ###
+
+# See lib/ansible/runner.py for implementation of the fetch functionality #
+

library/setup

@@ -23,6 +23,7 @@ import sys
 import os
 import shlex
 import subprocess
+import traceback

 try:
     import json

@@ -34,18 +35,22 @@ except ImportError:
 if len(sys.argv) == 1:
     sys.exit(1)

 argfile = sys.argv[1]
 if not os.path.exists(argfile):
     sys.exit(1)

-input_data = shlex.split(open(argfile, 'r').read())
-
-# turn urlencoded k=v string (space delimited) to regular k=v directionary
-splitted = [x.split('=',1) for x in input_data ]
-splitted = [ (x[0], x[1].replace("~~~"," ")) for x in splitted ]
-new_options = dict(splitted)
-
-ansible_file = new_options.get('metadata', DEFAULT_ANSIBLE_SETUP)
+setup_options = open(argfile).read().strip()
+try:
+    setup_options = json.loads(setup_options)
+except:
+    list_options = shlex.split(setup_options)
+    setup_options = {}
+    for opt in list_options:
+        (k,v) = opt.split("=")
+        setup_options[k]=v
+
+ansible_file = setup_options.get('metadata', DEFAULT_ANSIBLE_SETUP)
 ansible_dir = os.path.dirname(ansible_file)

 # create the config dir if it doesn't exist

@@ -74,7 +79,7 @@ if os.path.exists("/usr/bin/facter"):
     facter = False
 if facter:
     for (k,v) in facter_ds.items():
-        new_options["facter_%s" % k] = v
+        setup_options["facter_%s" % k] = v

 # ditto for ohai, but just top level string keys
 # because it contains a lot of nested stuff we can't use for

@@ -93,13 +98,13 @@ if os.path.exists("/usr/bin/ohai"):
     for (k,v) in ohai_ds.items():
         if type(v) == str or type(v) == unicode:
             k2 = "ohai_%s" % k
-            new_options[k2] = v
+            setup_options[k2] = v

 # write the template/settings file using
 # instructions from server

 f = open(ansible_file, "w+")
-reformat = json.dumps(new_options, sort_keys=True, indent=4)
+reformat = json.dumps(setup_options, sort_keys=True, indent=4)
 f.write(reformat)
 f.close()

@@ -108,9 +113,9 @@ md5sum2 = os.popen("md5sum %s" % ansible_file).read().split()[0]
 if md5sum != md5sum2:
     changed = True

-new_options['written'] = ansible_file
-new_options['changed'] = changed
-new_options['md5sum'] = md5sum2
+setup_options['written'] = ansible_file
+setup_options['changed'] = changed
+setup_options['md5sum'] = md5sum2

-print json.dumps(new_options)
+print json.dumps(setup_options)

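Note (not part of the commit): the setup module now accepts its arguments either as a JSON blob or as the older space-delimited key=value string, as the try/except above shows. A standalone sketch of that fallback pattern (function name is illustrative):

    import json
    import shlex

    def parse_module_args(raw):
        # prefer JSON; fall back to space-delimited key=value pairs
        raw = raw.strip()
        try:
            return json.loads(raw)
        except ValueError:
            options = {}
            for token in shlex.split(raw):
                (k, v) = token.split("=", 1)
                options[k] = v
            return options

    # both calls produce the same mapping of metadata and port
    print(parse_module_args('{"metadata": "/etc/ansible/setup", "port": "5150"}'))
    print(parse_module_args('metadata=/etc/ansible/setup port=5150'))
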
library/yum (14 lines changed)

@@ -317,22 +317,18 @@ def main():
         except Exception, e:
             return 1, str(e)

-    elif 'state' in params:
-        if 'pkg' not in params:
+    else:
+        pkg = params.get('pkg', params.get('package', params.get('name', None)))
+        if 'pkg' is None:
             results['msg'] = "No pkg specified"
         else:
             try:
                 my = yum_base(conf_file=params['conf_file'], cachedir=True)
-                state = params['state']
-                pkgspec = params['pkg']
-                results = ensure(my, state, pkgspec)
+                state = params.get('state', 'installed')
+                results = ensure(my, state, pkg)
             except Exception, e:
                 return 1, str(e)

-    else:
-        print json.dumps(dict(failed=True, msg='invalid module parameters'))
-        sys.exit(1)
-
     print json.dumps(results)
     return 0, None

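Note (not part of the commit): both the apt and yum hunks start accepting pkg, package, or name for the same parameter via nested dict.get() calls. A roughly equivalent, slightly more readable helper (name is illustrative):

    def first_param(params, *aliases):
        # return the value of the first alias present, else None
        for key in aliases:
            if key in params:
                return params[key]
        return None

    params = {"name": "httpd", "state": "installed"}
    print(first_param(params, "pkg", "package", "name"))   # -> httpd
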
test/TestPlayBook.py

@@ -89,8 +89,10 @@ class TestCallbacks(object):
     def on_setup_secondary(self):
         pass

+    def on_no_hosts(self):
+        pass

-class TestRunner(unittest.TestCase):
+class TestPlaybook(unittest.TestCase):

     def setUp(self):
         self.user = getpass.getuser()

@@ -139,20 +141,27 @@ class TestRunner(unittest.TestCase):
             callbacks = self.test_callbacks,
             runner_callbacks = self.test_callbacks
         )
-        results = self.playbook.run()
-        return dict(
-            results = results,
-            events = EVENTS
-        )
+        return self.playbook.run()

     def test_one(self):
         pb = os.path.join(self.test_dir, 'playbook1.yml')
-        expected = os.path.join(self.test_dir, 'playbook1.events')
-        expected = utils.json_loads(file(expected).read())
         actual = self._run(pb)

         # if different, this will output to screen
+        print "**ACTUAL**"
         print utils.bigjson(actual)
-        assert cmp(expected, actual) == 0, "expected events match actual events"
+        expected = {
+            "127.0.0.2": {
+                "changed": 9,
+                "failures": 0,
+                "ok": 12,
+                "skipped": 1,
+                "unreachable": 0
+            }
+        }
+        print "**EXPECTED**"
+        print utils.bigjson(expected)
+        assert utils.bigjson(expected) == utils.bigjson(actual)

         # make sure the template module took options from the vars section
         data = file('/tmp/ansible_test_data_template.out').read()

test/TestRunner.py

@@ -189,6 +189,14 @@ class TestRunner(unittest.TestCase):
         assert 'stdout' in result
         assert result['ansible_job_id'] == jid

+    def test_fetch(self):
+        input = self._get_test_file('sample.j2')
+        output = self._get_stage_file('127.0.0.2/sample.j2')
+        result = self._run('fetch', [ "src=%s" % input, "dest=%s" % self.stage_dir ])
+        print "output file=%s" % output
+        assert os.path.exists(output)
+        assert open(input).read() == open(output).read()
+
     def test_yum(self):
         result = self._run('yum', [ "list=repos" ])
         assert 'failed' not in result

test/playbook1.events (deleted)

@@ -1,322 +0,0 @@
-{
-    "events": [
-        "start",
-        [
-            "play start",
-            [
-                "all"
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "answer": "Wuh, I think so, Brain, but if we didn't have ears, we'd look like weasels.",
-                    "changed": true,
-                    "metadata": "/etc/ansible/setup",
-                    "port": "5150",
-                    "written": "/etc/ansible/setup"
-                }
-            ]
-        ],
-        [
-            "import",
-            [
-                "127.0.0.2",
-                "/home/mdehaan/ansible/test/common_vars.yml"
-            ]
-        ],
-        [
-            "import",
-            [
-                "127.0.0.2",
-                "/home/mdehaan/ansible/test/CentOS.yml"
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "answer": "Wuh, I think so, Brain, but if we didn't have ears, we'd look like weasels.",
-                    "changed": true,
-                    "cow": "moo",
-                    "duck": "quack",
-                    "metadata": "/etc/ansible/setup",
-                    "port": "5150",
-                    "testing": "default",
-                    "written": "/etc/ansible/setup"
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "test basic success command",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": [
-                        "/bin/true"
-                    ],
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": ""
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "test basic success command 2",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": [
-                        "/bin/true"
-                    ],
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": ""
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "test basic shell, plus two ways to dereference a variable",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": "echo $HOME 5150 5150 ",
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": "/root 5150 5150"
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "test vars_files imports",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": "echo quack moo default ",
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": "quack moo default"
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "test copy",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "group": "root",
-                    "mode": 420,
-                    "path": "/tmp/ansible_test_data_copy.out",
-                    "state": "file",
-                    "user": "root"
-                }
-            ]
-        ],
-        [
-            "notify",
-            [
-                "127.0.0.2",
-                "on change 1"
-            ]
-        ],
-        [
-            "task start",
-            [
-                "test template",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "group": "root",
-                    "mode": 420,
-                    "path": "/tmp/ansible_test_data_template.out",
-                    "state": "file",
-                    "user": "root"
-                }
-            ]
-        ],
-        [
-            "notify",
-            [
-                "127.0.0.2",
-                "on change 1"
-            ]
-        ],
-        [
-            "notify",
-            [
-                "127.0.0.2",
-                "on change 2"
-            ]
-        ],
-        [
-            "task start",
-            [
-                "async poll test",
-                false
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "started": 1
-                }
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "started": 1
-                }
-            ]
-        ],
-        [
-            "async poll",
-            [
-                "127.0.0.2"
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": "sleep 5 ",
-                    "finished": 1,
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": ""
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "this should be skipped",
-                false
-            ]
-        ],
-        [
-            "skipped",
-            [
-                "127.0.0.2"
-            ]
-        ],
-        [
-            "task start",
-            [
-                "on change 1",
-                true
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": "echo 'this should fire once' ",
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": "this should fire once"
-                }
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": "echo 'this should fire once' ",
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": "this should fire once"
-                }
-            ]
-        ],
-        [
-            "task start",
-            [
-                "on change 2",
-                true
-            ]
-        ],
-        [
-            "ok",
-            [
-                "127.0.0.2",
-                {
-                    "changed": true,
-                    "cmd": "echo 'this should fire once also' ",
-                    "rc": 0,
-                    "stderr": "",
-                    "stdout": "this should fire once also"
-                }
-            ]
-        ]
-    ],
-    "results": {
-        "127.0.0.2": {
-            "changed": 9,
-            "failures": 0,
-            "ok": 12,
-            "skipped": 1,
-            "unreachable": 0
-        }
-    }
-}