Mirror of https://github.com/ansible-collections/community.general.git (synced 2024-09-14 20:13:21 +02:00)
test/: PEP8 compliancy (#24803)
* test/: PEP8 compliancy - Make PEP8 compliant
* Python3 chokes on casting int to bytes (#24952) - But if we tell the formatter that the var is a number, it works
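For context, a minimal illustration of the Python 3 behaviour the second point refers to (example code only, not taken from this commit's diff):

    value = 42
    # Python 3: bytes(int) builds a zero-filled buffer instead of rendering the digits.
    bytes(3)        # b'\x00\x00\x00', not b'3'
    # Telling the formatter the value is a number works:
    b'%d' % value   # b'42' (bytes %-formatting, Python 3.5+)
    '%d' % value    # '42'  (works on both Python 2 and 3)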
Parent: 31c59ad5f9
Commit: 4efec414e7
110 changed files with 1702 additions and 1547 deletions
setup.py | 8
@@ -8,15 +8,15 @@ try:
from setuptools import setup, find_packages
except ImportError:
print("Ansible now needs setuptools in order to build. Install it using"
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
" your package manager (usually python-setuptools) or via pip (pip"
" install setuptools).")
sys.exit(1)

with open('requirements.txt') as requirements_file:
install_requirements = requirements_file.read().splitlines()
if not install_requirements:
print("Unable to read requirements from the requirements.txt file"
"That indicates this copy of the source code is incomplete.")
"That indicates this copy of the source code is incomplete.")
sys.exit(2)

SYMLINKS = {'ansible': frozenset(('ansible-console',
@@ -49,7 +49,7 @@ setup(
# Ansible will also make use of a system copy of python-six and
# python-selectors2 if installed but use a Bundled copy if it's not.
install_requires=install_requirements,
package_dir={ '': 'lib' },
package_dir={'': 'lib'},
packages=find_packages('lib'),
package_data={
'': [

@@ -1 +0,0 @@

@@ -4,15 +4,16 @@ Find and delete AWS resources matching the provided --match string. Unless
Please use caution, you can easily delete you're *ENTIRE* EC2 infrastructure.
'''

import boto
import boto.ec2.elb
import optparse
import os
import os.path
import re
import sys
import boto
import optparse
import yaml
import os.path
import boto.ec2.elb
import time
import yaml


def delete_aws_resources(get_func, attr, opts):
for item in get_func():
@@ -20,6 +21,7 @@ def delete_aws_resources(get_func, attr, opts):
if re.search(opts.match_re, val):
prompt_and_delete(item, "Delete matching %s? [y/n]: " % (item,), opts.assumeyes)


def delete_autoscaling_group(get_func, attr, opts):
assumeyes = opts.assumeyes
group_name = None
@@ -49,7 +51,8 @@ def delete_autoscaling_group(get_func, attr, opts):
group.delete()
while len(asg.get_all_groups(names=[group_name])):
time.sleep(5)
print ("Terminated ASG: %s" % group_name)
print("Terminated ASG: %s" % group_name)


def delete_aws_eips(get_func, attr, opts):

@@ -65,22 +68,25 @@ def delete_aws_eips(get_func, attr, opts):
if val in eip_log:
prompt_and_delete(item, "Delete matching %s? [y/n]: " % (item,), opts.assumeyes)


def delete_aws_instances(reservation, opts):
for list in reservation:
for item in list.instances:
prompt_and_delete(item, "Delete matching %s? [y/n]: " % (item,), opts.assumeyes)


def prompt_and_delete(item, prompt, assumeyes):
if not assumeyes:
assumeyes = raw_input(prompt).lower() == 'y'
assert hasattr(item, 'delete') or hasattr(item, 'terminate') , "Class <%s> has no delete or terminate attribute" % item.__class__
assert hasattr(item, 'delete') or hasattr(item, 'terminate'), "Class <%s> has no delete or terminate attribute" % item.__class__
if assumeyes:
if hasattr(item, 'delete'):
item.delete()
print ("Deleted %s" % item)
print("Deleted %s" % item)
if hasattr(item, 'terminate'):
item.terminate()
print ("Terminated %s" % item)
print("Terminated %s" % item)


def parse_args():
# Load details from credentials.yml
@@ -94,45 +100,61 @@ def parse_args():
if default_aws_secret_key is None:
default_aws_secret_key = credentials['ec2_secret_key']

parser = optparse.OptionParser(usage="%s [options]" % (sys.argv[0],),
description=__doc__)
parser.add_option("--access",
parser = optparse.OptionParser(
usage="%s [options]" % (sys.argv[0], ),
description=__doc__
)
parser.add_option(
"--access",
action="store", dest="ec2_access_key",
default=default_aws_access_key,
help="Amazon ec2 access id. Can use EC2_ACCESS_KEY environment variable, or a values from credentials.yml.")
parser.add_option("--secret",
help="Amazon ec2 access id. Can use EC2_ACCESS_KEY environment variable, or a values from credentials.yml."
)
parser.add_option(
"--secret",
action="store", dest="ec2_secret_key",
default=default_aws_secret_key,
help="Amazon ec2 secret key. Can use EC2_SECRET_KEY environment variable, or a values from credentials.yml.")
parser.add_option("--eip-log",
help="Amazon ec2 secret key. Can use EC2_SECRET_KEY environment variable, or a values from credentials.yml."
)
parser.add_option(
"--eip-log",
action="store", dest="eip_log",
default = None,
help = "Path to log of EIPs created during test.")
parser.add_option("--integration-config",
default=None,
help="Path to log of EIPs created during test."
)
parser.add_option(
"--integration-config",
action="store", dest="int_config",
default = "integration_config.yml",
help = "path to integration config")
parser.add_option("--credentials", "-c",
default="integration_config.yml",
help="path to integration config"
)
parser.add_option(
"--credentials", "-c",
action="store", dest="credential_file",
default="credentials.yml",
help="YAML file to read cloud credentials (default: %default)")
parser.add_option("--yes", "-y",
help="YAML file to read cloud credentials (default: %default)"
)
parser.add_option(
"--yes", "-y",
action="store_true", dest="assumeyes",
default=False,
help="Don't prompt for confirmation")
parser.add_option("--match",
help="Don't prompt for confirmation"
)
parser.add_option(
"--match",
action="store", dest="match_re",
default="^ansible-testing-",
help="Regular expression used to find AWS resources (default: %default)")
help="Regular expression used to find AWS resources (default: %default)"
)

(opts, args) = parser.parse_args()
for required in ['ec2_access_key', 'ec2_secret_key']:
if getattr(opts, required) is None:
parser.error("Missing required parameter: --%s" % required)


return (opts, args)


if __name__ == '__main__':

(opts, args) = parse_args()
@@ -140,17 +162,17 @@ if __name__ == '__main__':
int_config = yaml.load(open(opts.int_config).read())
if not opts.eip_log:
output_dir = os.path.expanduser(int_config["output_dir"])
opts.eip_log = output_dir + '/' + opts.match_re.replace('^','') + '-eip_integration_tests.log'
opts.eip_log = output_dir + '/' + opts.match_re.replace('^', '') + '-eip_integration_tests.log'

# Connect to AWS
aws = boto.connect_ec2(aws_access_key_id=opts.ec2_access_key,
aws_secret_access_key=opts.ec2_secret_key)
aws_secret_access_key=opts.ec2_secret_key)

elb = boto.connect_elb(aws_access_key_id=opts.ec2_access_key,
aws_secret_access_key=opts.ec2_secret_key)
aws_secret_access_key=opts.ec2_secret_key)

asg = boto.connect_autoscale(aws_access_key_id=opts.ec2_access_key,
aws_secret_access_key=opts.ec2_secret_key)
aws_secret_access_key=opts.ec2_secret_key)

try:
# Delete matching keys
@@ -172,7 +194,7 @@ if __name__ == '__main__':
delete_aws_eips(aws.get_all_addresses, 'public_ip', opts)

# Delete temporary instances
filters = {"tag:Name":opts.match_re.replace('^',''), "instance-state-name": ['running', 'pending', 'stopped' ]}
filters = {"tag:Name": opts.match_re.replace('^', ''), "instance-state-name": ['running', 'pending', 'stopped']}
delete_aws_instances(aws.get_all_instances(filters=filters), opts)

except KeyboardInterrupt as e:

@@ -4,21 +4,25 @@ Find and delete GCE resources matching the provided --match string. Unless
Please use caution, you can easily delete your *ENTIRE* GCE infrastructure.
'''

import optparse
import os
import re
import sys
import optparse
import yaml

try:
from libcloud.compute.types import Provider
from libcloud.common.google import (
GoogleBaseError,
QuotaExceededError,
ResourceExistsError,
ResourceInUseError,
ResourceNotFoundError,
)
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
from libcloud.compute.types import Provider
_ = Provider.GCE
except ImportError:
print("failed=True " + \
"msg='libcloud with GCE support (0.13.3+) required for this module'")
print("failed=True msg='libcloud with GCE support (0.13.3+) required for this module'")
sys.exit(1)

import gce_credentials
@@ -30,26 +34,34 @@ def delete_gce_resources(get_func, attr, opts):
if re.search(opts.match_re, val, re.IGNORECASE):
prompt_and_delete(item, "Delete matching %s? [y/n]: " % (item,), opts.assumeyes)


def prompt_and_delete(item, prompt, assumeyes):
if not assumeyes:
assumeyes = raw_input(prompt).lower() == 'y'
assert hasattr(item, 'destroy'), "Class <%s> has no delete attribute" % item.__class__
if assumeyes:
item.destroy()
print ("Deleted %s" % item)
print("Deleted %s" % item)


def parse_args():
parser = optparse.OptionParser(usage="%s [options]" % (sys.argv[0],),
description=__doc__)
parser = optparse.OptionParser(
usage="%s [options]" % sys.argv[0],
description=__doc__
)
gce_credentials.add_credentials_options(parser)
parser.add_option("--yes", "-y",
parser.add_option(
"--yes", "-y",
action="store_true", dest="assumeyes",
default=False,
help="Don't prompt for confirmation")
parser.add_option("--match",
help="Don't prompt for confirmation"
)
parser.add_option(
"--match",
action="store", dest="match_re",
default="^ansible-testing-",
help="Regular expression used to find GCE resources (default: %default)")
help="Regular expression used to find GCE resources (default: %default)"
)

(opts, args) = parser.parse_args()
gce_credentials.check_required(opts, parser)
@@ -65,6 +77,7 @@ if __name__ == '__main__':
try:
# Delete matching instances
delete_gce_resources(gce.list_nodes, 'name', opts)

# Delete matching snapshots
def get_snapshots():
for volume in gce.list_volumes():

@@ -55,19 +55,19 @@ def prompt_and_delete(item, prompt, assumeyes):
if not assumeyes:
assumeyes = raw_input(prompt).lower() == 'y'
assert hasattr(item, 'delete') or hasattr(item, 'terminate'), \
"Class <%s> has no delete or terminate attribute" % item.__class__
"Class <%s> has no delete or terminate attribute" % item.__class__
if assumeyes:
if hasattr(item, 'delete'):
item.delete()
print ("Deleted %s" % item)
print("Deleted %s" % item)
if hasattr(item, 'terminate'):
item.terminate()
print ("Terminated %s" % item)
print("Terminated %s" % item)


def delete_rax(args):
"""Function for deleting CloudServers"""
print ("--- Cleaning CloudServers matching '%s'" % args.match_re)
print("--- Cleaning CloudServers matching '%s'" % args.match_re)
search_opts = dict(name='^%s' % args.match_re)
for region in pyrax.identity.services.compute.regions:
cs = pyrax.connect_to_cloudservers(region=region)
@@ -80,7 +80,7 @@ def delete_rax(args):

def delete_rax_clb(args):
"""Function for deleting Cloud Load Balancers"""
print ("--- Cleaning Cloud Load Balancers matching '%s'" % args.match_re)
print("--- Cleaning Cloud Load Balancers matching '%s'" % args.match_re)
for region in pyrax.identity.services.load_balancer.regions:
clb = pyrax.connect_to_cloud_loadbalancers(region=region)
for lb in rax_list_iterator(clb):
@@ -92,7 +92,7 @@ def delete_rax_clb(args):

def delete_rax_keypair(args):
"""Function for deleting Rackspace Key pairs"""
print ("--- Cleaning Key Pairs matching '%s'" % args.match_re)
print("--- Cleaning Key Pairs matching '%s'" % args.match_re)
for region in pyrax.identity.services.compute.regions:
cs = pyrax.connect_to_cloudservers(region=region)
for keypair in cs.keypairs.list():
@@ -104,7 +104,7 @@ def delete_rax_keypair(args):

def delete_rax_network(args):
"""Function for deleting Cloud Networks"""
print ("--- Cleaning Cloud Networks matching '%s'" % args.match_re)
print("--- Cleaning Cloud Networks matching '%s'" % args.match_re)
for region in pyrax.identity.services.network.regions:
cnw = pyrax.connect_to_cloud_networks(region=region)
for network in cnw.list():
@@ -116,7 +116,7 @@ def delete_rax_network(args):

def delete_rax_cbs(args):
"""Function for deleting Cloud Networks"""
print ("--- Cleaning Cloud Block Storage matching '%s'" % args.match_re)
print("--- Cleaning Cloud Block Storage matching '%s'" % args.match_re)
for region in pyrax.identity.services.network.regions:
cbs = pyrax.connect_to_cloud_blockstorage(region=region)
for volume in cbs.list():
@@ -128,7 +128,7 @@ def delete_rax_cbs(args):

def delete_rax_cdb(args):
"""Function for deleting Cloud Databases"""
print ("--- Cleaning Cloud Databases matching '%s'" % args.match_re)
print("--- Cleaning Cloud Databases matching '%s'" % args.match_re)
for region in pyrax.identity.services.database.regions:
cdb = pyrax.connect_to_cloud_databases(region=region)
for db in rax_list_iterator(cdb):
@@ -146,7 +146,7 @@ def _force_delete_rax_scaling_group(manager):

def delete_rax_scaling_group(args):
"""Function for deleting Autoscale Groups"""
print ("--- Cleaning Autoscale Groups matching '%s'" % args.match_re)
print("--- Cleaning Autoscale Groups matching '%s'" % args.match_re)
for region in pyrax.identity.services.autoscale.regions:
asg = pyrax.connect_to_autoscale(region=region)
for group in rax_list_iterator(asg):
@@ -170,11 +170,11 @@ def main():
try:
func(args)
except Exception as e:
print ("---- %s failed (%s)" % (func.__name__, e.message))
print("---- %s failed (%s)" % (func.__name__, e.message))


if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print ('\nExiting...')
print('\nExiting...')

@@ -8,8 +8,7 @@ try:
from libcloud.compute.providers import get_driver
_ = Provider.GCE
except ImportError:
print("failed=True " + \
"msg='libcloud with GCE support (0.13.3+) required for this module'")
print("failed=True msg='libcloud with GCE support (0.13.3+) required for this module'")
sys.exit(1)

@@ -7,19 +7,22 @@ ${prefix}-snapshot. prefix will be forced to lowercase, to ensure the names are
legal GCE resource names.
'''

import sys
import optparse

import gce_credentials
import optparse
import sys


def parse_args():
parser = optparse.OptionParser(
usage="%s [options] <prefix>" % (sys.argv[0],), description=__doc__)
usage="%s [options] <prefix>" % (sys.argv[0],), description=__doc__
)
gce_credentials.add_credentials_options(parser)
parser.add_option("--prefix",
action="store", dest="prefix",
help="String used to prefix GCE resource names (default: %default)")
parser.add_option(
"--prefix",
action="store",
dest="prefix",
help="String used to prefix GCE resource names (default: %default)"
)

(opts, args) = parser.parse_args()
gce_credentials.check_required(opts, parser)
@@ -27,6 +30,7 @@ def parse_args():
parser.error("Missing required argument: name prefix")
return (opts, args)


if __name__ == '__main__':

(opts, args) = parse_args()
@@ -34,9 +38,8 @@ if __name__ == '__main__':
prefix = args[0].lower()
try:
base_volume = gce.create_volume(
size=10, name=prefix+'-base', location='us-central1-a')
gce.create_volume_snapshot(base_volume, name=prefix+'-snapshot')
gce.create_volume(
size=10, name=prefix+'-extra', location='us-central1-a')
size=10, name=prefix + '-base', location='us-central1-a')
gce.create_volume_snapshot(base_volume, name=prefix + '-snapshot')
gce.create_volume(size=10, name=prefix + '-extra', location='us-central1-a')
except KeyboardInterrupt as e:
print("\nExiting on user command.")

@@ -1,7 +1,9 @@
import sys
import json
import sys

from ansible.module_utils.basic import AnsibleModule


def main():
if "--interactive" in sys.argv:
import ansible.module_utils.basic
@@ -11,8 +13,9 @@ def main():
)
))

module = AnsibleModule(argument_spec = dict(
fail_mode = dict(type='list', default=['success'])
module = AnsibleModule(
argument_spec=dict(
fail_mode=dict(type='list', default=['success'])
)
)

@@ -14,4 +14,3 @@ if __name__ == '__main__':
mimetypes.add_type('application/json', '.json')
import SimpleHTTPServer
SimpleHTTPServer.test()

@ -81,6 +81,7 @@ class Role(object):
|
|||
for dep in self.dependencies:
|
||||
f.write('- { role: %s }\n' % dep)
|
||||
|
||||
|
||||
class DynamicInventory(object):
|
||||
BASESCRIPT = '''#!/usr/bin/python
|
||||
import json
|
||||
|
@ -140,13 +141,12 @@ print(json.dumps(data, indent=2, sort_keys=True))
|
|||
'hosts': [xhost],
|
||||
}
|
||||
|
||||
|
||||
def write_script(self):
|
||||
fdir = os.path.join(TESTDIR, 'inventory')
|
||||
if not os.path.isdir(fdir):
|
||||
os.makedirs(fdir)
|
||||
fpath = os.path.join(fdir, 'hosts')
|
||||
#fpath = os.path.join(TESTDIR, 'inventory')
|
||||
# fpath = os.path.join(TESTDIR, 'inventory')
|
||||
self.fpath = fpath
|
||||
|
||||
data = json.dumps(self.inventory)
|
||||
|
@ -414,22 +414,22 @@ def main():
|
|||
features = [
|
||||
'extra_vars',
|
||||
'include_params',
|
||||
#'role_params', # FIXME: we don't yet validate tasks within a role
|
||||
# 'role_params', # FIXME: we don't yet validate tasks within a role
|
||||
'set_fact',
|
||||
#'registered_vars', # FIXME: hard to simulate
|
||||
# 'registered_vars', # FIXME: hard to simulate
|
||||
'include_vars',
|
||||
#'role_dep_params',
|
||||
# 'role_dep_params',
|
||||
'task_vars',
|
||||
'block_vars',
|
||||
'role_var',
|
||||
'vars_file',
|
||||
'play_var',
|
||||
#'host_facts', # FIXME: hard to simulate
|
||||
# 'host_facts', # FIXME: hard to simulate
|
||||
'pb_host_vars_file',
|
||||
'ini_host_vars_file',
|
||||
'ini_host',
|
||||
'pb_group_vars_file_child',
|
||||
#'ini_group_vars_file_child', #FIXME: this contradicts documented precedence pb group vars files should override inventory ones
|
||||
# 'ini_group_vars_file_child', #FIXME: this contradicts documented precedence pb group vars files should override inventory ones
|
||||
'pb_group_vars_file_parent',
|
||||
'ini_group_vars_file_parent',
|
||||
'pb_group_vars_file_all',
|
||||
|
@ -489,7 +489,7 @@ def main():
|
|||
dinv = options.use_dynamic_inventory
|
||||
if dinv:
|
||||
# some features are specific to ini, so swap those
|
||||
for idx,x in enumerate(features):
|
||||
for (idx, x) in enumerate(features):
|
||||
if x.startswith('ini_') and 'vars_file' not in x:
|
||||
features[idx] = x.replace('ini_', 'script_')
|
||||
|
||||
|
|
|
@ -821,110 +821,3 @@ lib/ansible/utils/path.py
|
|||
lib/ansible/utils/ssh_functions.py
|
||||
lib/ansible/utils/vars.py
|
||||
lib/ansible/vars/manager.py
|
||||
setup.py
|
||||
test/integration/cleanup_azure.py
|
||||
test/integration/cleanup_ec2.py
|
||||
test/integration/cleanup_gce.py
|
||||
test/integration/cleanup_rax.py
|
||||
test/integration/gce_credentials.py
|
||||
test/integration/setup_gce.py
|
||||
test/integration/targets/async/library/async_test.py
|
||||
test/integration/targets/uri/files/testserver.py
|
||||
test/sanity/code-smell/ansible-var-precedence-check.py
|
||||
test/units/cli/test_galaxy.py
|
||||
test/units/contrib/inventory/test_vmware_inventory.py
|
||||
test/units/errors/test_errors.py
|
||||
test/units/executor/module_common/test_recursive_finder.py
|
||||
test/units/executor/test_play_iterator.py
|
||||
test/units/executor/test_playbook_executor.py
|
||||
test/units/executor/test_task_executor.py
|
||||
test/units/executor/test_task_result.py
|
||||
test/units/inventory/test_inventory.py
|
||||
test/units/mock/generator.py
|
||||
test/units/mock/loader.py
|
||||
test/units/module_utils/basic/test__log_invocation.py
|
||||
test/units/module_utils/basic/test_deprecate_warn.py
|
||||
test/units/module_utils/basic/test_exit_json.py
|
||||
test/units/module_utils/basic/test_heuristic_log_sanitize.py
|
||||
test/units/module_utils/basic/test_log.py
|
||||
test/units/module_utils/basic/test_no_log.py
|
||||
test/units/module_utils/basic/test_run_command.py
|
||||
test/units/module_utils/basic/test_safe_eval.py
|
||||
test/units/module_utils/basic/test_set_mode_if_different.py
|
||||
test/units/module_utils/ec2/test_aws.py
|
||||
test/units/module_utils/json_utils/test_filter_non_json_lines.py
|
||||
test/units/module_utils/test_basic.py
|
||||
test/units/module_utils/test_distribution_version.py
|
||||
test/units/module_utils/test_facts.py
|
||||
test/units/module_utils/test_postgresql.py
|
||||
test/units/module_utils/test_text.py
|
||||
test/units/modules/cloud/amazon/test_ec2_vpc_nat_gateway.py
|
||||
test/units/modules/cloud/amazon/test_lambda.py
|
||||
test/units/modules/cloud/amazon/test_s3.py
|
||||
test/units/modules/cloud/docker/test_docker.py
|
||||
test/units/modules/cloud/google/test_gce_tag.py
|
||||
test/units/modules/cloud/openstack/test_os_server.py
|
||||
test/units/modules/network/cumulus/test_nclu.py
|
||||
test/units/modules/network/eos/eos_module.py
|
||||
test/units/modules/network/eos/test_eos_command.py
|
||||
test/units/modules/network/eos/test_eos_config.py
|
||||
test/units/modules/network/eos/test_eos_system.py
|
||||
test/units/modules/network/eos/test_eos_user.py
|
||||
test/units/modules/network/ios/ios_module.py
|
||||
test/units/modules/network/ios/test_ios_banner.py
|
||||
test/units/modules/network/ios/test_ios_command.py
|
||||
test/units/modules/network/ios/test_ios_config.py
|
||||
test/units/modules/network/ios/test_ios_system.py
|
||||
test/units/modules/network/ios/test_ios_template.py
|
||||
test/units/modules/network/ios/test_ios_vrf.py
|
||||
test/units/modules/network/iosxr/iosxr_module.py
|
||||
test/units/modules/network/iosxr/test_iosxr_config.py
|
||||
test/units/modules/network/iosxr/test_iosxr_facts.py
|
||||
test/units/modules/network/iosxr/test_iosxr_system.py
|
||||
test/units/modules/network/nxos/nxos_module.py
|
||||
test/units/modules/network/nxos/test_nxos_command.py
|
||||
test/units/modules/network/nxos/test_nxos_config.py
|
||||
test/units/modules/network/nxos/test_nxos_evpn_global.py
|
||||
test/units/modules/network/nxos/test_nxos_system.py
|
||||
test/units/modules/network/vyos/test_vyos_command.py
|
||||
test/units/modules/network/vyos/vyos_module.py
|
||||
test/units/modules/packaging/os/test_apt.py
|
||||
test/units/parsing/test_dataloader.py
|
||||
test/units/parsing/test_mod_args.py
|
||||
test/units/parsing/utils/test_addresses.py
|
||||
test/units/parsing/utils/test_jsonify.py
|
||||
test/units/parsing/vault/test_vault.py
|
||||
test/units/parsing/vault/test_vault_editor.py
|
||||
test/units/parsing/yaml/test_dumper.py
|
||||
test/units/parsing/yaml/test_loader.py
|
||||
test/units/parsing/yaml/test_objects.py
|
||||
test/units/playbook/role/test_role.py
|
||||
test/units/playbook/test_attribute.py
|
||||
test/units/playbook/test_base.py
|
||||
test/units/playbook/test_block.py
|
||||
test/units/playbook/test_conditional.py
|
||||
test/units/playbook/test_helpers.py
|
||||
test/units/playbook/test_play_context.py
|
||||
test/units/playbook/test_playbook.py
|
||||
test/units/playbook/test_taggable.py
|
||||
test/units/playbook/test_task.py
|
||||
test/units/plugins/action/test_action.py
|
||||
test/units/plugins/action/test_raw.py
|
||||
test/units/plugins/action/test_synchronize.py
|
||||
test/units/plugins/connection/test_connection.py
|
||||
test/units/plugins/connection/test_netconf.py
|
||||
test/units/plugins/connection/test_network_cli.py
|
||||
test/units/plugins/connection/test_ssh.py
|
||||
test/units/plugins/lookup/test_ini.py
|
||||
test/units/plugins/lookup/test_password.py
|
||||
test/units/plugins/strategy/test_strategy_base.py
|
||||
test/units/plugins/test_plugins.py
|
||||
test/units/template/test_safe_eval.py
|
||||
test/units/template/test_templar.py
|
||||
test/units/template/test_template_utilities.py
|
||||
test/units/template/test_vars.py
|
||||
test/units/test_constants.py
|
||||
test/units/utils/test_helpers.py
|
||||
test/units/utils/test_shlex.py
|
||||
test/units/utils/test_vars.py
|
||||
test/units/vars/test_variable_manager.py
|
||||
|
|
|
@ -19,20 +19,19 @@
|
|||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import ansible
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
from ansible.compat.tests import unittest
|
||||
from ansible.compat.tests.mock import call, patch
|
||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||
from ansible.module_utils.six import PY3
|
||||
|
||||
import ansible
|
||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
|
||||
class TestGalaxy(unittest.TestCase):
|
||||
@classmethod
|
||||
|
@ -72,7 +71,7 @@ class TestGalaxy(unittest.TestCase):
|
|||
try:
|
||||
tar = tarfile.open(output_file, "w:gz")
|
||||
tar.add(source_dir, arcname=os.path.basename(source_dir))
|
||||
except AttributeError: # tarfile obj. has no attribute __exit__ prior to python 2. 7
|
||||
except AttributeError: # tarfile obj. has no attribute __exit__ prior to python 2. 7
|
||||
pass
|
||||
finally: # ensuring closure of tarfile obj
|
||||
tar.close()
|
||||
|
@ -101,7 +100,7 @@ class TestGalaxy(unittest.TestCase):
|
|||
gc = GalaxyCLI(args=self.default_args)
|
||||
role_info = {'name': 'some_role_name'}
|
||||
display_result = gc._display_role_info(role_info)
|
||||
self.assertTrue(display_result.find('some_role_name') >-1)
|
||||
self.assertTrue(display_result.find('some_role_name') > -1)
|
||||
|
||||
def test_display_galaxy_info(self):
|
||||
gc = GalaxyCLI(args=self.default_args)
|
||||
|
@ -330,7 +329,7 @@ class ValidRoleTests(object):
|
|||
self.assertTrue(os.path.isdir(os.path.join(self.role_dir, d)), msg="Expected role subdirectory {0} doesn't exist".format(d))
|
||||
|
||||
def test_travis_yml(self):
|
||||
with open(os.path.join(self.role_dir,'.travis.yml'), 'r') as f:
|
||||
with open(os.path.join(self.role_dir, '.travis.yml'), 'r') as f:
|
||||
contents = f.read()
|
||||
|
||||
with open(os.path.join(self.role_skeleton_path, '.travis.yml'), 'r') as f:
|
||||
|
@ -378,7 +377,7 @@ class TestGalaxyInitContainerEnabled(unittest.TestCase, ValidRoleTests):
|
|||
def test_metadata_container_tag(self):
|
||||
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
|
||||
metadata = yaml.safe_load(mf)
|
||||
self.assertIn('container', metadata.get('galaxy_info', dict()).get('galaxy_tags',[]), msg='container tag not set in role metadata')
|
||||
self.assertIn('container', metadata.get('galaxy_info', dict()).get('galaxy_tags', []), msg='container tag not set in role metadata')
|
||||
|
||||
def test_metadata_contents(self):
|
||||
with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
|
||||
|
|
|
@ -10,8 +10,6 @@ except ImportError:
|
|||
from nose.plugins.skip import SkipTest
|
||||
raise SkipTest("test_vmware_inventory.py requires the python module 'vmware_inventory'")
|
||||
|
||||
|
||||
|
||||
# contrib's dirstruct doesn't contain __init__.py files
|
||||
checkout_path = os.path.dirname(__file__)
|
||||
checkout_path = checkout_path.replace('/test/units/contrib/inventory', '')
|
||||
|
@ -21,15 +19,19 @@ sys.path.append(os.path.abspath(inventory_dir))
|
|||
# cleanup so that nose's path is not polluted with other inv scripts
|
||||
sys.path.remove(os.path.abspath(inventory_dir))
|
||||
|
||||
BASICINVENTORY = {
|
||||
'all': {
|
||||
'hosts': ['foo', 'bar']
|
||||
},
|
||||
'_meta': {
|
||||
'hostvars': {
|
||||
'foo': {'hostname': 'foo'},
|
||||
'bar': {'hostname': 'bar'}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
BASICINVENTORY = {'all': {'hosts': ['foo', 'bar']},
|
||||
'_meta': { 'hostvars': { 'foo': {'hostname': 'foo'},
|
||||
'bar': {'hostname': 'bar'}}
|
||||
}
|
||||
}
|
||||
|
||||
class FakeArgs(object):
|
||||
debug = False
|
||||
write_dumpfile = None
|
||||
|
@ -37,6 +39,7 @@ class FakeArgs(object):
|
|||
host = False
|
||||
list = True
|
||||
|
||||
|
||||
class TestVMWareInventory(unittest.TestCase):
|
||||
|
||||
def test_host_info_returns_single_host(self):
|
||||
|
@ -61,7 +64,7 @@ class TestVMWareInventory(unittest.TestCase):
|
|||
except:
|
||||
pass
|
||||
assert serializable
|
||||
#import epdb; epdb.st()
|
||||
# import epdb; epdb.st()
|
||||
|
||||
def test_show_list_returns_serializable_data(self):
|
||||
fakeargs = FakeArgs()
|
||||
|
@ -78,7 +81,7 @@ class TestVMWareInventory(unittest.TestCase):
|
|||
except:
|
||||
pass
|
||||
assert serializable
|
||||
#import epdb; epdb.st()
|
||||
# import epdb; epdb.st()
|
||||
|
||||
def test_show_list_returns_all_data(self):
|
||||
fakeargs = FakeArgs()
|
||||
|
@ -105,7 +108,7 @@ class TestVMWareInventory(unittest.TestCase):
|
|||
except:
|
||||
pass
|
||||
assert serializable
|
||||
#import epdb; epdb.st()
|
||||
# import epdb; epdb.st()
|
||||
|
||||
def test_show_host_returns_just_host(self):
|
||||
fakeargs = FakeArgs()
|
||||
|
@ -117,5 +120,5 @@ class TestVMWareInventory(unittest.TestCase):
|
|||
showdata = vmw.show()
|
||||
expected = BASICINVENTORY['_meta']['hostvars']['foo']
|
||||
expected = json.dumps(expected, indent=2)
|
||||
#import epdb; epdb.st()
|
||||
# import epdb; epdb.st()
|
||||
assert showdata == expected
|
||||
|
|
|
@ -19,13 +19,12 @@
|
|||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.compat.tests import unittest
|
||||
|
||||
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
|
||||
from ansible.errors import AnsibleError
|
||||
|
||||
from ansible.compat.tests import BUILTINS
|
||||
from ansible.compat.tests import BUILTINS, unittest
|
||||
from ansible.compat.tests.mock import mock_open, patch
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject
|
||||
|
||||
|
||||
class TestErrors(unittest.TestCase):
|
||||
|
||||
|
|
|
@ -20,13 +20,13 @@ from __future__ import (absolute_import, division, print_function)
|
|||
__metaclass__ = type
|
||||
|
||||
import imp
|
||||
import pytest
|
||||
import zipfile
|
||||
|
||||
from collections import namedtuple
|
||||
from functools import partial
|
||||
from io import BytesIO, StringIO
|
||||
|
||||
import pytest
|
||||
|
||||
import ansible.errors
|
||||
|
||||
from ansible.executor.module_common import recursive_finder
|
||||
|
@ -42,12 +42,12 @@ def finder_containers():
|
|||
FinderContainers = namedtuple('FinderContainers', ['py_module_names', 'py_module_cache', 'zf'])
|
||||
|
||||
py_module_names = set()
|
||||
#py_module_cache = {('__init__',): b''}
|
||||
# py_module_cache = {('__init__',): b''}
|
||||
py_module_cache = {}
|
||||
|
||||
zipoutput = BytesIO()
|
||||
zf = zipfile.ZipFile(zipoutput, mode='w', compression=zipfile.ZIP_STORED)
|
||||
#zf.writestr('ansible/__init__.py', b'')
|
||||
# zf.writestr('ansible/__init__.py', b'')
|
||||
|
||||
return FinderContainers(py_module_names, py_module_cache, zf)
|
||||
|
||||
|
@ -79,7 +79,7 @@ class TestRecursiveFinder(object):
|
|||
else:
|
||||
module_utils_data = StringIO(u'# License\ndef do_something():\n pass\n')
|
||||
mocker.patch('imp.find_module', side_effect=partial(find_package_foo, module_utils_data))
|
||||
mocker.patch('ansible.executor.module_common._slurp', side_effect= lambda x: b'# License\ndef do_something():\n pass\n')
|
||||
mocker.patch('ansible.executor.module_common._slurp', side_effect=lambda x: b'# License\ndef do_something():\n pass\n')
|
||||
|
||||
name = 'ping'
|
||||
data = b'#!/usr/bin/python\nfrom ansible.module_utils import foo'
|
||||
|
|
|
@ -57,10 +57,9 @@ class TestPlayIterator(unittest.TestCase):
|
|||
|
||||
new_hs = hs.copy()
|
||||
|
||||
|
||||
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
|
||||
def test_play_iterator(self):
|
||||
#import epdb; epdb.st()
|
||||
# import epdb; epdb.st()
|
||||
fake_loader = DictDataLoader({
|
||||
"test_play.yml": """
|
||||
- hosts: all
|
||||
|
@ -191,11 +190,11 @@ class TestPlayIterator(unittest.TestCase):
|
|||
self.assertEqual(task.name, "role always task")
|
||||
self.assertIsNotNone(task._role)
|
||||
# role include task
|
||||
#(host_state, task) = itr.get_next_task_for_host(hosts[0])
|
||||
#self.assertIsNotNone(task)
|
||||
#self.assertEqual(task.action, 'debug')
|
||||
#self.assertEqual(task.name, "role included task")
|
||||
#self.assertIsNotNone(task._role)
|
||||
# (host_state, task) = itr.get_next_task_for_host(hosts[0])
|
||||
# self.assertIsNotNone(task)
|
||||
# self.assertEqual(task.action, 'debug')
|
||||
# self.assertEqual(task.name, "role included task")
|
||||
# self.assertIsNotNone(task._role)
|
||||
# role task after include
|
||||
(host_state, task) = itr.get_next_task_for_host(hosts[0])
|
||||
self.assertIsNotNone(task)
|
||||
|
@ -427,7 +426,7 @@ class TestPlayIterator(unittest.TestCase):
|
|||
)
|
||||
|
||||
# test the high-level add_tasks() method
|
||||
s = HostState(blocks=[0,1,2])
|
||||
s = HostState(blocks=[0, 1, 2])
|
||||
itr._insert_tasks_into_state = MagicMock(return_value=s)
|
||||
itr.add_tasks(hosts[0], [MagicMock(), MagicMock(), MagicMock()])
|
||||
self.assertEqual(itr._host_states[hosts[0].name], s)
|
||||
|
|
|
@ -21,13 +21,13 @@ __metaclass__ = type
|
|||
|
||||
from ansible.compat.tests import unittest
|
||||
from ansible.compat.tests.mock import MagicMock
|
||||
|
||||
from ansible.executor.playbook_executor import PlaybookExecutor
|
||||
from ansible.playbook import Playbook
|
||||
from ansible.template import Templar
|
||||
|
||||
from units.mock.loader import DictDataLoader
|
||||
|
||||
|
||||
class TestPlaybookExecutor(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -96,46 +96,55 @@ class TestPlaybookExecutor(unittest.TestCase):
|
|||
playbook = Playbook.load(pbe._playbooks[0], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']])
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']])
|
||||
|
||||
playbook = Playbook.load(pbe._playbooks[1], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0','host1'],['host2','host3'],['host4','host5'],['host6','host7'],['host8','host9']])
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
|
||||
self.assertEqual(
|
||||
pbe._get_serialized_batches(play),
|
||||
[['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'], ['host6', 'host7'], ['host8', 'host9']]
|
||||
)
|
||||
|
||||
playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0','host1'],['host2','host3'],['host4','host5'],['host6','host7'],['host8','host9']])
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
|
||||
self.assertEqual(
|
||||
pbe._get_serialized_batches(play),
|
||||
[['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'], ['host6', 'host7'], ['host8', 'host9']]
|
||||
)
|
||||
|
||||
playbook = Playbook.load(pbe._playbooks[3], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0'],['host1','host2'],['host3','host4','host5'],['host6','host7','host8'],['host9']])
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
|
||||
self.assertEqual(
|
||||
pbe._get_serialized_batches(play),
|
||||
[['host0'], ['host1', 'host2'], ['host3', 'host4', 'host5'], ['host6', 'host7', 'host8'], ['host9']]
|
||||
)
|
||||
|
||||
playbook = Playbook.load(pbe._playbooks[4], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0'],['host1','host2'],['host3','host4','host5','host6','host7','host8','host9']])
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0'], ['host1', 'host2'], ['host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9']])
|
||||
|
||||
# Test when serial percent is under 1.0
|
||||
playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0'],['host1'],['host2']])
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2']
|
||||
self.assertEqual(pbe._get_serialized_batches(play), [['host0'], ['host1'], ['host2']])
|
||||
|
||||
# Test when there is a remainder for serial as a percent
|
||||
playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader)
|
||||
play = playbook.get_plays()[0]
|
||||
play.post_validate(templar)
|
||||
mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9','host10']
|
||||
mock_inventory.get_hosts.return_value = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5', 'host6', 'host7', 'host8', 'host9', 'host10']
|
||||
self.assertEqual(
|
||||
pbe._get_serialized_batches(play),
|
||||
[['host0','host1'],['host2','host3'],['host4','host5'],['host6','host7'],['host8','host9'],['host10']]
|
||||
[['host0', 'host1'], ['host2', 'host3'], ['host4', 'host5'], ['host6', 'host7'], ['host8', 'host9'], ['host10']]
|
||||
)
|
||||
|
|
|
@ -21,7 +21,6 @@ __metaclass__ = type
|
|||
|
||||
from ansible.compat.tests import unittest
|
||||
from ansible.compat.tests.mock import patch, MagicMock
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.executor.task_executor import TaskExecutor
|
||||
from ansible.playbook.play_context import PlayContext
|
||||
|
@ -30,6 +29,7 @@ from ansible.parsing.yaml.objects import AnsibleUnicode
|
|||
|
||||
from units.mock.loader import DictDataLoader
|
||||
|
||||
|
||||
class TestTaskExecutor(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -48,14 +48,14 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
job_vars = dict()
|
||||
mock_queue = MagicMock()
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = mock_shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=mock_shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
def test_task_executor_run(self):
|
||||
|
@ -75,14 +75,14 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
job_vars = dict()
|
||||
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = mock_shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=mock_shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
te._get_loop_items = MagicMock(return_value=None)
|
||||
|
@ -92,7 +92,7 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
te._get_loop_items = MagicMock(return_value=[])
|
||||
res = te.run()
|
||||
|
||||
te._get_loop_items = MagicMock(return_value=['a','b','c'])
|
||||
te._get_loop_items = MagicMock(return_value=['a', 'b', 'c'])
|
||||
te._run_loop = MagicMock(return_value=[dict(item='a', changed=True), dict(item='b', failed=True), dict(item='c')])
|
||||
res = te.run()
|
||||
|
||||
|
@ -119,14 +119,14 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
mock_queue = MagicMock()
|
||||
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = mock_shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=mock_shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
items = te._get_loop_items()
|
||||
|
@ -155,14 +155,14 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
job_vars = dict()
|
||||
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = mock_shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=mock_shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
def _execute(variables):
|
||||
|
@ -201,51 +201,49 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
job_vars = dict(pkg_mgr='yum')
|
||||
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = mock_shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=mock_shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
#
|
||||
# No replacement
|
||||
#
|
||||
mock_task.action = 'yum'
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, ['a', 'b', 'c'])
|
||||
self.assertIsInstance(mock_task.args, MagicMock)
|
||||
|
||||
mock_task.action = 'foo'
|
||||
mock_task.args={'name': '{{item}}'}
|
||||
mock_task.args = {'name': '{{item}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, ['a', 'b', 'c'])
|
||||
self.assertEqual(mock_task.args, {'name': '{{item}}'})
|
||||
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args={'name': 'static'}
|
||||
mock_task.args = {'name': 'static'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, ['a', 'b', 'c'])
|
||||
self.assertEqual(mock_task.args, {'name': 'static'})
|
||||
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args={'name': '{{pkg_mgr}}'}
|
||||
mock_task.args = {'name': '{{pkg_mgr}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, ['a', 'b', 'c'])
|
||||
self.assertEqual(mock_task.args, {'name': '{{pkg_mgr}}'})
|
||||
|
||||
mock_task.action = '{{unknown}}'
|
||||
mock_task.args={'name': '{{item}}'}
|
||||
mock_task.args = {'name': '{{item}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, ['a', 'b', 'c'])
|
||||
self.assertEqual(mock_task.args, {'name': '{{item}}'})
|
||||
|
||||
# Could do something like this to recover from bad deps in a package
|
||||
job_vars = dict(pkg_mgr='yum', packages=['a', 'b'])
|
||||
items = [ 'absent', 'latest' ]
|
||||
items = ['absent', 'latest']
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args = {'name': '{{ packages }}', 'state': '{{ item }}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
|
@ -261,7 +259,7 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
# an error later. If so, we can throw it now instead.
|
||||
# Squashing in this case would not be intuitive as the user is being
|
||||
# explicit in using each list entry as a key.
|
||||
job_vars = dict(pkg_mgr='yum', packages={ "a": "foo", "b": "bar", "foo": "baz", "bar": "quux" })
|
||||
job_vars = dict(pkg_mgr='yum', packages={"a": "foo", "b": "bar", "foo": "baz", "bar": "quux"})
|
||||
items = [['a', 'b'], ['foo', 'bar']]
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args = {'name': '{{ packages[item] }}'}
|
||||
|
@ -269,21 +267,19 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
self.assertEqual(new_items, items)
|
||||
self.assertEqual(mock_task.args, {'name': '{{ packages[item] }}'})
|
||||
|
||||
#
|
||||
# Replaces
|
||||
#
|
||||
items = ['a', 'b', 'c']
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args={'name': '{{item}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, [['a','c']])
|
||||
self.assertEqual(mock_task.args, {'name': ['a','c']})
|
||||
|
||||
mock_task.action = '{{pkg_mgr}}'
|
||||
mock_task.args={'name': '{{item}}'}
|
||||
mock_task.args = {'name': '{{item}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, [['a', 'c']])
|
||||
self.assertEqual(mock_task.args, {'name': ['a','c']})
|
||||
self.assertEqual(mock_task.args, {'name': ['a', 'c']})
|
||||
|
||||
mock_task.action = '{{pkg_mgr}}'
|
||||
mock_task.args = {'name': '{{item}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
self.assertEqual(new_items, [['a', 'c']])
|
||||
self.assertEqual(mock_task.args, {'name': ['a', 'c']})
|
||||
|
||||
# New loop_var
|
||||
mock_task.action = 'yum'
|
||||
|
@ -292,7 +288,7 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
loop_var = 'a_loop_var_item'
|
||||
new_items = te._squash_items(items=items, loop_var='a_loop_var_item', variables=job_vars)
|
||||
self.assertEqual(new_items, [['a', 'c']])
|
||||
self.assertEqual(mock_task.args, {'name': ['a','c']})
|
||||
self.assertEqual(mock_task.args, {'name': ['a', 'c']})
|
||||
loop_var = 'item'
|
||||
|
||||
#
|
||||
|
@ -307,8 +303,8 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
mock_task.action = 'yum'
|
||||
mock_task.args = {'name': '{{ item }}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
#self.assertEqual(new_items, [['a', 'b', 'foo', 'bar']])
|
||||
#self.assertEqual(mock_task.args, {'name': ['a', 'b', 'foo', 'bar']})
|
||||
# self.assertEqual(new_items, [['a', 'b', 'foo', 'bar']])
|
||||
# self.assertEqual(mock_task.args, {'name': ['a', 'b', 'foo', 'bar']})
|
||||
self.assertEqual(new_items, items)
|
||||
self.assertEqual(mock_task.args, {'name': '{{ item }}'})
|
||||
|
||||
|
@ -317,8 +313,8 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
mock_task.action = 'yum'
|
||||
mock_task.args = {'name': '{{ packages[item] }}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
#self.assertEqual(new_items, [['foo', 'baz']])
|
||||
#self.assertEqual(mock_task.args, {'name': ['foo', 'baz']})
|
||||
# self.assertEqual(new_items, [['foo', 'baz']])
|
||||
# self.assertEqual(mock_task.args, {'name': ['foo', 'baz']})
|
||||
self.assertEqual(new_items, items)
|
||||
self.assertEqual(mock_task.args, {'name': '{{ packages[item] }}'})
|
||||
|
||||
|
@ -328,35 +324,38 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
mock_task.action = 'yum'
|
||||
mock_task.args = {'name': '{{ item["package"] }}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
#self.assertEqual(new_items, [['foo', 'bar']])
|
||||
#self.assertEqual(mock_task.args, {'name': ['foo', 'bar']})
|
||||
# self.assertEqual(new_items, [['foo', 'bar']])
|
||||
# self.assertEqual(mock_task.args, {'name': ['foo', 'bar']})
|
||||
self.assertEqual(new_items, items)
|
||||
self.assertEqual(mock_task.args, {'name': '{{ item["package"] }}'})
|
||||
|
||||
items = [dict(name='a', state='present'),
|
||||
dict(name='b', state='present'),
|
||||
dict(name='c', state='present')]
|
||||
items = [
|
||||
dict(name='a', state='present'),
|
||||
dict(name='b', state='present'),
|
||||
dict(name='c', state='present'),
|
||||
]
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args={'name': '{{item.name}}', 'state': '{{item.state}}'}
|
||||
mock_task.args = {'name': '{{item.name}}', 'state': '{{item.state}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
#self.assertEqual(new_items, [dict(name=['a', 'b', 'c'], state='present')])
|
||||
#self.assertEqual(mock_task.args, {'name': ['a', 'b', 'c'], 'state': 'present'})
|
||||
# self.assertEqual(new_items, [dict(name=['a', 'b', 'c'], state='present')])
|
||||
# self.assertEqual(mock_task.args, {'name': ['a', 'b', 'c'], 'state': 'present'})
|
||||
self.assertEqual(new_items, items)
|
||||
self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})
|
||||
|
||||
items = [dict(name='a', state='present'),
|
||||
dict(name='b', state='present'),
|
||||
dict(name='c', state='absent')]
|
||||
items = [
|
||||
dict(name='a', state='present'),
|
||||
dict(name='b', state='present'),
|
||||
dict(name='c', state='absent'),
|
||||
]
|
||||
mock_task.action = 'yum'
|
||||
mock_task.args={'name': '{{item.name}}', 'state': '{{item.state}}'}
|
||||
mock_task.args = {'name': '{{item.name}}', 'state': '{{item.state}}'}
|
||||
new_items = te._squash_items(items=items, loop_var='item', variables=job_vars)
|
||||
#self.assertEqual(new_items, [dict(name=['a', 'b'], state='present'),
|
||||
# dict(name='c', state='absent')])
|
||||
#self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})
|
||||
# self.assertEqual(new_items, [dict(name=['a', 'b'], state='present'),
|
||||
# dict(name='c', state='absent')])
|
||||
# self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})
|
||||
self.assertEqual(new_items, items)
|
||||
self.assertEqual(mock_task.args, {'name': '{{item.name}}', 'state': '{{item.state}}'})
|
||||
|
||||
|
||||
def test_task_executor_execute(self):
|
||||
fake_loader = DictDataLoader({})
|
||||
|
||||
|
@ -394,14 +393,14 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
|
||||
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
te._get_connection = MagicMock(return_value=mock_connection)
|
||||
|
@ -433,7 +432,7 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
|
||||
mock_task = MagicMock()
|
||||
mock_task.async = 0.1
|
||||
mock_task.poll = 0.05
|
||||
mock_task.poll = 0.05
|
||||
|
||||
mock_play_context = MagicMock()
|
||||
|
||||
|
@ -449,14 +448,14 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
|
||||
|
||||
te = TaskExecutor(
|
||||
host = mock_host,
|
||||
task = mock_task,
|
||||
job_vars = job_vars,
|
||||
play_context = mock_play_context,
|
||||
new_stdin = new_stdin,
|
||||
loader = fake_loader,
|
||||
shared_loader_obj = shared_loader,
|
||||
rslt_q = mock_queue,
|
||||
host=mock_host,
|
||||
task=mock_task,
|
||||
job_vars=job_vars,
|
||||
play_context=mock_play_context,
|
||||
new_stdin=new_stdin,
|
||||
loader=fake_loader,
|
||||
shared_loader_obj=shared_loader,
|
||||
rslt_q=mock_queue,
|
||||
)
|
||||
|
||||
te._connection = MagicMock()
|
||||
|
@ -485,4 +484,3 @@ class TestTaskExecutor(unittest.TestCase):
|
|||
mock_templar = MagicMock()
|
||||
res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
|
||||
self.assertEqual(res, dict(finished=1))
|
||||
|
||||
|
|
|
@@ -24,6 +24,7 @@ from ansible.compat.tests.mock import patch, MagicMock

from ansible.executor.task_result import TaskResult


class TestTaskResult(unittest.TestCase):
def test_task_result_basic(self):
mock_host = MagicMock()

@@ -29,6 +29,7 @@ from ansible.vars.manager import VariableManager

from units.mock.loader import DictDataLoader


class TestInventory(unittest.TestCase):

patterns = {

@@ -38,7 +39,7 @@ class TestInventory(unittest.TestCase):
' a,b ,c[1:2] ': ['a', 'b', 'c[1:2]'],
'9a01:7f8:191:7701::9': ['9a01:7f8:191:7701::9'],
'9a01:7f8:191:7701::9,9a01:7f8:191:7701::9': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9'],
'9a01:7f8:191:7701::9,9a01:7f8:191:7701::9,foo': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9','foo'],
'9a01:7f8:191:7701::9,9a01:7f8:191:7701::9,foo': ['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9', 'foo'],
'foo[1:2]': ['foo[1:2]'],
'a::b': ['a::b'],
'a:b': ['a', 'b'],

@@ -51,7 +52,7 @@ class TestInventory(unittest.TestCase):
[['a', 'b'], ['a', 'b']],
[['a, b'], ['a', 'b']],
[['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9,foo'],
['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9','foo']]
['9a01:7f8:191:7701::9', '9a01:7f8:191:7701::9', 'foo']]
]

# pattern_string: [ ('base_pattern', (a,b)), ['x','y','z'] ]

@@ -59,7 +60,7 @@ class TestInventory(unittest.TestCase):
# when applied to string.ascii_letters.

subscripts = {
'a': [('a',None), list(string.ascii_letters)],
'a': [('a', None), list(string.ascii_letters)],
'a[0]': [('a', (0, None)), ['a']],
'a[1]': [('a', (1, None)), ['b']],
'a[2:3]': [('a', (2, 3)), ['c', 'd']],

@@ -143,9 +144,9 @@ class InventoryDefaultGroup(unittest.TestCase):

def _get_inventory(self, inventory_content):

fake_loader = DictDataLoader({ __file__: inventory_content})
fake_loader = DictDataLoader({__file__: inventory_content})

return InventoryManager(loader=fake_loader, sources=[__file__])
return InventoryManager(loader=fake_loader, sources=[__file__])

def _test_default_groups(self, inventory_content):
inventory = self._get_inventory(inventory_content)

@@ -21,6 +21,7 @@ __metaclass__ = type

from collections import Mapping


def make_method(func, args, kwargs):

def test_method(self):

@@ -70,7 +70,7 @@ class DictDataLoader(DataLoader):
self._known_directories.append(directory)

def _build_known_directories(self):
self._known_directories = []
self._known_directories = []
for path in self._file_mapping:
dirname = os.path.dirname(path)
while dirname not in ('/', ''):

@@ -20,8 +20,8 @@
from __future__ import (absolute_import, division)
__metaclass__ = type

import sys
import json
import sys

from units.mock.procenv import swap_stdin_and_argv

@@ -31,23 +31,21 @@ from ansible.compat.tests.mock import MagicMock

class TestModuleUtilsBasic(unittest.TestCase):
def test_module_utils_basic__log_invocation(self):
with swap_stdin_and_argv(stdin_data=json.dumps(
dict(
ANSIBLE_MODULE_ARGS=dict(
foo=False, bar=[1,2,3], bam="bam", baz=u'baz'),
))):
with swap_stdin_and_argv(stdin_data=json.dumps(dict(
ANSIBLE_MODULE_ARGS=dict(foo=False, bar=[1, 2, 3], bam="bam", baz=u'baz')),
)):
from ansible.module_utils import basic

# test basic log invocation
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec=dict(
foo = dict(default=True, type='bool'),
bar = dict(default=[], type='list'),
bam = dict(default="bam"),
baz = dict(default=u"baz"),
password = dict(default=True),
no_log = dict(default="you shouldn't see me", no_log=True),
foo=dict(default=True, type='bool'),
bar=dict(default=[], type='list'),
bam=dict(default="bam"),
baz=dict(default=u"baz"),
password=dict(default=True),
no_log=dict(default="you shouldn't see me", no_log=True),
),
)

@@ -73,13 +71,14 @@ class TestModuleUtilsBasic(unittest.TestCase):
self.assertIn(' password=NOT_LOGGING_PASSWORD', message)

kwargs = am.log.call_args[1]
self.assertEqual(kwargs,
dict(log_args={
'foo': 'False',
'bar': '[1, 2, 3]',
'bam': 'bam',
'baz': 'baz',
'password': 'NOT_LOGGING_PASSWORD',
'no_log': 'NOT_LOGGING_PARAMETER',
})
)
self.assertEqual(
kwargs,
dict(log_args={
'foo': 'False',
'bar': '[1, 2, 3]',
'bam': 'bam',
'baz': 'baz',
'password': 'NOT_LOGGING_PASSWORD',
'no_log': 'NOT_LOGGING_PARAMETER',
})
)

@@ -34,7 +34,7 @@ class TestAnsibleModuleWarnDeprecate(unittest.TestCase):

ansible.module_utils.basic._ANSIBLE_ARGS = None
am = ansible.module_utils.basic.AnsibleModule(
argument_spec = dict(),
argument_spec=dict(),
)
am._name = 'unittest'

@@ -51,7 +51,7 @@ class TestAnsibleModuleWarnDeprecate(unittest.TestCase):

ansible.module_utils.basic._ANSIBLE_ARGS = None
am = ansible.module_utils.basic.AnsibleModule(
argument_spec = dict(),
argument_spec=dict(),
)
am._name = 'unittest'

@@ -76,7 +76,7 @@ class TestAnsibleModuleWarnDeprecate(unittest.TestCase):

ansible.module_utils.basic._ANSIBLE_ARGS = None
am = ansible.module_utils.basic.AnsibleModule(
argument_spec = dict(),
argument_spec=dict(),
)
am._name = 'unittest'

@@ -25,13 +25,13 @@ import json
import sys

from ansible.compat.tests import unittest
from units.mock.procenv import swap_stdin_and_argv, swap_stdout

from ansible.module_utils import basic
from units.mock.procenv import swap_stdin_and_argv, swap_stdout


empty_invocation = {u'module_args': {}}


class TestAnsibleModuleExitJson(unittest.TestCase):
def setUp(self):
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={}))

@@ -89,31 +89,35 @@ class TestAnsibleModuleExitJson(unittest.TestCase):
return_val = json.loads(self.fake_stream.getvalue())
self.assertEquals(return_val, dict(changed=True, msg='success', invocation=empty_invocation))


class TestAnsibleModuleExitValuesRemoved(unittest.TestCase):
OMIT = 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
dataset = (
(dict(username='person', password='$ecret k3y'),
dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/',
not_secret='following the leader', msg='here'),
dict(one=1, pwd=OMIT, url='https://username:password12345@foo.com/login/',
not_secret='following the leader', changed=False, msg='here',
invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
),
(dict(username='person', password='password12345'),
dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/',
not_secret='following the leader', msg='here'),
dict(one=1, pwd='$ecret k3y', url='https://username:********@foo.com/login/',
not_secret='following the leader', changed=False, msg='here',
invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
),
(dict(username='person', password='$ecret k3y'),
dict(one=1, pwd='$ecret k3y', url='https://username:$ecret k3y@foo.com/login/',
not_secret='following the leader', msg='here'),
dict(one=1, pwd=OMIT, url='https://username:********@foo.com/login/',
not_secret='following the leader', changed=False, msg='here',
invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
),
)
(
dict(username='person', password='$ecret k3y'),
dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/',
not_secret='following the leader', msg='here'),
dict(one=1, pwd=OMIT, url='https://username:password12345@foo.com/login/',
not_secret='following the leader', changed=False, msg='here',
invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
),
(
dict(username='person', password='password12345'),
dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/',
not_secret='following the leader', msg='here'),
dict(one=1, pwd='$ecret k3y', url='https://username:********@foo.com/login/',
not_secret='following the leader', changed=False, msg='here',
invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
),
(
dict(username='person', password='$ecret k3y'),
dict(one=1, pwd='$ecret k3y', url='https://username:$ecret k3y@foo.com/login/',
not_secret='following the leader', msg='here'),
dict(one=1, pwd=OMIT, url='https://username:********@foo.com/login/',
not_secret='following the leader', changed=False, msg='here',
invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
),
)

def test_exit_json_removes_values(self):
self.maxDiff = None

@@ -125,12 +129,12 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase):
with swap_stdout():
basic._ANSIBLE_ARGS = None
module = basic.AnsibleModule(
argument_spec = dict(
argument_spec=dict(
username=dict(),
password=dict(no_log=True),
token=dict(no_log=True),
),
)
),
)
with self.assertRaises(SystemExit) as ctx:
self.assertEquals(module.exit_json(**return_val), expected)
self.assertEquals(json.loads(sys.stdout.getvalue()), expected)

@@ -147,12 +151,12 @@ class TestAnsibleModuleExitValuesRemoved(unittest.TestCase):
with swap_stdout():
basic._ANSIBLE_ARGS = None
module = basic.AnsibleModule(
argument_spec = dict(
argument_spec=dict(
username=dict(),
password=dict(no_log=True),
token=dict(no_log=True),
),
)
),
)
with self.assertRaises(SystemExit) as ctx:
self.assertEquals(module.fail_json(**return_val), expected)
self.assertEquals(json.loads(sys.stdout.getvalue()), expected)

@@ -25,9 +25,9 @@ import syslog

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock

from ansible.module_utils.basic import heuristic_log_sanitize


class TestHeuristicLogSanitize(unittest.TestCase):
def setUp(self):
self.URL_SECRET = 'http://username:pas:word@foo.com/data'

@@ -39,13 +39,16 @@ class TestHeuristicLogSanitize(unittest.TestCase):
def _gen_data(self, records, per_rec, top_level, secret_text):
hostvars = {'hostvars': {}}
for i in range(1, records, 1):
host_facts = {'host%s' % i:
{'pstack':
{'running': '875.1',
'symlinked': '880.0',
'tars': [],
'versions': ['885.0']},
}}
host_facts = {
'host%s' % i: {
'pstack': {
'running': '875.1',
'symlinked': '880.0',
'tars': [],
'versions': ['885.0']
},
}
}
if per_rec:
host_facts['host%s' % i]['secret'] = secret_text
hostvars['hostvars'].update(host_facts)

@ -51,7 +51,7 @@ class TestAnsibleModuleSysLogSmokeTest(unittest.TestCase):
|
|||
|
||||
ansible.module_utils.basic._ANSIBLE_ARGS = None
|
||||
self.am = ansible.module_utils.basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
self.am._name = 'unittest'
|
||||
|
||||
|
@ -89,7 +89,7 @@ class TestAnsibleModuleJournaldSmokeTest(unittest.TestCase):
|
|||
|
||||
ansible.module_utils.basic._ANSIBLE_ARGS = None
|
||||
self.am = ansible.module_utils.basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
self.am._name = 'unittest'
|
||||
|
||||
|
@ -120,7 +120,7 @@ class TestAnsibleModuleLogSyslog(unittest.TestCase):
|
|||
b'Byte string': b'Byte string',
|
||||
u'Toshio くらとみ non-ascii test'.encode('utf-8'): u'Toshio くらとみ non-ascii test'.encode('utf-8'),
|
||||
b'non-utf8 :\xff: test': b'non-utf8 :\xff: test'.decode('utf-8', 'replace').encode('utf-8'),
|
||||
}
|
||||
}
|
||||
|
||||
py3_output_data = {
|
||||
u'Text string': u'Text string',
|
||||
|
@ -128,7 +128,7 @@ class TestAnsibleModuleLogSyslog(unittest.TestCase):
|
|||
b'Byte string': u'Byte string',
|
||||
u'Toshio くらとみ non-ascii test'.encode('utf-8'): u'Toshio くらとみ non-ascii test',
|
||||
b'non-utf8 :\xff: test': b'non-utf8 :\xff: test'.decode('utf-8', 'replace')
|
||||
}
|
||||
}
|
||||
|
||||
def setUp(self):
|
||||
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={}))
|
||||
|
@ -138,7 +138,7 @@ class TestAnsibleModuleLogSyslog(unittest.TestCase):
|
|||
|
||||
ansible.module_utils.basic._ANSIBLE_ARGS = None
|
||||
self.am = ansible.module_utils.basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
self.am._name = 'unittest'
|
||||
|
||||
|
@ -189,7 +189,7 @@ class TestAnsibleModuleLogJournal(unittest.TestCase):
|
|||
b'Byte string': u'Byte string',
|
||||
u'Toshio くらとみ non-ascii test'.encode('utf-8'): u'Toshio くらとみ non-ascii test',
|
||||
b'non-utf8 :\xff: test': b'non-utf8 :\xff: test'.decode('utf-8', 'replace')
|
||||
}
|
||||
}
|
||||
|
||||
# overriding run lets us use context managers for setup/teardown-esque behavior
|
||||
def setUp(self):
|
||||
|
@ -200,7 +200,7 @@ class TestAnsibleModuleLogJournal(unittest.TestCase):
|
|||
|
||||
ansible.module_utils.basic._ANSIBLE_ARGS = None
|
||||
self.am = ansible.module_utils.basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
self.am._name = 'unittest'
|
||||
|
||||
|
@ -270,4 +270,3 @@ class TestAnsibleModuleLogJournal(unittest.TestCase):
|
|||
# We added this journal field
|
||||
self.assertIn('TEST', mock_func.call_args[1])
|
||||
self.assertIn('log unittest', mock_func.call_args[1]['TEST'])
|
||||
|
||||
|
|
|
@ -42,10 +42,21 @@ class TestReturnValues(unittest.TestCase):
|
|||
(['1', '2', '3'], frozenset(['1', '2', '3'])),
|
||||
(('1', '2', '3'), frozenset(['1', '2', '3'])),
|
||||
({'one': 1, 'two': 'dos'}, frozenset(['1', 'dos'])),
|
||||
({'one': 1, 'two': 'dos',
|
||||
'three': ['amigos', 'musketeers', None,
|
||||
{'ping': 'pong', 'base': ('balls', 'raquets')}]},
|
||||
frozenset(['1', 'dos', 'amigos', 'musketeers', 'pong', 'balls', 'raquets'])),
|
||||
(
|
||||
{
|
||||
'one': 1,
|
||||
'two': 'dos',
|
||||
'three': [
|
||||
'amigos', 'musketeers', None, {
|
||||
'ping': 'pong',
|
||||
'base': (
|
||||
'balls', 'raquets'
|
||||
)
|
||||
}
|
||||
]
|
||||
},
|
||||
frozenset(['1', 'dos', 'amigos', 'musketeers', 'pong', 'balls', 'raquets'])
|
||||
),
|
||||
(u'Toshio くらとみ', frozenset(['Toshio くらとみ'])),
|
||||
('Toshio くらとみ', frozenset(['Toshio くらとみ'])),
|
||||
)
|
||||
|
@ -67,13 +78,22 @@ class TestRemoveValues(unittest.TestCase):
|
|||
(1.0, frozenset(['4321'])),
|
||||
(['string', 'strang', 'strung'], frozenset(['nope'])),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['nope'])),
|
||||
({'one': 1, 'two': 'dos',
|
||||
'three': ['amigos', 'musketeers', None,
|
||||
{'ping': 'pong', 'base': ['balls', 'raquets']}]},
|
||||
frozenset(['nope'])),
|
||||
(
|
||||
{
|
||||
'one': 1,
|
||||
'two': 'dos',
|
||||
'three': [
|
||||
'amigos', 'musketeers', None, {
|
||||
'ping': 'pong', 'base': ['balls', 'raquets']
|
||||
}
|
||||
]
|
||||
},
|
||||
frozenset(['nope'])
|
||||
),
|
||||
('Toshio くら', frozenset(['とみ'])),
|
||||
(u'Toshio くら', frozenset(['とみ'])),
|
||||
)
|
||||
)
|
||||
|
||||
dataset_remove = (
|
||||
('string', frozenset(['string']), OMIT),
|
||||
(1234, frozenset(['1234']), OMIT),
|
||||
|
@ -84,23 +104,43 @@ class TestRemoveValues(unittest.TestCase):
|
|||
(('string', 'strang', 'strung'), frozenset(['string', 'strung']), [OMIT, 'strang', OMIT]),
|
||||
((1234567890, 345678, 987654321), frozenset(['1234567890']), [OMIT, 345678, 987654321]),
|
||||
((1234567890, 345678, 987654321), frozenset(['345678']), [OMIT, OMIT, 987654321]),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['key']),
|
||||
{'one': 1, 'two': 'dos', 'secret': OMIT}),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['key', 'dos', '1']),
|
||||
{'one': OMIT, 'two': OMIT, 'secret': OMIT}),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['key', 'dos', '1']),
|
||||
{'one': OMIT, 'two': OMIT, 'secret': OMIT}),
|
||||
({'one': 1, 'two': 'dos', 'three': ['amigos', 'musketeers', None,
|
||||
{'ping': 'pong', 'base': ['balls', 'raquets']}]},
|
||||
frozenset(['balls', 'base', 'pong', 'amigos']),
|
||||
{'one': 1, 'two': 'dos', 'three': [OMIT, 'musketeers',
|
||||
None, {'ping': OMIT, 'base': [OMIT, 'raquets']}]}),
|
||||
('This sentence has an enigma wrapped in a mystery inside of a secret. - mr mystery',
|
||||
frozenset(['enigma', 'mystery', 'secret']),
|
||||
'This sentence has an ******** wrapped in a ******** inside of a ********. - mr ********'),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['key']), {'one': 1, 'two': 'dos', 'secret': OMIT}),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['key', 'dos', '1']), {'one': OMIT, 'two': OMIT, 'secret': OMIT}),
|
||||
({'one': 1, 'two': 'dos', 'secret': 'key'}, frozenset(['key', 'dos', '1']), {'one': OMIT, 'two': OMIT, 'secret': OMIT}),
|
||||
(
|
||||
{
|
||||
'one': 1,
|
||||
'two': 'dos',
|
||||
'three': [
|
||||
'amigos', 'musketeers', None, {
|
||||
'ping': 'pong', 'base': [
|
||||
'balls', 'raquets'
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
frozenset(['balls', 'base', 'pong', 'amigos']),
|
||||
{
|
||||
'one': 1,
|
||||
'two': 'dos',
|
||||
'three': [
|
||||
OMIT, 'musketeers', None, {
|
||||
'ping': OMIT,
|
||||
'base': [
|
||||
OMIT, 'raquets'
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
),
|
||||
(
|
||||
'This sentence has an enigma wrapped in a mystery inside of a secret. - mr mystery',
|
||||
frozenset(['enigma', 'mystery', 'secret']),
|
||||
'This sentence has an ******** wrapped in a ******** inside of a ********. - mr ********'
|
||||
),
|
||||
('Toshio くらとみ', frozenset(['くらとみ']), 'Toshio ********'),
|
||||
(u'Toshio くらとみ', frozenset(['くらとみ']), u'Toshio ********'),
|
||||
)
|
||||
)
|
||||
|
||||
def test_no_removal(self):
|
||||
for value, no_log_strings in self.dataset_no_remove:
|
||||
|
@ -112,5 +152,3 @@ class TestRemoveValues(unittest.TestCase):
|
|||
|
||||
def test_unknown_type(self):
|
||||
self.assertRaises(TypeError, remove_values, object(), frozenset())
|
||||
|
||||
|
||||
|
|
|
@ -27,13 +27,13 @@ from io import BytesIO, StringIO
|
|||
|
||||
import pytest
|
||||
|
||||
from ansible.module_utils.six import PY3
|
||||
from ansible.compat.tests import unittest
|
||||
from ansible.compat.tests.mock import call, MagicMock, Mock, patch, sentinel
|
||||
from ansible.module_utils.six import PY3
|
||||
import ansible.module_utils.basic
|
||||
|
||||
from units.mock.procenv import swap_stdin_and_argv
|
||||
|
||||
import ansible.module_utils.basic
|
||||
|
||||
class OpenBytesIO(BytesIO):
|
||||
"""BytesIO with dummy close() method
|
||||
|
@ -68,7 +68,7 @@ class TestAnsibleModuleRunCommand(unittest.TestCase):
|
|||
if path.startswith('/'):
|
||||
return path
|
||||
else:
|
||||
return self.os.getcwd.return_value + '/' + path
|
||||
return self.os.getcwd.return_value + '/' + path
|
||||
|
||||
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={}))
|
||||
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
|
||||
|
@ -207,4 +207,3 @@ class TestAnsibleModuleRunCommand(unittest.TestCase):
|
|||
else:
|
||||
self.assertEqual(stdout.decode('utf-8'), u'Žarn§')
|
||||
self.assertEqual(stderr.decode('utf-8'), u'لرئيسية')
|
||||
|
||||
|
|
|
@ -36,12 +36,12 @@ VALID_STRINGS = (
|
|||
[("True", True)],
|
||||
[("False", False)],
|
||||
[("{}", {})],
|
||||
)
|
||||
)
|
||||
|
||||
# Passing things that aren't strings should just return the object
|
||||
NONSTRINGS = (
|
||||
[({'a':1}, {'a':1})],
|
||||
)
|
||||
[({'a': 1}, {'a': 1})],
|
||||
)
|
||||
|
||||
# These strings are not basic types. For security, these should not be
|
||||
# executed. We return the same string and get an exception for some
|
||||
|
@ -50,25 +50,29 @@ INVALID_STRINGS = (
|
|||
[("a.foo()", "a.foo()", None)],
|
||||
[("import foo", "import foo", None)],
|
||||
[("__import__('foo')", "__import__('foo')", ValueError)],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _check_simple_types(self, code, expected):
|
||||
# test some basic usage for various types
|
||||
self.assertEqual(self.am.safe_eval(code), expected)
|
||||
|
||||
|
||||
def _check_simple_types_with_exceptions(self, code, expected):
|
||||
# Test simple types with exceptions requested
|
||||
self.assertEqual(self.am.safe_eval(code, include_exceptions=True), (expected, None))
|
||||
|
||||
|
||||
def _check_invalid_strings(self, code, expected):
|
||||
self.assertEqual(self.am.safe_eval(code), expected)
|
||||
|
||||
|
||||
def _check_invalid_strings_with_exceptions(self, code, expected, exception):
|
||||
res = self.am.safe_eval("a=1", include_exceptions=True)
|
||||
self.assertEqual(res[0], "a=1")
|
||||
self.assertEqual(type(res[1]), SyntaxError)
|
||||
|
||||
|
||||
@add_method(_check_simple_types, *VALID_STRINGS)
|
||||
@add_method(_check_simple_types, *NONSTRINGS)
|
||||
@add_method(_check_simple_types_with_exceptions, *VALID_STRINGS)
|
||||
|
|
|
@ -35,6 +35,7 @@ from ansible.module_utils import known_hosts
|
|||
from units.mock.procenv import ModuleTestCase
|
||||
from units.mock.generator import add_method
|
||||
|
||||
|
||||
class TestSetModeIfDifferentBase(ModuleTestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -49,7 +50,7 @@ class TestSetModeIfDifferentBase(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
self.am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
|
@ -60,8 +61,10 @@ class TestSetModeIfDifferentBase(ModuleTestCase):
|
|||
|
||||
def _check_no_mode_given_returns_previous_changes(self, previous_changes=True):
|
||||
with patch('os.lstat', side_effect=[self.mock_stat1]):
|
||||
|
||||
self.assertEqual(self.am.set_mode_if_different('/path/to/file', None, previous_changes), previous_changes)
|
||||
|
||||
|
||||
def _check_mode_changed_to_0660(self, mode):
|
||||
# Note: This is for checking that all the different ways of specifying
|
||||
# 0660 mode work. It cannot be used to check that setting a mode that is
|
||||
|
@ -71,6 +74,7 @@ def _check_mode_changed_to_0660(self, mode):
|
|||
self.assertEqual(self.am.set_mode_if_different('/path/to/file', mode, False), True)
|
||||
m_lchmod.assert_called_with(b'/path/to/file', 0o660)
|
||||
|
||||
|
||||
def _check_mode_unchanged_when_already_0660(self, mode):
|
||||
# Note: This is for checking that all the different ways of specifying
|
||||
# 0660 mode work. It cannot be used to check that setting a mode that is
|
||||
|
@ -83,18 +87,12 @@ SYNONYMS_0660 = (
|
|||
[[0o660]],
|
||||
[['0o660']],
|
||||
[['660']],
|
||||
)
|
||||
)
|
||||
|
||||
@add_method(_check_no_mode_given_returns_previous_changes,
|
||||
[dict(previous_changes=True)],
|
||||
[dict(previous_changes=False)],
|
||||
)
|
||||
@add_method(_check_mode_changed_to_0660,
|
||||
*SYNONYMS_0660
|
||||
)
|
||||
@add_method(_check_mode_unchanged_when_already_0660,
|
||||
*SYNONYMS_0660
|
||||
)
|
||||
|
||||
@add_method(_check_no_mode_given_returns_previous_changes, [dict(previous_changes=True)], [dict(previous_changes=False)], )
|
||||
@add_method(_check_mode_changed_to_0660, *SYNONYMS_0660)
|
||||
@add_method(_check_mode_unchanged_when_already_0660, *SYNONYMS_0660)
|
||||
class TestSetModeIfDifferent(TestSetModeIfDifferentBase):
|
||||
def test_module_utils_basic_ansible_module_set_mode_if_different(self):
|
||||
with patch('os.lstat') as m:
|
||||
|
@ -105,6 +103,7 @@ class TestSetModeIfDifferent(TestSetModeIfDifferentBase):
|
|||
self.am.set_mode_if_different('/path/to/file', 'o+w,g+w,a-r', False)
|
||||
|
||||
original_hasattr = hasattr
|
||||
|
||||
def _hasattr(obj, name):
|
||||
if obj == os and name == 'lchmod':
|
||||
return False
|
||||
|
@ -131,16 +130,10 @@ def _check_knows_to_change_to_0660_in_check_mode(self, mode):
|
|||
with patch('os.lstat', side_effect=[self.mock_stat1, self.mock_stat2, self.mock_stat2]) as m_lstat:
|
||||
self.assertEqual(self.am.set_mode_if_different('/path/to/file', mode, False), True)
|
||||
|
||||
@add_method(_check_no_mode_given_returns_previous_changes,
|
||||
[dict(previous_changes=True)],
|
||||
[dict(previous_changes=False)],
|
||||
)
|
||||
@add_method(_check_knows_to_change_to_0660_in_check_mode,
|
||||
*SYNONYMS_0660
|
||||
)
|
||||
@add_method(_check_mode_unchanged_when_already_0660,
|
||||
*SYNONYMS_0660
|
||||
)
|
||||
|
||||
@add_method(_check_no_mode_given_returns_previous_changes, [dict(previous_changes=True)], [dict(previous_changes=False)],)
|
||||
@add_method(_check_knows_to_change_to_0660_in_check_mode, *SYNONYMS_0660)
|
||||
@add_method(_check_mode_unchanged_when_already_0660, *SYNONYMS_0660)
|
||||
class TestSetModeIfDifferentWithCheckMode(TestSetModeIfDifferentBase):
|
||||
def setUp(self):
|
||||
super(TestSetModeIfDifferentWithCheckMode, self).setUp()
|
||||
|
|
|
@ -69,7 +69,7 @@ class RetryTestCase(unittest.TestCase):
|
|||
self.counter += 1
|
||||
raise botocore.exceptions.ClientError(err_msg, 'toooo fast!!')
|
||||
|
||||
#with self.assertRaises(botocore.exceptions.ClientError):
|
||||
# with self.assertRaises(botocore.exceptions.ClientError):
|
||||
try:
|
||||
fail()
|
||||
except Exception as e:
|
||||
|
@ -85,7 +85,7 @@ class RetryTestCase(unittest.TestCase):
|
|||
self.counter += 1
|
||||
raise botocore.exceptions.ClientError(err_msg, 'unexpected error')
|
||||
|
||||
#with self.assertRaises(botocore.exceptions.ClientError):
|
||||
# with self.assertRaises(botocore.exceptions.ClientError):
|
||||
try:
|
||||
raise_unexpected_error()
|
||||
except Exception as e:
|
||||
|
|
|
@ -22,11 +22,12 @@ __metaclass__ = type
|
|||
|
||||
import json
|
||||
|
||||
from ansible.compat.tests import unittest
|
||||
from nose.tools import eq_, raises
|
||||
|
||||
from ansible.compat.tests import unittest
|
||||
from ansible.module_utils.json_utils import _filter_non_json_lines
|
||||
|
||||
|
||||
class TestAnsibleModuleExitJson(unittest.TestCase):
|
||||
single_line_json_dict = u"""{"key": "value", "olá": "mundo"}"""
|
||||
single_line_json_array = u"""["a","b","c"]"""
|
||||
|
@ -38,10 +39,12 @@ class TestAnsibleModuleExitJson(unittest.TestCase):
|
|||
"b",
|
||||
"c"]"""
|
||||
|
||||
all_inputs = [single_line_json_dict,
|
||||
single_line_json_array,
|
||||
multi_line_json_dict,
|
||||
multi_line_json_array]
|
||||
all_inputs = [
|
||||
single_line_json_dict,
|
||||
single_line_json_array,
|
||||
multi_line_json_dict,
|
||||
multi_line_json_array
|
||||
]
|
||||
|
||||
junk = [u"single line of junk", u"line 1/2 of junk\nline 2/2 of junk"]
|
||||
|
||||
|
@ -51,7 +54,7 @@ class TestAnsibleModuleExitJson(unittest.TestCase):
|
|||
u'{"No json": "ending"',
|
||||
u'{"wrong": "ending"]',
|
||||
u'["wrong": "ending"}',
|
||||
)
|
||||
)
|
||||
|
||||
def test_just_json(self):
|
||||
for i in self.all_inputs:
|
||||
|
@ -82,7 +85,8 @@ class TestAnsibleModuleExitJson(unittest.TestCase):
|
|||
|
||||
def test_unparsable_filter_non_json_lines(self):
|
||||
for i in self.unparsable_cases:
|
||||
self.assertRaises(ValueError,
|
||||
self.assertRaises(
|
||||
ValueError,
|
||||
lambda data: _filter_non_json_lines(data),
|
||||
data=i
|
||||
)
|
||||
|
|
|
@ -35,6 +35,7 @@ from ansible.module_utils.six.moves import builtins
|
|||
|
||||
realimport = builtins.__import__
|
||||
|
||||
|
||||
class TestModuleUtilsBasic(ModuleTestCase):
|
||||
|
||||
def clear_modules(self, mods):
|
||||
|
@ -95,10 +96,10 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
mod = builtins.__import__('ansible.module_utils.basic')
|
||||
|
||||
# FIXME: doesn't work yet
|
||||
#@patch.object(builtins, 'bytes')
|
||||
#def test_module_utils_basic_bytes(self, mock_bytes):
|
||||
# mock_bytes.side_effect = NameError()
|
||||
# from ansible.module_utils import basic
|
||||
# @patch.object(builtins, 'bytes')
|
||||
# def test_module_utils_basic_bytes(self, mock_bytes):
|
||||
# mock_bytes.side_effect = NameError()
|
||||
# from ansible.module_utils import basic
|
||||
|
||||
@patch.object(builtins, '__import__')
|
||||
@unittest.skipIf(sys.version_info[0] >= 3, "literal_eval is available in every version of Python3")
|
||||
|
@ -118,12 +119,12 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
self.assertEqual(mod.module_utils.basic.literal_eval("'1'"), "1")
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("1"), 1)
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("-1"), -1)
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("(1,2,3)"), (1,2,3))
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("(1,2,3)"), (1, 2, 3))
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("[1]"), [1])
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("True"), True)
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("False"), False)
|
||||
self.assertEqual(mod.module_utils.basic.literal_eval("None"), None)
|
||||
#self.assertEqual(mod.module_utils.basic.literal_eval('{"a": 1}'), dict(a=1))
|
||||
# self.assertEqual(mod.module_utils.basic.literal_eval('{"a": 1}'), dict(a=1))
|
||||
self.assertRaises(ValueError, mod.module_utils.basic.literal_eval, "asdfasdfasdf")
|
||||
|
||||
@patch.object(builtins, '__import__')
|
||||
|
@ -162,13 +163,13 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
self.assertEqual(get_distribution(), "Foo")
|
||||
|
||||
with patch('os.path.isfile', return_value=True):
|
||||
with patch('platform.linux_distribution', side_effect=[("AmazonFooBar",)]):
|
||||
with patch('platform.linux_distribution', side_effect=[("AmazonFooBar", )]):
|
||||
self.assertEqual(get_distribution(), "Amazonfoobar")
|
||||
|
||||
with patch('platform.linux_distribution', side_effect=(("",), ("AmazonFooBam",))):
|
||||
with patch('platform.linux_distribution', side_effect=(("", ), ("AmazonFooBam",))):
|
||||
self.assertEqual(get_distribution(), "Amazon")
|
||||
|
||||
with patch('platform.linux_distribution', side_effect=[("",),("",)]):
|
||||
with patch('platform.linux_distribution', side_effect=[("", ), ("", )]):
|
||||
self.assertEqual(get_distribution(), "OtherLinux")
|
||||
|
||||
def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
|
||||
|
@ -241,11 +242,11 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
from ansible.module_utils.basic import json_dict_unicode_to_bytes, json_dict_bytes_to_unicode
|
||||
|
||||
test_data = dict(
|
||||
item1 = u"Fóo",
|
||||
item2 = [u"Bár", u"Bam"],
|
||||
item3 = dict(sub1=u"Súb"),
|
||||
item4 = (u"föo", u"bär", u"©"),
|
||||
item5 = 42,
|
||||
item1=u"Fóo",
|
||||
item2=[u"Bár", u"Bam"],
|
||||
item3=dict(sub1=u"Súb"),
|
||||
item4=(u"föo", u"bär", u"©"),
|
||||
item5=42,
|
||||
)
|
||||
res = json_dict_unicode_to_bytes(test_data)
|
||||
res2 = json_dict_bytes_to_unicode(res)
|
||||
|
@ -265,10 +266,10 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
)
|
||||
|
||||
arg_spec = dict(
|
||||
foo = dict(required=True),
|
||||
bar = dict(),
|
||||
bam = dict(),
|
||||
baz = dict(),
|
||||
foo=dict(required=True),
|
||||
bar=dict(),
|
||||
bam=dict(),
|
||||
baz=dict(),
|
||||
)
|
||||
mut_ex = (('bar', 'bam'),)
|
||||
req_to = (('bam', 'baz'),)
|
||||
|
@ -279,9 +280,9 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
with swap_stdin_and_argv(stdin_data=args):
|
||||
basic._ANSIBLE_ARGS = None
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = arg_spec,
|
||||
mutually_exclusive = mut_ex,
|
||||
required_together = req_to,
|
||||
argument_spec=arg_spec,
|
||||
mutually_exclusive=mut_ex,
|
||||
required_together=req_to,
|
||||
no_log=True,
|
||||
check_invalid_arguments=False,
|
||||
add_file_common_args=True,
|
||||
|
@ -298,9 +299,9 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
self.assertRaises(
|
||||
SystemExit,
|
||||
basic.AnsibleModule,
|
||||
argument_spec = arg_spec,
|
||||
mutually_exclusive = mut_ex,
|
||||
required_together = req_to,
|
||||
argument_spec=arg_spec,
|
||||
mutually_exclusive=mut_ex,
|
||||
required_together=req_to,
|
||||
no_log=True,
|
||||
check_invalid_arguments=False,
|
||||
add_file_common_args=True,
|
||||
|
@ -308,16 +309,16 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
)
|
||||
|
||||
# fail because of mutually exclusive parameters
|
||||
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={"foo":"hello", "bar": "bad", "bam": "bad"}))
|
||||
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={"foo": "hello", "bar": "bad", "bam": "bad"}))
|
||||
|
||||
with swap_stdin_and_argv(stdin_data=args):
|
||||
basic._ANSIBLE_ARGS = None
|
||||
self.assertRaises(
|
||||
SystemExit,
|
||||
basic.AnsibleModule,
|
||||
argument_spec = arg_spec,
|
||||
mutually_exclusive = mut_ex,
|
||||
required_together = req_to,
|
||||
argument_spec=arg_spec,
|
||||
mutually_exclusive=mut_ex,
|
||||
required_together=req_to,
|
||||
no_log=True,
|
||||
check_invalid_arguments=False,
|
||||
add_file_common_args=True,
|
||||
|
@ -332,9 +333,9 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
self.assertRaises(
|
||||
SystemExit,
|
||||
basic.AnsibleModule,
|
||||
argument_spec = arg_spec,
|
||||
mutually_exclusive = mut_ex,
|
||||
required_together = req_to,
|
||||
argument_spec=arg_spec,
|
||||
mutually_exclusive=mut_ex,
|
||||
required_together=req_to,
|
||||
no_log=True,
|
||||
check_invalid_arguments=False,
|
||||
add_file_common_args=True,
|
||||
|
@ -345,26 +346,26 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
from ansible.module_utils import basic
|
||||
|
||||
arg_spec = dict(
|
||||
foo = dict(type='float'),
|
||||
foo2 = dict(type='float'),
|
||||
foo3 = dict(type='float'),
|
||||
bar = dict(type='int'),
|
||||
bar2 = dict(type='int'),
|
||||
foo=dict(type='float'),
|
||||
foo2=dict(type='float'),
|
||||
foo3=dict(type='float'),
|
||||
bar=dict(type='int'),
|
||||
bar2=dict(type='int'),
|
||||
)
|
||||
|
||||
# should test ok
|
||||
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={
|
||||
"foo": 123.0, # float
|
||||
"foo2": 123, # int
|
||||
"foo3": "123", # string
|
||||
"bar": 123, # int
|
||||
"bar2": "123", # string
|
||||
"foo": 123.0, # float
|
||||
"foo2": 123, # int
|
||||
"foo3": "123", # string
|
||||
"bar": 123, # int
|
||||
"bar2": "123", # string
|
||||
}))
|
||||
|
||||
with swap_stdin_and_argv(stdin_data=args):
|
||||
basic._ANSIBLE_ARGS = None
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = arg_spec,
|
||||
argument_spec=arg_spec,
|
||||
no_log=True,
|
||||
check_invalid_arguments=False,
|
||||
add_file_common_args=True,
|
||||
|
@ -379,7 +380,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
self.assertRaises(
|
||||
SystemExit,
|
||||
basic.AnsibleModule,
|
||||
argument_spec = arg_spec,
|
||||
argument_spec=arg_spec,
|
||||
no_log=True,
|
||||
check_invalid_arguments=False,
|
||||
add_file_common_args=True,
|
||||
|
@ -391,7 +392,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
am.selinux_mls_enabled = MagicMock()
|
||||
|
@ -404,25 +405,25 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
self.assertEqual(res, dict())
|
||||
|
||||
base_params = dict(
|
||||
path = '/path/to/file',
|
||||
mode = 0o600,
|
||||
owner = 'root',
|
||||
group = 'root',
|
||||
seuser = '_default',
|
||||
serole = '_default',
|
||||
setype = '_default',
|
||||
selevel = '_default',
|
||||
path='/path/to/file',
|
||||
mode=0o600,
|
||||
owner='root',
|
||||
group='root',
|
||||
seuser='_default',
|
||||
serole='_default',
|
||||
setype='_default',
|
||||
selevel='_default',
|
||||
)
|
||||
|
||||
extended_params = base_params.copy()
|
||||
extended_params.update(dict(
|
||||
follow = True,
|
||||
foo = 'bar',
|
||||
follow=True,
|
||||
foo='bar',
|
||||
))
|
||||
|
||||
final_params = base_params.copy()
|
||||
final_params.update(dict(
|
||||
path = '/path/to/real_file',
|
||||
path='/path/to/real_file',
|
||||
secontext=['unconfined_u', 'object_r', 'default_t', 's0'],
|
||||
attributes=None,
|
||||
))
|
||||
|
@ -441,7 +442,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
basic.HAVE_SELINUX = False
|
||||
|
@ -461,7 +462,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
am.selinux_mls_enabled = MagicMock()
|
||||
|
@ -475,7 +476,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
# we first test the cases where the python selinux lib is
|
||||
|
@ -486,7 +487,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
am.get_bin_path = MagicMock()
|
||||
am.get_bin_path.return_value = '/path/to/selinuxenabled'
|
||||
am.run_command = MagicMock()
|
||||
am.run_command.return_value=(0, '', '')
|
||||
am.run_command.return_value = (0, '', '')
|
||||
self.assertRaises(SystemExit, am.selinux_enabled)
|
||||
am.get_bin_path.return_value = None
|
||||
self.assertEqual(am.selinux_enabled(), False)
|
||||
|
@ -507,7 +508,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
am.selinux_initial_context = MagicMock(return_value=[None, None, None, None])
|
||||
|
@ -543,7 +544,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
am.selinux_initial_context = MagicMock(return_value=[None, None, None, None])
|
||||
|
@ -588,7 +589,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
with swap_stdin_and_argv(stdin_data=args):
|
||||
basic._ANSIBLE_ARGS = None
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
def _mock_find_mount_point(path):
|
||||
|
@ -628,7 +629,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
mock_stat = MagicMock()
|
||||
|
@ -643,7 +644,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
def _mock_ismount(path):
|
||||
|
@ -669,7 +670,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
basic.HAVE_SELINUX = False
|
||||
|
@ -714,7 +715,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
self.assertEqual(am.set_owner_if_different('/path/to/file', None, True), True)
|
||||
|
@ -753,7 +754,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
self.assertEqual(am.set_group_if_different('/path/to/file', None, True), True)
|
||||
|
@ -824,7 +825,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
environ = dict()
|
||||
|
@ -885,7 +886,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
stat1.st_mode = 0o0644
|
||||
stat1.st_uid = 0
|
||||
stat1.st_gid = 0
|
||||
_os_stat.side_effect = [stat1,]
|
||||
_os_stat.side_effect = [stat1, ]
|
||||
am.selinux_enabled.return_value = False
|
||||
_os_chmod.reset_mock()
|
||||
_os_chown.reset_mock()
|
||||
|
@ -904,7 +905,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
stat1.st_mode = 0o0644
|
||||
stat1.st_uid = 0
|
||||
stat1.st_gid = 0
|
||||
_os_stat.side_effect = [stat1,]
|
||||
_os_stat.side_effect = [stat1, ]
|
||||
mock_context = MagicMock()
|
||||
am.selinux_context.return_value = mock_context
|
||||
am.selinux_enabled.return_value = True
|
||||
|
@ -1000,7 +1001,7 @@ class TestModuleUtilsBasic(ModuleTestCase):
|
|||
basic._ANSIBLE_ARGS = None
|
||||
|
||||
am = basic.AnsibleModule(
|
||||
argument_spec = dict(),
|
||||
argument_spec=dict(),
|
||||
)
|
||||
|
||||
mock_stat = MagicMock()
|
||||
|
|
|
@ -41,125 +41,134 @@ TESTSETS = [
|
|||
"centos",
|
||||
"7.2.1511",
|
||||
"Core"
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "CentOS Linux release 7.2.1511 (Core) \n",
|
||||
"/etc/os-release": ("NAME=\"CentOS Linux\"\nVERSION=\"7 (Core)\"\nID=\"centos\"\nID_LIKE=\"rhel fedora\"\nVERSION_ID=\"7\"\n"
|
||||
"PRETTY_NAME=\"CentOS Linux 7 (Core)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:centos:centos:7\"\n"
|
||||
"HOME_URL=\"https://www.centos.org/\"\nBUG_REPORT_URL=\"https://bugs.centos.org/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-7\"\n"
|
||||
"CENTOS_MANTISBT_PROJECT_VERSION=\"7\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"7\"\n\n"),
|
||||
"/etc/os-release": (
|
||||
"NAME=\"CentOS Linux\"\nVERSION=\"7 (Core)\"\nID=\"centos\"\nID_LIKE=\"rhel fedora\"\nVERSION_ID=\"7\"\n"
|
||||
"PRETTY_NAME=\"CentOS Linux 7 (Core)\"\nANSI_COLOR=\"0;31\"\nCPE_NAME=\"cpe:/o:centos:centos:7\"\n"
|
||||
"HOME_URL=\"https://www.centos.org/\"\nBUG_REPORT_URL=\"https://bugs.centos.org/\"\n\nCENTOS_MANTISBT_PROJECT=\"CentOS-7\"\n"
|
||||
"CENTOS_MANTISBT_PROJECT_VERSION=\"7\"\nREDHAT_SUPPORT_PRODUCT=\"centos\"\nREDHAT_SUPPORT_PRODUCT_VERSION=\"7\"\n\n"
|
||||
),
|
||||
"/etc/system-release": "CentOS Linux release 7.2.1511 (Core) \n"
|
||||
},
|
||||
},
|
||||
"name": "CentOS 7.2.1511",
|
||||
"result": {
|
||||
"distribution_release": "Core",
|
||||
"distribution": "CentOS",
|
||||
"distribution_major_version": "7",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "7.2.1511"
|
||||
}
|
||||
},
|
||||
"distribution_version": "7.2.1511",
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "CentOS 6.7",
|
||||
"platform.dist": [
|
||||
"centos",
|
||||
"6.7",
|
||||
"Final"
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "CentOS release 6.7 (Final)\n",
|
||||
"/etc/lsb-release": ("LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:"
|
||||
"printing-4.0-amd64:printing-4.0-noarch\n"),
|
||||
"/etc/lsb-release": (
|
||||
"LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:"
|
||||
"printing-4.0-amd64:printing-4.0-noarch\n"
|
||||
),
|
||||
"/etc/system-release": "CentOS release 6.7 (Final)\n"
|
||||
},
|
||||
},
|
||||
"result": {
|
||||
"distribution_release": "Final",
|
||||
"distribution": "CentOS",
|
||||
"distribution_major_version": "6",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "6.7"
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "RedHat 7.2",
|
||||
"platform.dist": [
|
||||
"redhat",
|
||||
"7.2",
|
||||
"Maipo"
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "Red Hat Enterprise Linux Server release 7.2 (Maipo)\n",
|
||||
"/etc/os-release": ("NAME=\"Red Hat Enterprise Linux Server\"\nVERSION=\"7.2 (Maipo)\"\nID=\"rhel\"\nID_LIKE=\"fedora\"\nVERSION_ID=\"7.2\"\n"
|
||||
"PRETTY_NAME=\"Red Hat Enterprise Linux Server 7.2 (Maipo)\"\nANSI_COLOR=\"0;31\"\n"
|
||||
"CPE_NAME=\"cpe:/o:redhat:enterprise_linux:7.2:GA:server\"\nHOME_URL=\"https://www.redhat.com/\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\n\nREDHAT_BUGZILLA_PRODUCT=\"Red Hat Enterprise Linux 7\"\n"
|
||||
"REDHAT_BUGZILLA_PRODUCT_VERSION=7.2\nREDHAT_SUPPORT_PRODUCT=\"Red Hat Enterprise Linux\"\n"
|
||||
"REDHAT_SUPPORT_PRODUCT_VERSION=\"7.2\"\n"),
|
||||
"/etc/os-release": (
|
||||
"NAME=\"Red Hat Enterprise Linux Server\"\nVERSION=\"7.2 (Maipo)\"\nID=\"rhel\"\nID_LIKE=\"fedora\"\nVERSION_ID=\"7.2\"\n"
|
||||
"PRETTY_NAME=\"Red Hat Enterprise Linux Server 7.2 (Maipo)\"\nANSI_COLOR=\"0;31\"\n"
|
||||
"CPE_NAME=\"cpe:/o:redhat:enterprise_linux:7.2:GA:server\"\nHOME_URL=\"https://www.redhat.com/\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\n\nREDHAT_BUGZILLA_PRODUCT=\"Red Hat Enterprise Linux 7\"\n"
|
||||
"REDHAT_BUGZILLA_PRODUCT_VERSION=7.2\nREDHAT_SUPPORT_PRODUCT=\"Red Hat Enterprise Linux\"\n"
|
||||
"REDHAT_SUPPORT_PRODUCT_VERSION=\"7.2\"\n"
|
||||
),
|
||||
"/etc/system-release": "Red Hat Enterprise Linux Server release 7.2 (Maipo)\n"
|
||||
},
|
||||
},
|
||||
"result": {
|
||||
"distribution_release": "Maipo",
|
||||
"distribution": "RedHat",
|
||||
"distribution_major_version": "7",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "7.2"
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "RedHat 6.7",
|
||||
"platform.dist": [
|
||||
"redhat",
|
||||
"6.7",
|
||||
"Santiago"
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "Red Hat Enterprise Linux Server release 6.7 (Santiago)\n",
|
||||
"/etc/lsb-release": ("LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:"
|
||||
"printing-4.0-amd64:printing-4.0-noarch\n"),
|
||||
"/etc/lsb-release": (
|
||||
"LSB_VERSION=base-4.0-amd64:base-4.0-noarch:core-4.0-amd64:core-4.0-noarch:graphics-4.0-amd64:graphics-4.0-noarch:"
|
||||
"printing-4.0-amd64:printing-4.0-noarch\n"
|
||||
),
|
||||
"/etc/system-release": "Red Hat Enterprise Linux Server release 6.7 (Santiago)\n"
|
||||
},
|
||||
},
|
||||
"result": {
|
||||
"distribution_release": "Santiago",
|
||||
"distribution": "RedHat",
|
||||
"distribution_major_version": "6",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "6.7"
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Virtuozzo 7.3",
|
||||
"platform.dist": [
|
||||
"redhat",
|
||||
"7.3",
|
||||
""
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "Virtuozzo Linux release 7.3\n",
|
||||
"/etc/os-release": ("NAME=\"Virtuozzo\"\n"
|
||||
"VERSION=\"7.0.3\"\n"
|
||||
"ID=\"virtuozzo\"\n"
|
||||
"ID_LIKE=\"rhel fedora\"\n"
|
||||
"VERSION_ID=\"7\"\n"
|
||||
"PRETTY_NAME=\"Virtuozzo release 7.0.3\"\n"
|
||||
"ANSI_COLOR=\"0;31\"\n"
|
||||
"CPE_NAME=\"cpe:/o:virtuozzoproject:vz:7\"\n"
|
||||
"HOME_URL=\"http://www.virtuozzo.com\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugs.openvz.org/\"\n"),
|
||||
"/etc/os-release": (
|
||||
"NAME=\"Virtuozzo\"\n"
|
||||
"VERSION=\"7.0.3\"\n"
|
||||
"ID=\"virtuozzo\"\n"
|
||||
"ID_LIKE=\"rhel fedora\"\n"
|
||||
"VERSION_ID=\"7\"\n"
|
||||
"PRETTY_NAME=\"Virtuozzo release 7.0.3\"\n"
|
||||
"ANSI_COLOR=\"0;31\"\n"
|
||||
"CPE_NAME=\"cpe:/o:virtuozzoproject:vz:7\"\n"
|
||||
"HOME_URL=\"http://www.virtuozzo.com\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugs.openvz.org/\"\n"
|
||||
),
|
||||
"/etc/system-release": "Virtuozzo release 7.0.3 (640)\n"
|
||||
},
|
||||
},
|
||||
"result": {
|
||||
"distribution_release": "NA",
|
||||
"distribution": "Virtuozzo",
|
||||
"distribution_major_version": "7",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "7.3"
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"name" : "openSUSE Leap 42.1",
|
||||
"name": "openSUSE Leap 42.1",
|
||||
"input": {
|
||||
"/etc/os-release":
|
||||
"""
|
||||
"/etc/os-release": """
|
||||
NAME="openSUSE Leap"
|
||||
VERSION="42.1"
|
||||
VERSION_ID="42.1"
|
||||
|
@ -170,16 +179,16 @@ CPE_NAME="cpe:/o:opensuse:opensuse:42.1"
|
|||
BUG_REPORT_URL="https://bugs.opensuse.org"
|
||||
HOME_URL="https://opensuse.org/"
|
||||
ID_LIKE="suse"
|
||||
""",
|
||||
"/etc/SuSE-release":"""
|
||||
""",
|
||||
"/etc/SuSE-release": """
|
||||
openSUSE 42.1 (x86_64)
|
||||
VERSION = 42.1
|
||||
CODENAME = Malachite
|
||||
# /etc/SuSE-release is deprecated and will be removed in the future, use /etc/os-release instead
|
||||
"""
|
||||
"""
|
||||
},
|
||||
"platform.dist": ['SuSE', '42.1', 'x86_64'],
|
||||
"result":{
|
||||
"result": {
|
||||
"distribution": "openSUSE Leap",
|
||||
"distribution_major_version": "42",
|
||||
"distribution_release": "1",
|
||||
|
@ -189,12 +198,13 @@ CODENAME = Malachite
|
|||
},
|
||||
{
|
||||
'name': 'openSUSE 13.2',
|
||||
'input': {'/etc/SuSE-release': """openSUSE 13.2 (x86_64)
|
||||
'input': {
|
||||
'/etc/SuSE-release': """openSUSE 13.2 (x86_64)
|
||||
VERSION = 13.2
|
||||
CODENAME = Harlequin
|
||||
# /etc/SuSE-release is deprecated and will be removed in the future, use /etc/os-release instead
|
||||
""",
|
||||
'/etc/os-release': """NAME=openSUSE
|
||||
'/etc/os-release': """NAME=openSUSE
|
||||
VERSION="13.2 (Harlequin)"
|
||||
VERSION_ID="13.2"
|
||||
PRETTY_NAME="openSUSE 13.2 (Harlequin) (x86_64)"
|
||||
|
@ -204,13 +214,16 @@ CPE_NAME="cpe:/o:opensuse:opensuse:13.2"
|
|||
BUG_REPORT_URL="https://bugs.opensuse.org"
|
||||
HOME_URL="https://opensuse.org/"
|
||||
ID_LIKE="suse"
|
||||
"""},
|
||||
"""
|
||||
},
|
||||
'platform.dist': ('SuSE', '13.2', 'x86_64'),
|
||||
'result': {'distribution': u'openSUSE',
|
||||
'distribution_major_version': u'13',
|
||||
'distribution_release': u'2',
|
||||
'os_family': u'Suse',
|
||||
'distribution_version': u'13.2'}
|
||||
'result': {
|
||||
'distribution': u'openSUSE',
|
||||
'distribution_major_version': u'13',
|
||||
'distribution_release': u'2',
|
||||
'os_family': u'Suse',
|
||||
'distribution_version': u'13.2'
|
||||
}
|
||||
},
|
||||
{
|
||||
"platform.dist": [
|
||||
|
@ -219,9 +232,11 @@ ID_LIKE="suse"
|
|||
""
|
||||
],
|
||||
"input": {
|
||||
"/etc/os-release": ("NAME=\"openSUSE Tumbleweed\"\n# VERSION=\"20160917\"\nID=opensuse\nID_LIKE=\"suse\"\nVERSION_ID=\"20160917\"\n"
|
||||
"PRETTY_NAME=\"openSUSE Tumbleweed\"\nANSI_COLOR=\"0;32\"\nCPE_NAME=\"cpe:/o:opensuse:tumbleweed:20160917\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugs.opensuse.org\"\nHOME_URL=\"https://www.opensuse.org/\"\n")
|
||||
"/etc/os-release": (
|
||||
"NAME=\"openSUSE Tumbleweed\"\n# VERSION=\"20160917\"\nID=opensuse\nID_LIKE=\"suse\"\nVERSION_ID=\"20160917\"\n"
|
||||
"PRETTY_NAME=\"openSUSE Tumbleweed\"\nANSI_COLOR=\"0;32\"\nCPE_NAME=\"cpe:/o:opensuse:tumbleweed:20160917\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugs.opensuse.org\"\nHOME_URL=\"https://www.opensuse.org/\"\n"
|
||||
)
|
||||
},
|
||||
"name": "openSUSE Tumbleweed 20160917",
|
||||
"result": {
|
||||
|
@ -235,14 +250,14 @@ ID_LIKE="suse"
|
|||
{ # see https://github.com/ansible/ansible/issues/14837
|
||||
"name": "SLES 11.3",
|
||||
"input": {
|
||||
"/etc/SuSE-release":"""
|
||||
"/etc/SuSE-release": """
|
||||
SUSE Linux Enterprise Server 11 (x86_64)
|
||||
VERSION = 11
|
||||
PATCHLEVEL = 3
|
||||
"""
|
||||
"""
|
||||
},
|
||||
"platform.dist": ['SuSE', '11', 'x86_64'],
|
||||
"result":{
|
||||
"result": {
|
||||
"distribution": "SLES",
|
||||
"distribution_major_version": "11",
|
||||
"distribution_release": "3",
|
||||
|
@ -253,12 +268,12 @@ PATCHLEVEL = 3
|
|||
{ # see https://github.com/ansible/ansible/issues/14837
|
||||
"name": "SLES 11.4",
|
||||
"input": {
|
||||
"/etc/SuSE-release":"""
|
||||
"/etc/SuSE-release": """
|
||||
SUSE Linux Enterprise Server 11 (x86_64)
|
||||
VERSION = 11
|
||||
PATCHLEVEL = 4
|
||||
""",
|
||||
"/etc/os-release":"""
|
||||
""",
|
||||
"/etc/os-release": """
|
||||
NAME="SLES"
|
||||
VERSION="11.4"
|
||||
VERSION_ID="11.4"
|
||||
|
@ -266,7 +281,7 @@ PRETTY_NAME="SUSE Linux Enterprise Server 11 SP4"
|
|||
ID="sles"
|
||||
ANSI_COLOR="0;32"
|
||||
CPE_NAME="cpe:/o:suse:sles:11:4"
|
||||
""",
|
||||
""",
|
||||
},
|
||||
"platform.dist": ['SuSE', '11', 'x86_64'],
|
||||
"result":{
|
||||
|
@ -280,14 +295,14 @@ CPE_NAME="cpe:/o:suse:sles:11:4"
|
|||
{ # see https://github.com/ansible/ansible/issues/14837
|
||||
"name": "SLES 12 SP0",
|
||||
"input": {
|
||||
"/etc/SuSE-release":"""
|
||||
"/etc/SuSE-release": """
|
||||
SUSE Linux Enterprise Server 12 (x86_64)
|
||||
VERSION = 12
|
||||
PATCHLEVEL = 0
|
||||
# This file is deprecated and will be removed in a future service pack or release.
|
||||
# Please check /etc/os-release for details about this release.
|
||||
""",
|
||||
"/etc/os-release":"""
|
||||
""",
|
||||
"/etc/os-release": """
|
||||
NAME="SLES"
|
||||
VERSION="12"
|
||||
VERSION_ID="12"
|
||||
|
@ -295,10 +310,10 @@ PRETTY_NAME="SUSE Linux Enterprise Server 12"
|
|||
ID="sles"
|
||||
ANSI_COLOR="0;32"
|
||||
CPE_NAME="cpe:/o:suse:sles:12"
|
||||
""",
|
||||
""",
|
||||
},
|
||||
"platform.dist": ['SuSE', '12', 'x86_64'],
|
||||
"result":{
|
||||
"result": {
|
||||
"distribution": "SLES",
|
||||
"distribution_major_version": "12",
|
||||
"distribution_release": "0",
|
||||
|
@ -306,18 +321,17 @@ CPE_NAME="cpe:/o:suse:sles:12"
|
|||
"distribution_version": "12",
|
||||
}
|
||||
},
|
||||
|
||||
{ # see https://github.com/ansible/ansible/issues/14837
|
||||
"name": "SLES 12 SP1",
|
||||
"input": {
|
||||
"/etc/SuSE-release":"""
|
||||
"/etc/SuSE-release": """
|
||||
SUSE Linux Enterprise Server 12 (x86_64)
|
||||
VERSION = 12
|
||||
PATCHLEVEL = 0
|
||||
# This file is deprecated and will be removed in a future service pack or release.
|
||||
# Please check /etc/os-release for details about this release.
|
||||
""",
|
||||
"/etc/os-release":"""
|
||||
""",
|
||||
"/etc/os-release": """
|
||||
NAME="SLES"
|
||||
VERSION="12-SP1"
|
||||
VERSION_ID="12.1"
|
||||
|
@ -340,20 +354,20 @@ CPE_NAME="cpe:/o:suse:sles:12:sp1"
|
|||
{
|
||||
"name": "Debian stretch/sid",
|
||||
"input": {
|
||||
"/etc/os-release":"""
|
||||
"/etc/os-release": """
|
||||
PRETTY_NAME="Debian GNU/Linux stretch/sid"
|
||||
NAME="Debian GNU/Linux"
|
||||
ID=debian
|
||||
HOME_URL="https://www.debian.org/"
|
||||
SUPPORT_URL="https://www.debian.org/support"
|
||||
BUG_REPORT_URL="https://bugs.debian.org/"
|
||||
""",
|
||||
"/etc/debian_version":"""
|
||||
""",
|
||||
"/etc/debian_version": """
|
||||
stretch/sid
|
||||
""",
|
||||
},
|
||||
"platform.dist": ('debian', 'stretch/sid', ''),
|
||||
"result":{
|
||||
"result": {
|
||||
"distribution": "Debian",
|
||||
"distribution_major_version": "stretch/sid",
|
||||
"distribution_release": "NA",
|
||||
|
@ -363,7 +377,8 @@ BUG_REPORT_URL="https://bugs.debian.org/"
|
|||
},
|
||||
{
'name': "Debian 7.9",
'input': {'/etc/os-release': """PRETTY_NAME="Debian GNU/Linux 7 (wheezy)"
'input': {
'/etc/os-release': """PRETTY_NAME="Debian GNU/Linux 7 (wheezy)"
NAME="Debian GNU/Linux"
VERSION_ID="7"
VERSION="7 (wheezy)"
@@ -372,13 +387,16 @@ ANSI_COLOR="1;31"
HOME_URL="http://www.debian.org/"
SUPPORT_URL="http://www.debian.org/support/"
BUG_REPORT_URL="http://bugs.debian.org/"
"""},
"""
},
'platform.dist': ('debian', '7.9', ''),
'result': {'distribution': u'Debian',
'distribution_major_version': u'7',
'distribution_release': u'wheezy',
"os_family": "Debian",
'distribution_version': u'7.9'}
'result': {
'distribution': u'Debian',
'distribution_major_version': u'7',
'distribution_release': u'wheezy',
"os_family": "Debian",
'distribution_version': u'7.9'
}
},
{
"platform.dist": [
@@ -387,9 +405,11 @@ BUG_REPORT_URL="http://bugs.debian.org/"
"xenial"
],
"input": {
"/etc/os-release": ("NAME=\"Ubuntu\"\nVERSION=\"16.04 LTS (Xenial Xerus)\"\nID=ubuntu\nID_LIKE=debian\nPRETTY_NAME=\"Ubuntu 16.04 LTS\"\n"
"VERSION_ID=\"16.04\"\nHOME_URL=\"http://www.ubuntu.com/\"\nSUPPORT_URL=\"http://help.ubuntu.com/\"\n"
"BUG_REPORT_URL=\"http://bugs.launchpad.net/ubuntu/\"\nUBUNTU_CODENAME=xenial\n"),
"/etc/os-release": (
"NAME=\"Ubuntu\"\nVERSION=\"16.04 LTS (Xenial Xerus)\"\nID=ubuntu\nID_LIKE=debian\nPRETTY_NAME=\"Ubuntu 16.04 LTS\"\n"
"VERSION_ID=\"16.04\"\nHOME_URL=\"http://www.ubuntu.com/\"\nSUPPORT_URL=\"http://help.ubuntu.com/\"\n"
"BUG_REPORT_URL=\"http://bugs.launchpad.net/ubuntu/\"\nUBUNTU_CODENAME=xenial\n"
),
"/etc/lsb-release": "DISTRIB_ID=Ubuntu\nDISTRIB_RELEASE=16.04\nDISTRIB_CODENAME=xenial\nDISTRIB_DESCRIPTION=\"Ubuntu 16.04 LTS\"\n"
},
"name": "Ubuntu 16.04",
@@ -403,12 +423,13 @@ BUG_REPORT_URL="http://bugs.debian.org/"
},
{
'name': "Ubuntu 14.04",
'input': {'/etc/lsb-release': """DISTRIB_ID=Ubuntu
'input': {
'/etc/lsb-release': """DISTRIB_ID=Ubuntu
DISTRIB_RELEASE=14.04
DISTRIB_CODENAME=trusty
DISTRIB_DESCRIPTION="Ubuntu 14.04.4 LTS"
""",
'/etc/os-release': """NAME="Ubuntu"
'/etc/os-release': """NAME="Ubuntu"
VERSION="14.04.4 LTS, Trusty Tahr"
ID=ubuntu
ID_LIKE=debian
@@ -417,13 +438,16 @@ VERSION_ID="14.04"
HOME_URL="http://www.ubuntu.com/"
SUPPORT_URL="http://help.ubuntu.com/"
BUG_REPORT_URL="http://bugs.launchpad.net/ubuntu/"
"""},
"""
},
'platform.dist': ('Ubuntu', '14.04', 'trusty'),
'result': {'distribution': u'Ubuntu',
'distribution_major_version': u'14',
'distribution_release': u'trusty',
"os_family": "Debian",
'distribution_version': u'14.04'}
'result': {
'distribution': u'Ubuntu',
'distribution_major_version': u'14',
'distribution_release': u'trusty',
"os_family": "Debian",
'distribution_version': u'14.04'
}
},
{
|
||||
'name': "Ubuntu 12.04",
|
||||
|
@ -470,7 +494,7 @@ VERSION_ID="12.04"
|
|||
{
|
||||
'name': 'Core OS',
|
||||
'input': {
|
||||
'/etc/os-release':"""
|
||||
'/etc/os-release': """
|
||||
NAME=CoreOS
|
||||
ID=coreos
|
||||
VERSION=976.0.0
|
||||
|
@ -480,15 +504,15 @@ PRETTY_NAME="CoreOS 976.0.0 (Coeur Rouge)"
|
|||
ANSI_COLOR="1;32"
|
||||
HOME_URL="https://coreos.com/"
|
||||
BUG_REPORT_URL="https://github.com/coreos/bugs/issues"
|
||||
""",
|
||||
'/etc/lsb-release':"""DISTRIB_ID=CoreOS
|
||||
""",
|
||||
'/etc/lsb-release': """DISTRIB_ID=CoreOS
|
||||
DISTRIB_RELEASE=976.0.0
|
||||
DISTRIB_CODENAME="Coeur Rouge"
|
||||
DISTRIB_DESCRIPTION="CoreOS 976.0.0 (Coeur Rouge)"
|
||||
""",
|
||||
},
|
||||
'platform.dist': ('', '', ''),
|
||||
'result' : {
|
||||
'result': {
|
||||
"distribution": "CoreOS",
|
||||
"distribution_major_version": "NA",
|
||||
"distribution_release": "NA",
|
||||
|
@ -679,87 +703,90 @@ DISTRIB_DESCRIPTION="CoreOS 976.0.0 (Coeur Rouge)"
|
|||
},
|
||||
"platform.system": "SunOS"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "Solaris 11.3",
|
||||
"platform.dist": [
|
||||
"",
|
||||
"",
|
||||
""
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/release": (" Oracle Solaris 11.3 X86\n Copyright (c) 1983, 2015, Oracle and/or its affiliates. "
|
||||
"All rights reserved.\n Assembled 06 October 2015\n")
|
||||
},
|
||||
"/etc/release": (
|
||||
" Oracle Solaris 11.3 X86\n Copyright (c) 1983, 2015, Oracle and/or its affiliates. "
|
||||
"All rights reserved.\n Assembled 06 October 2015\n"
|
||||
)
|
||||
},
|
||||
"platform.system": "SunOS",
|
||||
"result": {
|
||||
"distribution_release": "Oracle Solaris 11.3 X86",
|
||||
"distribution": "Solaris",
|
||||
"os_family": "Solaris",
|
||||
"distribution_version": "11.3"
|
||||
}
|
||||
},
|
||||
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Solaris 10",
|
||||
"platform.dist": [
|
||||
"",
|
||||
"",
|
||||
""
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/release": (" Oracle Solaris 10 1/13 s10x_u11wos_24a X86\n Copyright (c) 1983, 2013, Oracle and/or its affiliates. "
|
||||
"All rights reserved.\n Assembled 17 January 2013\n")
|
||||
},
|
||||
},
|
||||
"platform.system": "SunOS",
|
||||
"result": {
|
||||
"distribution_release": "Oracle Solaris 10 1/13 s10x_u11wos_24a X86",
|
||||
"distribution": "Solaris",
|
||||
"os_family": "Solaris",
|
||||
"distribution_version": "10"
|
||||
}
|
||||
},
|
||||
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Fedora 22",
|
||||
"platform.dist": [
|
||||
"fedora",
|
||||
"22",
|
||||
"Twenty Two"
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "Fedora release 22 (Twenty Two)\n",
|
||||
"/etc/os-release": ("NAME=Fedora\nVERSION=\"22 (Twenty Two)\"\nID=fedora\nVERSION_ID=22\nPRETTY_NAME=\"Fedora 22 (Twenty Two)\"\n"
|
||||
"ANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:22\"\nHOME_URL=\"https://fedoraproject.org/\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=22\n"
|
||||
"REDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=22\n"
|
||||
"PRIVACY_POLICY_URL=https://fedoraproject.org/wiki/Legal:PrivacyPolicy\n"),
|
||||
"/etc/os-release": (
|
||||
"NAME=Fedora\nVERSION=\"22 (Twenty Two)\"\nID=fedora\nVERSION_ID=22\nPRETTY_NAME=\"Fedora 22 (Twenty Two)\"\n"
|
||||
"ANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:22\"\nHOME_URL=\"https://fedoraproject.org/\"\n"
|
||||
"BUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\nREDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=22\n"
|
||||
"REDHAT_SUPPORT_PRODUCT=\"Fedora\"\nREDHAT_SUPPORT_PRODUCT_VERSION=22\n"
|
||||
"PRIVACY_POLICY_URL=https://fedoraproject.org/wiki/Legal:PrivacyPolicy\n"
|
||||
),
|
||||
"/etc/system-release": "Fedora release 22 (Twenty Two)\n"
|
||||
},
|
||||
},
|
||||
"result": {
|
||||
"distribution_release": "Twenty Two",
|
||||
"distribution": "Fedora",
|
||||
"distribution_major_version": "22",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "22"
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"platform.dist": [
|
||||
"fedora",
|
||||
"25",
|
||||
"Rawhide"
|
||||
],
|
||||
],
|
||||
"input": {
|
||||
"/etc/redhat-release": "Fedora release 25 (Rawhide)\n",
|
||||
"/etc/os-release": ("NAME=Fedora\nVERSION=\"25 (Workstation Edition)\"\nID=fedora\nVERSION_ID=25\n"
|
||||
"PRETTY_NAME=\"Fedora 25 (Workstation Edition)\"\nANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:25\"\n"
|
||||
"HOME_URL=\"https://fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\n"
|
||||
"REDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=rawhide\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\n"
|
||||
"REDHAT_SUPPORT_PRODUCT_VERSION=rawhide\nPRIVACY_POLICY_URL=https://fedoraproject.org/wiki/Legal:PrivacyPolicy\n"
|
||||
"VARIANT=\"Workstation Edition\"\nVARIANT_ID=workstation\n"),
|
||||
"/etc/os-release": (
|
||||
"NAME=Fedora\nVERSION=\"25 (Workstation Edition)\"\nID=fedora\nVERSION_ID=25\n"
|
||||
"PRETTY_NAME=\"Fedora 25 (Workstation Edition)\"\nANSI_COLOR=\"0;34\"\nCPE_NAME=\"cpe:/o:fedoraproject:fedora:25\"\n"
|
||||
"HOME_URL=\"https://fedoraproject.org/\"\nBUG_REPORT_URL=\"https://bugzilla.redhat.com/\"\n"
|
||||
"REDHAT_BUGZILLA_PRODUCT=\"Fedora\"\nREDHAT_BUGZILLA_PRODUCT_VERSION=rawhide\nREDHAT_SUPPORT_PRODUCT=\"Fedora\"\n"
|
||||
"REDHAT_SUPPORT_PRODUCT_VERSION=rawhide\nPRIVACY_POLICY_URL=https://fedoraproject.org/wiki/Legal:PrivacyPolicy\n"
|
||||
"VARIANT=\"Workstation Edition\"\nVARIANT_ID=workstation\n"
|
||||
),
|
||||
"/etc/system-release": "Fedora release 25 (Rawhide)\n"
|
||||
},
|
||||
},
|
||||
"name": "Fedora 25",
|
||||
"result": {
|
||||
"distribution_release": "Rawhide",
|
||||
|
@ -767,9 +794,8 @@ DISTRIB_DESCRIPTION="CoreOS 976.0.0 (Coeur Rouge)"
|
|||
"distribution_major_version": "25",
|
||||
"os_family": "RedHat",
|
||||
"distribution_version": "25"
|
||||
}
|
||||
},
|
||||
|
||||
}
|
||||
},
|
||||
{
|
||||
"platform.dist": [
|
||||
"",
|
||||
|
@ -787,9 +813,8 @@ DISTRIB_DESCRIPTION="CoreOS 976.0.0 (Coeur Rouge)"
|
|||
"distribution_major_version": "NA",
|
||||
"os_family": "SMGL",
|
||||
"distribution_version": "NA"
|
||||
}
|
||||
},
|
||||
|
||||
}
|
||||
},
|
||||
]
@@ -815,6 +840,7 @@ def test_distribution_version(testcase):

_test_one_distribution(facts, module, testcase)


def _test_one_distribution(facts, module, testcase):
"""run the test on one distribution testcase

@@ -828,7 +854,7 @@ def _test_one_distribution(facts, module, testcase):
data = default
if fname in testcase['input']:
# for debugging
print('faked '+fname+' for '+testcase['name'])
print('faked %s for %s' % (fname, testcase['name']))
data = testcase['input'][fname].strip()
if strip and data is not None:
data = data.strip()
||||
|
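A minimal, hypothetical sketch of the table-driven pattern these testcases follow — each entry pairs fake file contents under 'input' with the facts expected under 'result'. The parse_os_release() helper below is an invented stand-in for Ansible's real fact-collection code:

import pytest


def parse_os_release(text):
    """Parse KEY="value" lines from an os-release style blob into a dict."""
    facts = {}
    for line in text.strip().splitlines():
        if '=' in line:
            key, _, value = line.partition('=')
            facts[key.strip()] = value.strip().strip('"')
    return facts


TESTSETS = [
    {
        'name': 'Ubuntu 16.04',
        'input': 'NAME="Ubuntu"\nVERSION_ID="16.04"\n',
        'result': {'NAME': 'Ubuntu', 'VERSION_ID': '16.04'},
    },
]


@pytest.mark.parametrize('testcase', TESTSETS, ids=lambda t: t['name'])
def test_parse_os_release(testcase):
    # Feed the faked file content in, compare against the expected facts.
    assert parse_os_release(testcase['input']) == testcase['result']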
@ -189,7 +189,7 @@ LSBLK_OUTPUT = b"""
|
|||
/dev/mapper/docker-253:1-1050967-pool
|
||||
"""
|
||||
|
||||
LSBLK_OUTPUT_2 = b"""
|
||||
LSBLK_OUTPUT_2 = b"""
|
||||
/dev/sda
|
||||
/dev/sda1 32caaec3-ef40-4691-a3b6-438c3f9bc1c0
|
||||
/dev/sda2 66Ojcd-ULtu-1cZa-Tywo-mx0d-RF4O-ysA9jK
|
||||
|
@ -244,166 +244,213 @@ grimlock.g.a:path_with'single_quotes /home/adrian/sshfs-grimlock-single-quote-2
|
|||
grimlock.g.a:/mnt/data/foto's /home/adrian/fotos fuse.sshfs rw,nosuid,nodev,relatime,user_id=1000,group_id=1000 0 0
|
||||
"""
|
||||
|
||||
MTAB_ENTRIES = \
|
||||
MTAB_ENTRIES = [
|
||||
[
|
||||
['sysfs',
|
||||
'/sys',
|
||||
'sysfs',
|
||||
'rw,seclabel,nosuid,nodev,noexec,relatime',
|
||||
'0',
|
||||
'0'],
|
||||
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
|
||||
['devtmpfs',
|
||||
'/dev',
|
||||
'devtmpfs',
|
||||
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
|
||||
'0',
|
||||
'0'],
|
||||
['securityfs',
|
||||
'/sys/kernel/security',
|
||||
'securityfs',
|
||||
'rw,nosuid,nodev,noexec,relatime',
|
||||
'0',
|
||||
'0'],
|
||||
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
|
||||
['devpts',
|
||||
'/dev/pts',
|
||||
'devpts',
|
||||
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
|
||||
'0',
|
||||
'0'],
|
||||
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
|
||||
['tmpfs',
|
||||
'/sys/fs/cgroup',
|
||||
'tmpfs',
|
||||
'ro,seclabel,nosuid,nodev,noexec,mode=755',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'/sys/fs/cgroup/systemd',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
|
||||
'0',
|
||||
'0'],
|
||||
['pstore',
|
||||
'/sys/fs/pstore',
|
||||
'pstore',
|
||||
'rw,seclabel,nosuid,nodev,noexec,relatime',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'/sys/fs/cgroup/devices',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,devices',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'sysfs',
|
||||
'/sys',
|
||||
'sysfs',
|
||||
'rw,seclabel,nosuid,nodev,noexec,relatime',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
['proc', '/proc', 'proc', 'rw,nosuid,nodev,noexec,relatime', '0', '0'],
|
||||
[
|
||||
'devtmpfs',
|
||||
'/dev',
|
||||
'devtmpfs',
|
||||
'rw,seclabel,nosuid,size=8044400k,nr_inodes=2011100,mode=755',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'securityfs',
|
||||
'/sys/kernel/security',
|
||||
'securityfs',
|
||||
'rw,nosuid,nodev,noexec,relatime',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
['tmpfs', '/dev/shm', 'tmpfs', 'rw,seclabel,nosuid,nodev', '0', '0'],
|
||||
[
|
||||
'devpts',
|
||||
'/dev/pts',
|
||||
'devpts',
|
||||
'rw,seclabel,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
['tmpfs', '/run', 'tmpfs', 'rw,seclabel,nosuid,nodev,mode=755', '0', '0'],
|
||||
[
|
||||
'tmpfs',
|
||||
'/sys/fs/cgroup',
|
||||
'tmpfs',
|
||||
'ro,seclabel,nosuid,nodev,noexec,mode=755',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/systemd',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,xattr,release_agent=/usr/lib/systemd/systemd-cgroups-agent,name=systemd',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'pstore',
|
||||
'/sys/fs/pstore',
|
||||
'pstore',
|
||||
'rw,seclabel,nosuid,nodev,noexec,relatime',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/devices',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,devices',
|
||||
'0',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/freezer',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,freezer',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/memory',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,memory',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/pids',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,pids',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/blkio',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,blkio',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/cpuset',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,cpuset',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/cpu,cpuacct',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,cpu,cpuacct',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/hugetlb',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,hugetlb',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/perf_event',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,perf_event',
|
||||
'0',
|
||||
'0'],
|
||||
['cgroup',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'cgroup',
|
||||
'/sys/fs/cgroup/net_cls,net_prio',
|
||||
'cgroup',
|
||||
'rw,nosuid,nodev,noexec,relatime,net_cls,net_prio',
|
||||
'0',
|
||||
'0'],
|
||||
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
|
||||
['/dev/mapper/fedora_dhcp129--186-root',
|
||||
'0'
|
||||
],
|
||||
['configfs', '/sys/kernel/config', 'configfs', 'rw,relatime', '0', '0'],
|
||||
[
|
||||
'/dev/mapper/fedora_dhcp129--186-root',
|
||||
'/',
|
||||
'ext4',
|
||||
'rw,seclabel,relatime,data=ordered',
|
||||
'0',
|
||||
'0'],
|
||||
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
|
||||
['systemd-1',
|
||||
'0'
|
||||
],
|
||||
['selinuxfs', '/sys/fs/selinux', 'selinuxfs', 'rw,relatime', '0', '0'],
|
||||
[
|
||||
'systemd-1',
|
||||
'/proc/sys/fs/binfmt_misc',
|
||||
'autofs',
|
||||
'rw,relatime,fd=24,pgrp=1,timeout=0,minproto=5,maxproto=5,direct',
|
||||
'0',
|
||||
'0'],
|
||||
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
|
||||
['hugetlbfs',
|
||||
'0'
|
||||
],
|
||||
['debugfs', '/sys/kernel/debug', 'debugfs', 'rw,seclabel,relatime', '0', '0'],
|
||||
[
|
||||
'hugetlbfs',
|
||||
'/dev/hugepages',
|
||||
'hugetlbfs',
|
||||
'rw,seclabel,relatime',
|
||||
'0',
|
||||
'0'],
|
||||
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
|
||||
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
|
||||
['/dev/loop0',
|
||||
'0'
|
||||
],
|
||||
['tmpfs', '/tmp', 'tmpfs', 'rw,seclabel', '0', '0'],
|
||||
['mqueue', '/dev/mqueue', 'mqueue', 'rw,seclabel,relatime', '0', '0'],
|
||||
[
|
||||
'/dev/loop0',
|
||||
'/var/lib/machines',
|
||||
'btrfs',
|
||||
'rw,seclabel,relatime,space_cache,subvolid=5,subvol=/',
|
||||
'0',
|
||||
'0'],
|
||||
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
|
||||
# A 'none' fstype
|
||||
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
|
||||
# lets assume this is a bindmount
|
||||
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
|
||||
['/dev/mapper/fedora_dhcp129--186-home',
|
||||
'0'
|
||||
],
|
||||
['/dev/sda1', '/boot', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
|
||||
# A 'none' fstype
|
||||
['/dev/sdz3', '/not/a/real/device', 'none', 'rw,seclabel,relatime,data=ordered', '0', '0'],
|
||||
# lets assume this is a bindmount
|
||||
['/dev/sdz4', '/not/a/real/bind_mount', 'ext4', 'rw,seclabel,relatime,data=ordered', '0', '0'],
|
||||
[
|
||||
'/dev/mapper/fedora_dhcp129--186-home',
|
||||
'/home',
|
||||
'ext4',
|
||||
'rw,seclabel,relatime,data=ordered',
|
||||
'0',
|
||||
'0'],
|
||||
['tmpfs',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'tmpfs',
|
||||
'/run/user/1000',
|
||||
'tmpfs',
|
||||
'rw,seclabel,nosuid,nodev,relatime,size=1611044k,mode=700,uid=1000,gid=1000',
|
||||
'0',
|
||||
'0'],
|
||||
['gvfsd-fuse',
|
||||
'0'
|
||||
],
|
||||
[
|
||||
'gvfsd-fuse',
|
||||
'/run/user/1000/gvfs',
|
||||
'fuse.gvfsd-fuse',
|
||||
'rw,nosuid,nodev,relatime,user_id=1000,group_id=1000',
|
||||
'0',
|
||||
'0'],
|
||||
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0']]
|
||||
'0'
|
||||
],
|
||||
['fusectl', '/sys/fs/fuse/connections', 'fusectl', 'rw,relatime', '0', '0']]
|
||||
|
||||
BIND_MOUNTS = ['/not/a/real/bind_mount']
|
||||
|
||||
|
@@ -481,7 +528,7 @@ class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
self.assertIsInstance(bind_mounts, set)
self.assertEqual(len(bind_mounts), 0)

@patch('ansible.module_utils.facts.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT,''))
@patch('ansible.module_utils.facts.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT, ''))
def test_lsblk_uuid(self, mock_run_lsblk):
module = Mock()
lh = facts.LinuxHardware(module=module, load_on_init=False)
@@ -492,7 +539,7 @@ class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
self.assertIn(b'/dev/sda1', lsblk_uuids)
self.assertEquals(lsblk_uuids[b'/dev/sda1'], b'32caaec3-ef40-4691-a3b6-438c3f9bc1c0')

@patch('ansible.module_utils.facts.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT,''))
@patch('ansible.module_utils.facts.LinuxHardware._run_lsblk', return_value=(37, LSBLK_OUTPUT, ''))
def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
module = Mock()
lh = facts.LinuxHardware(module=module, load_on_init=False)
@@ -510,7 +557,7 @@ class TestFactsLinuxHardwareGetMountFacts(unittest.TestCase):
self.assertIsInstance(lsblk_uuids, dict)
self.assertEquals(len(lsblk_uuids), 0)

@patch('ansible.module_utils.facts.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2,''))
@patch('ansible.module_utils.facts.LinuxHardware._run_lsblk', return_value=(0, LSBLK_OUTPUT_2, ''))
def test_lsblk_uuid_dev_with_space_in_name(self, mock_run_lsblk):
module = Mock()
lh = facts.LinuxHardware(module=module, load_on_init=False)
|
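The patch decorators above stub LinuxHardware._run_lsblk with a canned (rc, stdout, stderr) tuple. A small, generic sketch of that mocking pattern, with a toy Hardware class standing in for the real one:

from unittest import TestCase, main
from unittest.mock import patch


class Hardware(object):
    """Toy stand-in for the class under test."""

    def _run_lsblk(self):
        raise NotImplementedError("would shell out to lsblk")

    def lsblk_uuids(self):
        rc, out, err = self._run_lsblk()
        if rc != 0:
            return {}
        # Map "device uuid" lines into a dict.
        return dict(line.split(None, 1) for line in out.splitlines() if ' ' in line)


class TestHardware(TestCase):
    @patch.object(Hardware, '_run_lsblk', return_value=(0, '/dev/sda1 1234-5678', ''))
    def test_lsblk_uuid(self, mock_run_lsblk):
        self.assertEqual(Hardware().lsblk_uuids(), {'/dev/sda1': '1234-5678'})

    @patch.object(Hardware, '_run_lsblk', return_value=(37, '', 'boom'))
    def test_lsblk_uuid_non_zero(self, mock_run_lsblk):
        self.assertEqual(Hardware().lsblk_uuids(), {})


if __name__ == '__main__':
    main()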
|
|
@ -28,50 +28,73 @@ from units.mock.procenv import swap_stdin_and_argv
|
|||
|
||||
class TestAnsibleModuleKnownHosts(unittest.TestCase):
|
||||
urls = {
|
||||
'ssh://one.example.org/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': 'one.example.org',
|
||||
'add_host_key_cmd': " -t rsa one.example.org",
|
||||
'port': None},
|
||||
'ssh+git://two.example.org/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': 'two.example.org',
|
||||
'add_host_key_cmd': " -t rsa two.example.org",
|
||||
'port': None},
|
||||
'rsync://three.example.org/user/example.git':
|
||||
{'is_ssh_url': False, 'get_fqdn': 'three.example.org',
|
||||
'add_host_key_cmd': None, # not called for non-ssh urls
|
||||
'port': None},
|
||||
'git@four.example.org:user/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': 'four.example.org',
|
||||
'add_host_key_cmd': " -t rsa four.example.org",
|
||||
'port': None},
|
||||
'git+ssh://five.example.org/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': 'five.example.org',
|
||||
'add_host_key_cmd': " -t rsa five.example.org",
|
||||
'port': None},
|
||||
'ssh://six.example.org:21/example.org': # ssh on FTP Port?
|
||||
{'is_ssh_url': True, 'get_fqdn': 'six.example.org',
|
||||
'add_host_key_cmd': " -t rsa -p 21 six.example.org",
|
||||
'port': '21'},
|
||||
'ssh://[2001:DB8::abcd:abcd]/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
|
||||
'port': None},
|
||||
'ssh://[2001:DB8::abcd:abcd]:22/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa -p 22 [2001:DB8::abcd:abcd]",
|
||||
'port': '22'},
|
||||
'username@[2001:DB8::abcd:abcd]/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
|
||||
'port': None},
|
||||
'username@[2001:DB8::abcd:abcd]:path/example.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
|
||||
'port': None},
|
||||
'ssh://internal.git.server:7999/repos/repo.git':
|
||||
{'is_ssh_url': True, 'get_fqdn': 'internal.git.server',
|
||||
'add_host_key_cmd': " -t rsa -p 7999 internal.git.server",
|
||||
'port': '7999'}
|
||||
'ssh://one.example.org/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': 'one.example.org',
|
||||
'add_host_key_cmd': " -t rsa one.example.org",
|
||||
'port': None,
|
||||
},
|
||||
'ssh+git://two.example.org/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': 'two.example.org',
|
||||
'add_host_key_cmd': " -t rsa two.example.org",
|
||||
'port': None,
|
||||
},
|
||||
'rsync://three.example.org/user/example.git': {
|
||||
'is_ssh_url': False,
|
||||
'get_fqdn': 'three.example.org',
|
||||
'add_host_key_cmd': None, # not called for non-ssh urls
|
||||
'port': None,
|
||||
},
|
||||
'git@four.example.org:user/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': 'four.example.org',
|
||||
'add_host_key_cmd': " -t rsa four.example.org",
|
||||
'port': None,
|
||||
},
|
||||
'git+ssh://five.example.org/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': 'five.example.org',
|
||||
'add_host_key_cmd': " -t rsa five.example.org",
|
||||
'port': None,
|
||||
},
|
||||
'ssh://six.example.org:21/example.org': {
|
||||
# ssh on FTP Port?
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': 'six.example.org',
|
||||
'add_host_key_cmd': " -t rsa -p 21 six.example.org",
|
||||
'port': '21',
|
||||
},
|
||||
'ssh://[2001:DB8::abcd:abcd]/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
|
||||
'port': None,
|
||||
},
|
||||
'ssh://[2001:DB8::abcd:abcd]:22/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa -p 22 [2001:DB8::abcd:abcd]",
|
||||
'port': '22',
|
||||
},
|
||||
'username@[2001:DB8::abcd:abcd]/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
|
||||
'port': None,
|
||||
},
|
||||
'username@[2001:DB8::abcd:abcd]:path/example.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': '[2001:DB8::abcd:abcd]',
|
||||
'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
|
||||
'port': None,
|
||||
},
|
||||
'ssh://internal.git.server:7999/repos/repo.git': {
|
||||
'is_ssh_url': True,
|
||||
'get_fqdn': 'internal.git.server',
|
||||
'add_host_key_cmd': " -t rsa -p 7999 internal.git.server",
|
||||
'port': '7999',
|
||||
},
|
||||
}
|
||||
|
||||
def test_is_ssh_url(self):
|
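The urls mapping above drives several assertions per entry; unittest's subTest is one way to keep such table-driven checks readable when a single entry fails. A generic sketch with an invented looks_like_ssh_url() predicate, not the module's real is_ssh_url():

import unittest


def looks_like_ssh_url(url):
    """Toy predicate standing in for the real is_ssh_url() helper."""
    return url.startswith(('ssh://', 'ssh+git://', 'git+ssh://')) or '@' in url


class TestSshUrls(unittest.TestCase):
    urls = {
        'ssh://one.example.org/example.git': True,
        'rsync://three.example.org/user/example.git': False,
        'git@four.example.org:user/example.git': True,
    }

    def test_is_ssh_url(self):
        for url, expected in self.urls.items():
            # subTest reports which URL failed instead of stopping at the first one.
            with self.subTest(url=url):
                self.assertEqual(looks_like_ssh_url(url), expected)


if __name__ == '__main__':
    unittest.main()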
|
@ -14,6 +14,7 @@ import pprint
|
|||
|
||||
realimport = builtins.__import__
|
||||
|
||||
|
||||
class TestPostgres(unittest.TestCase):
|
||||
def clear_modules(self, mods):
|
||||
for mod in mods:
|
||||
|
@ -72,4 +73,3 @@ class TestPostgres(unittest.TestCase):
|
|||
with self.assertRaises(mod.module_utils.postgres.LibraryError) as context:
|
||||
mod.module_utils.postgres.ensure_libs(sslrootcert='yes')
|
||||
self.assertIn('psycopg2 must be at least 2.4.3 in order to use', to_native(context.exception))
|
||||
|
||||
|
|
|
@@ -37,16 +37,19 @@ VALID_STRINGS = (
# u'くらとみ'
(b'\xe3\x81\x8f\xe3\x82\x89\xe3\x81\xa8\xe3\x81\xbf', u'\u304f\u3089\u3068\u307f', 'utf-8'),
(b'\x82\xad\x82\xe7\x82\xc6\x82\xdd', u'\u304f\u3089\u3068\u307f', 'shift-jis'),
)
)


def _check_to_text(self, in_string, encoding, expected):
"""test happy path of decoding to text"""
self.assertEqual(to_text(in_string, encoding), expected)


def _check_to_bytes(self, in_string, encoding, expected):
"""test happy path of encoding to bytes"""
self.assertEqual(to_bytes(in_string, encoding), expected)


def _check_to_native(self, in_string, encoding, py2_expected, py3_expected):
"""test happy path of encoding to native strings"""
if PY3:
|
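Each VALID_STRINGS triple above pairs a byte string, its text form, and the codec that connects them. A minimal round-trip check of the same data using only the standard library (not Ansible's to_text/to_bytes helpers):

# -*- coding: utf-8 -*-
VALID_STRINGS = (
    (b'\xe3\x81\x8f\xe3\x82\x89\xe3\x81\xa8\xe3\x81\xbf', u'\u304f\u3089\u3068\u307f', 'utf-8'),
    (b'\x82\xad\x82\xe7\x82\xc6\x82\xdd', u'\u304f\u3089\u3068\u307f', 'shift-jis'),
)

for raw, text, codec in VALID_STRINGS:
    # Decoding the bytes with the stated codec must give the text form...
    assert raw.decode(codec) == text
    # ...and encoding the text back must reproduce the original bytes.
    assert text.encode(codec) == raw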
|
@ -1,9 +1,6 @@
|
|||
import pytest
|
||||
import unittest
|
||||
|
||||
boto3 = pytest.importorskip("boto3")
|
||||
botocore = pytest.importorskip("botocore")
|
||||
|
||||
from collections import namedtuple
|
||||
from ansible.parsing.dataloader import DataLoader
|
||||
from ansible.vars.manager import VariableManager
|
||||
|
@ -13,6 +10,10 @@ from ansible.executor.task_queue_manager import TaskQueueManager
|
|||
|
||||
import ansible.modules.cloud.amazon.ec2_vpc_nat_gateway as ng
|
||||
|
||||
|
||||
boto3 = pytest.importorskip("boto3")
|
||||
botocore = pytest.importorskip("botocore")
|
||||
|
||||
Options = (
|
||||
namedtuple(
|
||||
'Options', [
|
||||
|
@ -44,6 +45,7 @@ aws_region = 'us-west-2'
|
|||
inventory = InventoryManager(loader=loader)
|
||||
variable_manager.set_inventory(inventory)
|
||||
|
||||
|
||||
def run(play):
|
||||
tqm = None
|
||||
results = None
|
||||
|
@ -62,14 +64,15 @@ def run(play):
|
|||
tqm.cleanup()
|
||||
return tqm, results
|
||||
|
||||
|
||||
class AnsibleVpcNatGatewayTasks(unittest.TestCase):

def test_create_gateway_using_allocation_id(self):
play_source = dict(
name = "Create new nat gateway with eip allocation-id",
hosts = 'localhost',
gather_facts = 'no',
tasks = [
play_source = dict(
name="Create new nat gateway with eip allocation-id",
hosts='localhost',
gather_facts='no',
tasks=[
dict(
action=dict(
module='ec2_vpc_nat_gateway',
@ -98,11 +101,11 @@ class AnsibleVpcNatGatewayTasks(unittest.TestCase):
|
|||
self.failUnless(tqm._stats.changed['localhost'] == 1)
|
||||
|
||||
def test_create_gateway_using_allocation_id_idempotent(self):
|
||||
play_source = dict(
|
||||
name = "Create new nat gateway with eip allocation-id",
|
||||
hosts = 'localhost',
|
||||
gather_facts = 'no',
|
||||
tasks = [
|
||||
play_source = dict(
|
||||
name="Create new nat gateway with eip allocation-id",
|
||||
hosts='localhost',
|
||||
gather_facts='no',
|
||||
tasks=[
|
||||
dict(
|
||||
action=dict(
|
||||
module='ec2_vpc_nat_gateway',
|
||||
|
@ -131,11 +134,11 @@ class AnsibleVpcNatGatewayTasks(unittest.TestCase):
|
|||
self.assertFalse('localhost' in tqm._stats.changed)
|
||||
|
||||
def test_create_gateway_using_eip_address(self):
|
||||
play_source = dict(
|
||||
name = "Create new nat gateway with eip address",
|
||||
hosts = 'localhost',
|
||||
gather_facts = 'no',
|
||||
tasks = [
|
||||
play_source = dict(
|
||||
name="Create new nat gateway with eip address",
|
||||
hosts='localhost',
|
||||
gather_facts='no',
|
||||
tasks=[
|
||||
dict(
|
||||
action=dict(
|
||||
module='ec2_vpc_nat_gateway',
|
||||
|
@ -164,11 +167,11 @@ class AnsibleVpcNatGatewayTasks(unittest.TestCase):
|
|||
self.failUnless(tqm._stats.changed['localhost'] == 1)
|
||||
|
||||
def test_create_gateway_using_eip_address_idempotent(self):
|
||||
play_source = dict(
|
||||
name = "Create new nat gateway with eip address",
|
||||
hosts = 'localhost',
|
||||
gather_facts = 'no',
|
||||
tasks = [
|
||||
play_source = dict(
|
||||
name="Create new nat gateway with eip address",
|
||||
hosts='localhost',
|
||||
gather_facts='no',
|
||||
tasks=[
|
||||
dict(
|
||||
action=dict(
|
||||
module='ec2_vpc_nat_gateway',
|
||||
|
@ -197,11 +200,11 @@ class AnsibleVpcNatGatewayTasks(unittest.TestCase):
|
|||
self.assertFalse('localhost' in tqm._stats.changed)
|
||||
|
||||
def test_create_gateway_in_subnet_only_if_one_does_not_exist_already(self):
|
||||
play_source = dict(
|
||||
name = "Create new nat gateway only if one does not exist already",
|
||||
hosts = 'localhost',
|
||||
gather_facts = 'no',
|
||||
tasks = [
|
||||
play_source = dict(
|
||||
name="Create new nat gateway only if one does not exist already",
|
||||
hosts='localhost',
|
||||
gather_facts='no',
|
||||
tasks=[
|
||||
dict(
|
||||
action=dict(
|
||||
module='ec2_vpc_nat_gateway',
|
||||
|
@ -230,11 +233,11 @@ class AnsibleVpcNatGatewayTasks(unittest.TestCase):
|
|||
self.assertFalse('localhost' in tqm._stats.changed)
|
||||
|
||||
def test_delete_gateway(self):
|
||||
play_source = dict(
|
||||
name = "Delete Nat Gateway",
|
||||
hosts = 'localhost',
|
||||
gather_facts = 'no',
|
||||
tasks = [
|
||||
play_source = dict(
|
||||
name="Delete Nat Gateway",
|
||||
hosts='localhost',
|
||||
gather_facts='no',
|
||||
tasks=[
|
||||
dict(
|
||||
action=dict(
|
||||
module='ec2_vpc_nat_gateway',
|
||||
|
@ -262,6 +265,7 @@ class AnsibleVpcNatGatewayTasks(unittest.TestCase):
|
|||
self.failUnless(tqm._stats.ok['localhost'] == 2)
|
||||
self.assertTrue('localhost' in tqm._stats.changed)
|
||||
|
||||
|
||||
class AnsibleEc2VpcNatGatewayFunctions(unittest.TestCase):
|
||||
|
||||
def test_get_nat_gateways(self):
|
||||
|
@ -326,7 +330,7 @@ class AnsibleEc2VpcNatGatewayFunctions(unittest.TestCase):
|
|||
client = boto3.client('ec2', region_name=aws_region)
|
||||
gws, err_msg = (
|
||||
ng.gateway_in_subnet_exists(
|
||||
client, 'subnet-123456789', check_mode=True
|
||||
client, 'subnet-123456789', check_mode=True
|
||||
)
|
||||
)
|
||||
should_return = ng.DRY_RUN_GATEWAYS
|
||||
|
@ -336,7 +340,7 @@ class AnsibleEc2VpcNatGatewayFunctions(unittest.TestCase):
|
|||
client = boto3.client('ec2', region_name=aws_region)
|
||||
allocation_id, _ = (
|
||||
ng.get_eip_allocation_id_by_address(
|
||||
client, '55.55.55.55', check_mode=True
|
||||
client, '55.55.55.55', check_mode=True
|
||||
)
|
||||
)
|
||||
should_return = 'eipalloc-1234567'
|
||||
|
@ -346,7 +350,7 @@ class AnsibleEc2VpcNatGatewayFunctions(unittest.TestCase):
|
|||
client = boto3.client('ec2', region_name=aws_region)
|
||||
allocation_id, err_msg = (
|
||||
ng.get_eip_allocation_id_by_address(
|
||||
client, '52.52.52.52', check_mode=True
|
||||
client, '52.52.52.52', check_mode=True
|
||||
)
|
||||
)
|
||||
self.assertEqual(err_msg, 'EIP 52.52.52.52 does not exist')
|
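Most of the hunks in this file only remove the spaces around '=' in keyword arguments, which pycodestyle reports as E251. A small before/after illustration of the rule, detached from any particular module:

# Flagged by pycodestyle as E251 (unexpected spaces around keyword / parameter equals):
play_source = dict(
    name = "Create new nat gateway with eip allocation-id",
    hosts = 'localhost',
)

# PEP8-compliant form -- no spaces around '=' when passing keyword arguments:
play_source = dict(
    name="Create new nat gateway with eip allocation-id",
    hosts='localhost',
)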
|
@ -19,42 +19,45 @@
|
|||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
import pytest
|
||||
boto3 = pytest.importorskip("boto3")
|
||||
|
||||
import json
|
||||
import copy
|
||||
from ansible.module_utils._text import to_bytes
|
||||
from ansible.module_utils import basic
|
||||
import json
|
||||
import pytest
|
||||
|
||||
from ansible.compat.tests.mock import MagicMock, Mock, patch
|
||||
from ansible.module_utils import basic
|
||||
from ansible.module_utils._text import to_bytes
|
||||
|
||||
|
||||
boto3 = pytest.importorskip("boto3")
|
||||
|
||||
# lambda is a keyword so we have to hack this.
|
||||
_temp = __import__("ansible.modules.cloud.amazon.lambda")
|
||||
lda = getattr(_temp.modules.cloud.amazon,"lambda")
|
||||
lda = getattr(_temp.modules.cloud.amazon, "lambda")
|
||||
|
||||
|
||||
def set_module_args(args):
|
||||
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
|
||||
basic._ANSIBLE_ARGS = to_bytes(args)
|
||||
|
||||
base_lambda_config={
|
||||
'FunctionName' : 'lambda_name',
|
||||
'Role' : 'arn:aws:iam::987654321012:role/lambda_basic_execution',
|
||||
'Handler' : 'lambda_python.my_handler',
|
||||
'Description' : 'this that the other',
|
||||
'Timeout' : 3,
|
||||
'MemorySize' : 128,
|
||||
'Runtime' : 'python2.7',
|
||||
'CodeSha256' : 'AqMZ+xptM7aC9VXu+5jyp1sqO+Nj4WFMNzQxtPMP2n8=',
|
||||
base_lambda_config = {
|
||||
'FunctionName': 'lambda_name',
|
||||
'Role': 'arn:aws:iam::987654321012:role/lambda_basic_execution',
|
||||
'Handler': 'lambda_python.my_handler',
|
||||
'Description': 'this that the other',
|
||||
'Timeout': 3,
|
||||
'MemorySize': 128,
|
||||
'Runtime': 'python2.7',
|
||||
'CodeSha256': 'AqMZ+xptM7aC9VXu+5jyp1sqO+Nj4WFMNzQxtPMP2n8=',
|
||||
}
|
||||
|
||||
one_change_lambda_config=copy.copy(base_lambda_config)
|
||||
one_change_lambda_config['Timeout']=4
|
||||
two_change_lambda_config=copy.copy(one_change_lambda_config)
|
||||
two_change_lambda_config['Role']='arn:aws:iam::987654321012:role/lambda_advanced_execution'
|
||||
code_change_lambda_config=copy.copy(base_lambda_config)
|
||||
code_change_lambda_config['CodeSha256']='P+Zy8U4T4RiiHWElhL10VBKj9jw4rSJ5bm/TiW+4Rts='
|
||||
one_change_lambda_config = copy.copy(base_lambda_config)
|
||||
one_change_lambda_config['Timeout'] = 4
|
||||
two_change_lambda_config = copy.copy(one_change_lambda_config)
|
||||
two_change_lambda_config['Role'] = 'arn:aws:iam::987654321012:role/lambda_advanced_execution'
|
||||
code_change_lambda_config = copy.copy(base_lambda_config)
|
||||
code_change_lambda_config['CodeSha256'] = 'P+Zy8U4T4RiiHWElhL10VBKj9jw4rSJ5bm/TiW+4Rts='
|
||||
|
||||
base_module_args={
|
||||
base_module_args = {
|
||||
"region": "us-west-1",
|
||||
"name": "lambda_name",
|
||||
"state": "present",
|
||||
|
@ -62,10 +65,10 @@ base_module_args={
|
|||
"runtime": 'python2.7',
|
||||
"role": 'arn:aws:iam::987654321012:role/lambda_basic_execution',
|
||||
"memory_size": 128,
|
||||
"timeout" : 3,
|
||||
"timeout": 3,
|
||||
"handler": 'lambda_python.my_handler'
|
||||
}
|
||||
module_args_with_environment=dict(base_module_args, environment_variables={
|
||||
module_args_with_environment = dict(base_module_args, environment_variables={
|
||||
"variable_name": "variable_value"
|
||||
})
|
||||
|
||||
|
@ -78,26 +81,27 @@ def make_mock_no_connection_connection(config):
|
|||
)
|
||||
lambda_client_double.update_function_configuration.configure_mock(
|
||||
return_value={
|
||||
'Version' : 1
|
||||
'Version': 1
|
||||
}
|
||||
)
|
||||
fake_boto3_conn=Mock(return_value=lambda_client_double)
|
||||
fake_boto3_conn = Mock(return_value=lambda_client_double)
|
||||
return (fake_boto3_conn, lambda_client_double)
|
||||
|
||||
|
||||
def make_mock_connection(config):
|
||||
"""return a mock of ansible's boto3_conn ready to return a mock AWS API client"""
|
||||
lambda_client_double = MagicMock()
|
||||
lambda_client_double.get_function.configure_mock(
|
||||
return_value={
|
||||
'Configuration' : config
|
||||
'Configuration': config
|
||||
}
|
||||
)
|
||||
lambda_client_double.update_function_configuration.configure_mock(
|
||||
return_value={
|
||||
'Version' : 1
|
||||
'Version': 1
|
||||
}
|
||||
)
|
||||
fake_boto3_conn=Mock(return_value=lambda_client_double)
|
||||
fake_boto3_conn = Mock(return_value=lambda_client_double)
|
||||
return (fake_boto3_conn, lambda_client_double)
|
||||
|
||||
|
||||
|
@ -111,13 +115,13 @@ def fail_json_double(*args, **kwargs):
|
|||
raise AnsibleFailJson(kwargs)
|
||||
|
||||
|
||||
#TODO: def test_handle_different_types_in_config_params():
|
||||
# TODO: def test_handle_different_types_in_config_params():
|
||||
|
||||
|
||||
def test_create_lambda_if_not_exist():
|
||||
|
||||
set_module_args(base_module_args)
|
||||
(boto3_conn_double, lambda_client_double)=make_mock_no_connection_connection(code_change_lambda_config)
|
||||
(boto3_conn_double, lambda_client_double) = make_mock_no_connection_connection(code_change_lambda_config)
|
||||
|
||||
with patch.object(lda, 'boto3_conn', boto3_conn_double):
|
||||
try:
|
||||
|
@ -133,7 +137,7 @@ def test_create_lambda_if_not_exist():
|
|||
"update lambda function code when function should have been created only"
|
||||
assert(len(lambda_client_double.create_function.mock_calls) > 0), \
|
||||
"failed to call create_function "
|
||||
(create_args, create_kwargs)=lambda_client_double.create_function.call_args
|
||||
(create_args, create_kwargs) = lambda_client_double.create_function.call_args
|
||||
assert (len(create_kwargs) > 0), "expected create called with keyword args, none found"
|
||||
|
||||
try:
|
||||
|
@ -143,12 +147,13 @@ def test_create_lambda_if_not_exist():
|
|||
create_kwargs["Environment"]
|
||||
raise(Exception("Environment sent to boto when none expected"))
|
||||
except KeyError:
|
||||
pass #We are happy, no environment is fine
|
||||
pass # We are happy, no environment is fine
|
||||
|
||||
|
||||
def test_update_lambda_if_code_changed():
|
||||
|
||||
set_module_args(base_module_args)
|
||||
(boto3_conn_double, lambda_client_double)=make_mock_connection(code_change_lambda_config)
|
||||
(boto3_conn_double, lambda_client_double) = make_mock_connection(code_change_lambda_config)
|
||||
|
||||
with patch.object(lda, 'boto3_conn', boto3_conn_double):
|
||||
try:
|
||||
|
@ -169,10 +174,11 @@ def test_update_lambda_if_code_changed():
|
|||
assert(len(lambda_client_double.update_function_code.mock_calls) < 3), \
|
||||
"lambda function code update called multiple times when only one time should be needed"
|
||||
|
||||
|
||||
def test_update_lambda_if_config_changed():
|
||||
|
||||
set_module_args(base_module_args)
|
||||
(boto3_conn_double,lambda_client_double)=make_mock_connection(two_change_lambda_config)
|
||||
(boto3_conn_double, lambda_client_double) = make_mock_connection(two_change_lambda_config)
|
||||
|
||||
with patch.object(lda, 'boto3_conn', boto3_conn_double):
|
||||
try:
|
||||
|
@ -189,10 +195,11 @@ def test_update_lambda_if_config_changed():
|
|||
assert(len(lambda_client_double.update_function_code.mock_calls) == 0), \
|
||||
"updated lambda code when no change should have happened"
|
||||
|
||||
|
||||
def test_update_lambda_if_only_one_config_item_changed():
|
||||
|
||||
set_module_args(base_module_args)
|
||||
(boto3_conn_double,lambda_client_double)=make_mock_connection(one_change_lambda_config)
|
||||
(boto3_conn_double, lambda_client_double) = make_mock_connection(one_change_lambda_config)
|
||||
|
||||
with patch.object(lda, 'boto3_conn', boto3_conn_double):
|
||||
try:
|
||||
|
@ -209,10 +216,11 @@ def test_update_lambda_if_only_one_config_item_changed():
|
|||
assert(len(lambda_client_double.update_function_code.mock_calls) == 0), \
|
||||
"updated lambda code when no change should have happened"
|
||||
|
||||
|
||||
def test_update_lambda_if_added_environment_variable():
|
||||
|
||||
set_module_args(module_args_with_environment)
|
||||
(boto3_conn_double,lambda_client_double)=make_mock_connection(base_lambda_config)
|
||||
(boto3_conn_double, lambda_client_double) = make_mock_connection(base_lambda_config)
|
||||
|
||||
with patch.object(lda, 'boto3_conn', boto3_conn_double):
|
||||
try:
|
||||
|
@ -229,14 +237,14 @@ def test_update_lambda_if_added_environment_variable():
|
|||
assert(len(lambda_client_double.update_function_code.mock_calls) == 0), \
|
||||
"updated lambda code when no change should have happened"
|
||||
|
||||
(update_args, update_kwargs)=lambda_client_double.update_function_configuration.call_args
|
||||
(update_args, update_kwargs) = lambda_client_double.update_function_configuration.call_args
|
||||
assert (len(update_kwargs) > 0), "expected update configuration called with keyword args, none found"
|
||||
assert update_kwargs['Environment']['Variables'] == module_args_with_environment['environment_variables']
|
||||
|
||||
def test_dont_update_lambda_if_nothing_changed():
|
||||
|
||||
def test_dont_update_lambda_if_nothing_changed():
|
||||
set_module_args(base_module_args)
|
||||
(boto3_conn_double,lambda_client_double)=make_mock_connection(base_lambda_config)
|
||||
(boto3_conn_double, lambda_client_double) = make_mock_connection(base_lambda_config)
|
||||
|
||||
with patch.object(lda, 'boto3_conn', boto3_conn_double):
|
||||
try:
|
||||
|
@ -248,9 +256,10 @@ def test_dont_update_lambda_if_nothing_changed():
|
|||
assert(len(boto3_conn_double.mock_calls) == 1), "multiple boto connections used unexpectedly"
|
||||
assert(len(lambda_client_double.update_function_configuration.mock_calls) == 0), \
|
||||
"updated lambda function when no configuration changed"
|
||||
assert(len(lambda_client_double.update_function_code.mock_calls) == 0 ), \
|
||||
assert(len(lambda_client_double.update_function_code.mock_calls) == 0), \
|
||||
"updated lambda code when no change should have happened"
|
||||
|
||||
|
||||
def test_warn_region_not_specified():
|
||||
|
||||
set_module_args({
|
||||
|
@ -263,7 +272,7 @@ def test_warn_region_not_specified():
|
|||
"role": 'arn:aws:iam::987654321012:role/lambda_basic_execution',
|
||||
"handler": 'lambda_python.my_handler'})
|
||||
|
||||
get_aws_connection_info_double=Mock(return_value=(None,None,None))
|
||||
get_aws_connection_info_double = Mock(return_value=(None, None, None))
|
||||
|
||||
with patch.object(lda, 'get_aws_connection_info', get_aws_connection_info_double):
|
||||
with patch.object(basic.AnsibleModule, 'fail_json', fail_json_double):
|
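The lambda tests above build a client "double" whose methods return canned dictionaries, then count the recorded calls. A stripped-down sketch of that doubling pattern with unittest.mock; every name here is invented:

from unittest.mock import MagicMock, Mock

# A fake AWS client whose get_function() returns a canned payload.
lambda_client_double = MagicMock()
lambda_client_double.get_function.configure_mock(
    return_value={'Configuration': {'FunctionName': 'lambda_name', 'Timeout': 3}}
)

# A fake connection factory that always hands back the double.
fake_boto3_conn = Mock(return_value=lambda_client_double)

client = fake_boto3_conn('ec2')  # the argument is ignored by the Mock
config = client.get_function(FunctionName='lambda_name')['Configuration']

assert config['Timeout'] == 3
# The double records every call, so tests can assert on call counts.
assert len(lambda_client_double.get_function.mock_calls) == 1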
|
@@ -1,10 +1,12 @@
import pytest
boto = pytest.importorskip("boto")

import unittest

import ansible.modules.cloud.amazon.s3 as s3
from ansible.module_utils.six.moves.urllib.parse import urlparse

boto = pytest.importorskip("boto")


class TestUrlparse(unittest.TestCase):

def test_urlparse(self):
@@ -19,16 +21,16 @@ class TestUrlparse(unittest.TestCase):

def test_is_walrus(self):
actual = s3.is_walrus("trulywalrus_but_invalid_url")
#I don't know if this makes sense, but this is the current behaviour...
# I don't know if this makes sense, but this is the current behaviour...
self.assertEqual(True, actual)
actual = s3.is_walrus("http://notwalrus.amazonaws.com")
self.assertEqual(False, actual)

def test_get_s3_connection(self):
aws_connect_kwargs = dict(aws_access_key_id="access_key",
aws_secret_access_key="secret_key")
location=None
rgw=True
s3_url="http://bla.blubb"
aws_secret_access_key="secret_key")
location = None
rgw = True
s3_url = "http://bla.blubb"
actual = s3.get_s3_connection(aws_connect_kwargs, location, rgw, s3_url)
self.assertEqual("bla.blubb", actual.host)
|
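Several of the AWS test files guard their imports with pytest.importorskip so the whole module is skipped when the SDK is missing. A tiny sketch of the mechanism; the test body is illustrative only:

import pytest

# Skip every test in this file if boto3 cannot be imported.
boto3 = pytest.importorskip("boto3")


def test_client_factory_exists():
    # Only runs when boto3 is actually installed.
    assert callable(boto3.client)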
|
|
@ -4,6 +4,7 @@ import unittest
|
|||
|
||||
from ansible.modules.cloud.docker._docker import get_split_image_tag
|
||||
|
||||
|
||||
class DockerSplitImageTagTestCase(unittest.TestCase):
|
||||
|
||||
def test_trivial(self):
|
||||
|
|
|
@ -2,6 +2,7 @@ import unittest
|
|||
|
||||
from ansible.modules.cloud.google.gce_tag import _get_changed_items, _intersect_items, _union_items
|
||||
|
||||
|
||||
class TestGCETag(unittest.TestCase):
|
||||
"""Unit tests for gce_tag module."""
|
||||
|
||||
|
@ -29,7 +30,7 @@ class TestGCETag(unittest.TestCase):
|
|||
# tags removed
|
||||
new_tags = ['one', 'two']
|
||||
existing_tags = ['two']
|
||||
want = ['two'] # only remove the tag that was present
|
||||
want = ['two'] # only remove the tag that was present
|
||||
got = _intersect_items(existing_tags, new_tags)
|
||||
self.assertEqual(want, got)
|
||||
|
||||
|
|
|
@@ -1,8 +1,8 @@
import collections
import inspect
import mock
import pytest
import yaml
import inspect
import collections

from ansible.module_utils.six import string_types
from ansible.modules.cloud.openstack import os_server
@@ -188,12 +188,9 @@ class TestCreateServer(object):
os_server._create_server(self.module, self.cloud)

assert(self.cloud.create_server.call_count == 1)
assert(self.cloud.create_server.call_args[1]['image']
== self.cloud.get_image_id('cirros'))
assert(self.cloud.create_server.call_args[1]['flavor']
== self.cloud.get_flavor('m1.tiny')['id'])
assert(self.cloud.create_server.call_args[1]['nics'][0]['net-id']
== self.cloud.get_network('network1')['id'])
assert(self.cloud.create_server.call_args[1]['image'] == self.cloud.get_image_id('cirros'))
assert(self.cloud.create_server.call_args[1]['flavor'] == self.cloud.get_flavor('m1.tiny')['id'])
assert(self.cloud.create_server.call_args[1]['nics'][0]['net-id'] == self.cloud.get_network('network1')['id'])
def test_create_server_bad_flavor(self):
|
||||
'''
|
||||
|
|
|
@ -14,13 +14,12 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import unittest
|
||||
from ansible.modules.network.cumulus import nclu
|
||||
|
||||
import sys
|
||||
import time
|
||||
from ansible.module_utils.basic import *
|
||||
import unittest
|
||||
|
||||
from ansible.module_utils.basic import *
|
||||
from ansible.modules.network.cumulus import nclu
|
||||
|
||||
|
||||
class FakeModule(object):
|
||||
|
@ -172,7 +171,6 @@ class TestNclu(unittest.TestCase):
|
|||
self.assertEqual(module.fail_code, {})
|
||||
self.assertEqual(changed, True)
|
||||
|
||||
|
||||
def test_command_atomic(self):
|
||||
module = FakeModule()
|
||||
changed, output = nclu.run_nclu(module,
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import json
|
||||
import os
|
||||
|
||||
from ansible.compat.tests import unittest
|
||||
from ansible.compat.tests.mock import patch
|
||||
|
@ -35,6 +35,7 @@ def set_module_args(args):
|
|||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
fixture_data = {}
|
||||
|
||||
|
||||
def load_fixture(name):
|
||||
path = os.path.join(fixture_path, name)
|
||||
|
||||
|
@ -56,13 +57,14 @@ def load_fixture(name):
|
|||
class AnsibleExitJson(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class AnsibleFailJson(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TestEosModule(unittest.TestCase):
|
||||
|
||||
def execute_module(self, failed=False, changed=False, commands=None,
|
||||
sort=True, defaults=False, transport='cli'):
|
||||
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False, transport='cli'):
|
||||
|
||||
self.load_fixtures(commands, transport=transport)
|
||||
|
||||
|
@ -110,4 +112,3 @@ class TestEosModule(unittest.TestCase):
|
|||
|
||||
def load_fixtures(self, commands=None, transport='cli'):
|
||||
pass
|
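The AnsibleExitJson/AnsibleFailJson pairs that recur in these scaffolds let a mocked exit_json/fail_json abort the code under test instead of exiting the interpreter. A generic sketch of the idea with a toy module function:

class AnsibleExitJson(Exception):
    """Raised instead of letting the code under test call sys.exit()."""


class AnsibleFailJson(Exception):
    """Raised when the code under test reports a failure."""


def fake_exit_json(**kwargs):
    raise AnsibleExitJson(kwargs)


def fake_fail_json(**kwargs):
    kwargs['failed'] = True
    raise AnsibleFailJson(kwargs)


def toy_module(exit_json, fail_json, ok=True):
    """Stand-in for a module's main(): always ends in exit_json or fail_json."""
    if ok:
        exit_json(changed=True)
    fail_json(msg='boom')


try:
    toy_module(fake_exit_json, fake_fail_json, ok=False)
except AnsibleFailJson as exc:
    assert exc.args[0]['failed'] is True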
|
|
@ -25,6 +25,7 @@ from ansible.compat.tests.mock import patch
|
|||
from ansible.modules.network.eos import eos_command
|
||||
from .eos_module import TestEosModule, load_fixture, set_module_args
|
||||
|
||||
|
||||
class TestEosCommandModule(TestEosModule):
|
||||
|
||||
module = eos_command
|
||||
|
|
|
@ -71,7 +71,7 @@ class TestEosConfigModule(TestEosModule):
|
|||
|
||||
def test_eos_config_before(self):
|
||||
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'],
|
||||
before=['before command'])
|
||||
before=['before command'])
|
||||
|
||||
set_module_args(args)
|
||||
|
||||
|
@ -83,7 +83,7 @@ class TestEosConfigModule(TestEosModule):
|
|||
|
||||
def test_eos_config_after(self):
|
||||
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'],
|
||||
after=['after command'])
|
||||
after=['after command'])
|
||||
|
||||
set_module_args(args)
|
||||
|
||||
|
|
|
@ -25,6 +25,7 @@ from ansible.compat.tests.mock import patch
|
|||
from ansible.modules.network.eos import eos_system
|
||||
from .eos_module import TestEosModule, load_fixture, set_module_args
|
||||
|
||||
|
||||
class TestEosSystemModule(TestEosModule):
|
||||
|
||||
module = eos_system
|
||||
|
@ -75,20 +76,20 @@ class TestEosSystemModule(TestEosModule):
|
|||
'ip domain lookup source-interface Ethernet1']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
#def test_eos_system_name_servers(self):
|
||||
# name_servers = ['8.8.8.8', '8.8.4.4']
|
||||
# set_module_args(dict(name_servers=name_servers))
|
||||
# commands = ['ip name-server 8.8.4.4',
|
||||
# 'no ip name-server vrf mgmt 8.8.4.4']
|
||||
# self.execute_module(changed=True, commands=commands)
|
||||
# def test_eos_system_name_servers(self):
|
||||
# name_servers = ['8.8.8.8', '8.8.4.4']
|
||||
# set_module_args(dict(name_servers=name_servers))
|
||||
# commands = ['ip name-server 8.8.4.4',
|
||||
# 'no ip name-server vrf mgmt 8.8.4.4']
|
||||
# self.execute_module(changed=True, commands=commands)
|
||||
|
||||
#def rest_eos_system_name_servers_complex(self):
|
||||
# name_servers = dict(server='8.8.8.8', vrf='test')
|
||||
# set_module_args(dict(name_servers=name_servers))
|
||||
# commands = ['ip name-server vrf test 8.8.8.8',
|
||||
# 'no ip name-server vrf default 8.8.8.8',
|
||||
# 'no ip name-server vrf mgmt 8.8.4.4']
|
||||
# self.execute_module(changed=True, commands=commands)
|
||||
# def rest_eos_system_name_servers_complex(self):
|
||||
# name_servers = dict(server='8.8.8.8', vrf='test')
|
||||
# set_module_args(dict(name_servers=name_servers))
|
||||
# commands = ['ip name-server vrf test 8.8.8.8',
|
||||
# 'no ip name-server vrf default 8.8.8.8',
|
||||
# 'no ip name-server vrf mgmt 8.8.4.4']
|
||||
# self.execute_module(changed=True, commands=commands)
|
||||
|
||||
def test_eos_system_state_absent(self):
|
||||
set_module_args(dict(state='absent'))
|
||||
|
@ -104,4 +105,3 @@ class TestEosSystemModule(TestEosModule):
|
|||
name_servers = dict(server='8.8.8.8', vrf='missing')
|
||||
set_module_args(dict(name_servers=name_servers))
|
||||
result = self.execute_module(failed=True)
|
||||
|
||||
|
|
|
@ -95,5 +95,3 @@ class TestEosUserModule(TestEosModule):
|
|||
set_module_args(dict(username='ansible', password='test', update_password='always'))
|
||||
commands = ['username ansible secret test']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
|
||||
|
|
|
@ -35,6 +35,7 @@ def set_module_args(args):
|
|||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
fixture_data = {}
|
||||
|
||||
|
||||
def load_fixture(name):
|
||||
path = os.path.join(fixture_path, name)
|
||||
|
||||
|
@ -56,13 +57,14 @@ def load_fixture(name):
|
|||
class AnsibleExitJson(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class AnsibleFailJson(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TestIosModule(unittest.TestCase):
|
||||
|
||||
def execute_module(self, failed=False, changed=False, commands=None,
|
||||
sort=True, defaults=False):
|
||||
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
|
||||
|
||||
self.load_fixtures(commands)
|
||||
|
||||
|
@ -110,4 +112,3 @@ class TestIosModule(unittest.TestCase):
|
|||
|
||||
def load_fixtures(self, commands=None):
|
||||
pass
|
||||
|
||||
|
|
|
@ -40,8 +40,7 @@ class TestIosBannerModule(TestIosModule):
|
|||
self.mock_load_config.stop()
|
||||
|
||||
def load_fixtures(self, commands=None):
|
||||
self.exec_command.return_value = (0,
|
||||
load_fixture('ios_banner_show_banner.txt').strip(), None)
|
||||
self.exec_command.return_value = (0, load_fixture('ios_banner_show_banner.txt').strip(), None)
|
||||
self.load_config.return_value = dict(diff=None, session='session')
|
||||
|
||||
def test_ios_banner_create(self):
|
||||
|
|
|
@ -25,6 +25,7 @@ from ansible.compat.tests.mock import patch
|
|||
from ansible.modules.network.ios import ios_command
|
||||
from .ios_module import TestIosModule, load_fixture, set_module_args
|
||||
|
||||
|
||||
class TestIosCommandModule(TestIosModule):
|
||||
|
||||
module = ios_command
|
||||
|
|
|
@@ -89,19 +89,19 @@ class TestIosConfigModule(TestIosModule):
self.execute_module(changed=True, commands=commands)

def test_ios_config_before(self):
set_module_args(dict(lines=['hostname foo'], before=['test1','test2']))
set_module_args(dict(lines=['hostname foo'], before=['test1', 'test2']))
commands = ['test1', 'test2', 'hostname foo']
self.execute_module(changed=True, commands=commands, sort=False)

def test_ios_config_after(self):
set_module_args(dict(lines=['hostname foo'], after=['test1','test2']))
set_module_args(dict(lines=['hostname foo'], after=['test1', 'test2']))
commands = ['hostname foo', 'test1', 'test2']
self.execute_module(changed=True, commands=commands, sort=False)

def test_ios_config_before_after_no_change(self):
set_module_args(dict(lines=['hostname router'],
before=['test1', 'test2'],
after=['test3','test4']))
after=['test3', 'test4']))
self.execute_module()

def test_ios_config_config(self):
@ -51,7 +51,6 @@ class TestIosSystemModule(TestIosModule):
|
|||
commands = ['hostname foo']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
|
||||
def test_ios_system_domain_name(self):
|
||||
set_module_args(dict(domain_name=['test.com']))
|
||||
commands = ['ip domain name test.com',
|
||||
|
@ -120,4 +119,3 @@ class TestIosSystemModule(TestIosModule):
|
|||
name_servers = dict(server='8.8.8.8', vrf='missing')
|
||||
set_module_args(dict(name_servers=name_servers))
|
||||
self.execute_module(failed=True)
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@ from ansible.compat.tests.mock import patch
|
|||
from ansible.modules.network.ios import _ios_template
|
||||
from .ios_module import TestIosModule, load_fixture, set_module_args
|
||||
|
||||
|
||||
class TestIosTemplateModule(TestIosModule):
|
||||
|
||||
module = _ios_template
|
||||
|
|
|
@ -122,5 +122,3 @@ class TestIosVrfModule(TestIosModule):
|
|||
commands = ['no vrf definition test_1', 'vrf definition test_2',
|
||||
'description test string']
|
||||
self.execute_module(changed=True, commands=commands, sort=False)
|
||||
|
||||
|
||||
|
|
|
@ -35,6 +35,7 @@ def set_module_args(args):
|
|||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
fixture_data = {}
|
||||
|
||||
|
||||
def load_fixture(name):
|
||||
path = os.path.join(fixture_path, name)
|
||||
|
||||
|
@ -56,13 +57,14 @@ def load_fixture(name):
|
|||
class AnsibleExitJson(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class AnsibleFailJson(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TestIosxrModule(unittest.TestCase):
|
||||
|
||||
def execute_module(self, failed=False, changed=False, commands=None,
|
||||
sort=True, defaults=False):
|
||||
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
|
||||
|
||||
self.load_fixtures(commands)
|
||||
|
||||
|
@ -110,4 +112,3 @@ class TestIosxrModule(unittest.TestCase):
|
|||
|
||||
def load_fixtures(self, commands=None):
|
||||
pass
|
||||
|
||||
|
|
|
@@ -26,6 +26,7 @@ from ansible.compat.tests.mock import patch
from ansible.modules.network.iosxr import iosxr_config
from .iosxr_module import TestIosxrModule, load_fixture, set_module_args

class TestIosxrConfigModule(TestIosxrModule):

module = iosxr_config

@@ -73,19 +74,19 @@ class TestIosxrConfigModule(TestIosxrModule):
self.execute_module(changed=True, commands=commands)

def test_iosxr_config_before(self):
set_module_args(dict(lines=['hostname foo'], before=['test1','test2']))
set_module_args(dict(lines=['hostname foo'], before=['test1', 'test2']))
commands = ['test1', 'test2', 'hostname foo']
self.execute_module(changed=True, commands=commands, sort=False)

def test_iosxr_config_after(self):
set_module_args(dict(lines=['hostname foo'], after=['test1','test2']))
set_module_args(dict(lines=['hostname foo'], after=['test1', 'test2']))
commands = ['hostname foo', 'test1', 'test2']
self.execute_module(changed=True, commands=commands, sort=False)

def test_iosxr_config_before_after_no_change(self):
set_module_args(dict(lines=['hostname router'],
before=['test1', 'test2'],
after=['test3','test4']))
after=['test3', 'test4']))
self.execute_module()

def test_iosxr_config_config(self):

@@ -66,8 +66,7 @@ class TestIosxrFacts(TestIosxrModule):
self.assertIn('interfaces', ansible_facts['ansible_net_gather_subset'])
self.assertEquals('iosxr01', ansible_facts['ansible_net_hostname'])
self.assertEquals(['disk0:', 'flash0:'], ansible_facts['ansible_net_filesystems'])
self.assertIn('GigabitEthernet0/0/0/0',
ansible_facts['ansible_net_interfaces'].keys())
self.assertIn('GigabitEthernet0/0/0/0', ansible_facts['ansible_net_interfaces'].keys())
self.assertEquals('3095', ansible_facts['ansible_net_memtotal_mb'])
self.assertEquals('1499', ansible_facts['ansible_net_memfree_mb'])

@@ -57,7 +57,7 @@ class TestIosxrSystemModule(TestIosxrModule):

def test_iosxr_system_domain_search(self):
set_module_args(dict(domain_search=['ansible.com', 'redhat.com']))
commands=['domain list ansible.com', 'no domain list cisco.com']
commands = ['domain list ansible.com', 'no domain list cisco.com']
self.execute_module(changed=True, commands=commands)

def test_iosxr_system_lookup_source(self):

@@ -78,14 +78,18 @@ class TestIosxrSystemModule(TestIosxrModule):

def test_iosxr_system_state_absent(self):
set_module_args(dict(state='absent'))
commands = ['no hostname', 'no domain name',
'no domain lookup disable',
'no domain lookup source-interface MgmtEth0/0/CPU0/0',
'no domain list redhat.com', 'no domain list cisco.com',
'no domain name-server 8.8.8.8', 'no domain name-server 8.8.4.4']
commands = [
'no hostname',
'no domain name',
'no domain lookup disable',
'no domain lookup source-interface MgmtEth0/0/CPU0/0',
'no domain list redhat.com',
'no domain list cisco.com',
'no domain name-server 8.8.8.8',
'no domain name-server 8.8.4.4'
]
self.execute_module(changed=True, commands=commands)

def test_iosxr_system_no_change(self):
set_module_args(dict(hostname='iosxr01', domain_name='eng.ansible.com'))
self.execute_module()
@@ -35,6 +35,7 @@ def set_module_args(args):
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}

def load_fixture(name):
path = os.path.join(fixture_path, name)

@@ -56,13 +57,14 @@ def load_fixture(name):
class AnsibleExitJson(Exception):
pass

class AnsibleFailJson(Exception):
pass

class TestNxosModule(unittest.TestCase):

def execute_module(self, failed=False, changed=False, commands=None,
sort=True, defaults=False):
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):

self.load_fixtures(commands)

@@ -110,4 +112,3 @@ class TestNxosModule(unittest.TestCase):

def load_fixtures(self, commands=None):
pass

@@ -25,6 +25,7 @@ from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_command
from .nxos_module import TestNxosModule, load_fixture, set_module_args

class TestNxosCommandModule(TestNxosModule):

module = nxos_command

@@ -73,7 +73,7 @@ class TestNxosConfigModule(TestNxosModule):

def test_nxos_config_before(self):
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'],
before=['before command'])
before=['before command'])

set_module_args(args)

@@ -85,7 +85,7 @@ class TestNxosConfigModule(TestNxosModule):

def test_nxos_config_after(self):
args = dict(lines=['hostname switch01', 'ip domain-name eng.ansible.com'],
after=['after command'])
after=['after command'])

set_module_args(args)

@@ -134,6 +134,3 @@ class TestNxosConfigModule(TestNxosModule):
set_module_args(args)
result = self.execute_module()
self.assertIn('__backup__', result)

@@ -61,4 +61,3 @@ class TestNxosEvpnGlobalModule(TestNxosModule):
set_module_args(dict(nv_overlay_evpn=False))
commands = ['no nv overlay evpn']
self.start_configured(changed=True, commands=commands)

@@ -126,5 +126,3 @@ class TestNxosSystemModule(TestNxosModule):
'vrf context management', 'no ip name-server 172.26.1.1', 'exit',
'no system jumbomtu']
self.execute_module(changed=True, commands=commands)
@@ -25,6 +25,7 @@ from ansible.compat.tests.mock import patch
from ansible.modules.network.vyos import vyos_command
from .vyos_module import TestVyosModule, load_fixture, set_module_args

class TestVyosCommandModule(TestVyosModule):

module = vyos_command

@@ -35,6 +35,7 @@ def set_module_args(args):
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}

def load_fixture(name):
path = os.path.join(fixture_path, name)

@@ -56,14 +57,14 @@ def load_fixture(name):
class AnsibleExitJson(Exception):
pass

class AnsibleFailJson(Exception):
pass

class TestVyosModule(unittest.TestCase):

def execute_module(self, failed=False, changed=False, commands=None,
sort=True, defaults=False):

def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
self.load_fixtures(commands)

if failed:

@@ -110,4 +111,3 @@ class TestVyosModule(unittest.TestCase):

def load_fixtures(self, commands=None):
pass
@@ -20,9 +20,10 @@ class AptExpandPkgspecTestCase(unittest.TestCase):

def setUp(self):
FakePackage = collections.namedtuple("Package", ("name",))
self.fake_cache = [ FakePackage("apt"),
FakePackage("apt-utils"),
FakePackage("not-selected"),
self.fake_cache = [
FakePackage("apt"),
FakePackage("apt-utils"),
FakePackage("not-selected"),
]

def test_trivial(self):

@@ -36,11 +36,8 @@ class KnownHostsDiffTestCase(unittest.TestCase):
self.assertEqual(diff, {
'before_header': path,
'after_header': path,
'before':
'two.example.com ssh-rsa BBBBetc\n',
'after':
'two.example.com ssh-rsa BBBBetc\n'
'one.example.com ssh-rsa AAAAetc\n',
'before': 'two.example.com ssh-rsa BBBBetc\n',
'after': 'two.example.com ssh-rsa BBBBetc\none.example.com ssh-rsa AAAAetc\n',
})

def test_no_change(self):

@@ -53,12 +50,8 @@ class KnownHostsDiffTestCase(unittest.TestCase):
self.assertEqual(diff, {
'before_header': path,
'after_header': path,
'before':
'one.example.com ssh-rsa AAAAetc\n'
'two.example.com ssh-rsa BBBBetc\n',
'after':
'one.example.com ssh-rsa AAAAetc\n'
'two.example.com ssh-rsa BBBBetc\n',
'before': 'one.example.com ssh-rsa AAAAetc\ntwo.example.com ssh-rsa BBBBetc\n',
'after': 'one.example.com ssh-rsa AAAAetc\ntwo.example.com ssh-rsa BBBBetc\n',
})

def test_key_change(self):

@@ -71,12 +64,8 @@ class KnownHostsDiffTestCase(unittest.TestCase):
self.assertEqual(diff, {
'before_header': path,
'after_header': path,
'before':
'one.example.com ssh-rsa AAAaetc\n'
'two.example.com ssh-rsa BBBBetc\n',
'after':
'two.example.com ssh-rsa BBBBetc\n'
'one.example.com ssh-rsa AAAAetc\n',
'before': 'one.example.com ssh-rsa AAAaetc\ntwo.example.com ssh-rsa BBBBetc\n',
'after': 'two.example.com ssh-rsa BBBBetc\none.example.com ssh-rsa AAAAetc\n',
})

def test_key_removal(self):

@@ -89,11 +78,8 @@ class KnownHostsDiffTestCase(unittest.TestCase):
self.assertEqual(diff, {
'before_header': path,
'after_header': path,
'before':
'one.example.com ssh-rsa AAAAetc\n'
'two.example.com ssh-rsa BBBBetc\n',
'after':
'two.example.com ssh-rsa BBBBetc\n',
'before': 'one.example.com ssh-rsa AAAAetc\ntwo.example.com ssh-rsa BBBBetc\n',
'after': 'two.example.com ssh-rsa BBBBetc\n',
})

def test_key_removal_no_change(self):

@@ -105,8 +91,6 @@ class KnownHostsDiffTestCase(unittest.TestCase):
self.assertEqual(diff, {
'before_header': path,
'after_header': path,
'before':
'two.example.com ssh-rsa BBBBetc\n',
'after':
'two.example.com ssh-rsa BBBBetc\n',
'before': 'two.example.com ssh-rsa BBBBetc\n',
'after': 'two.example.com ssh-rsa BBBBetc\n',
})
@@ -21,12 +21,11 @@ __metaclass__ = type

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, mock_open
from ansible.errors import AnsibleParserError
from ansible.errors import yaml_strings
from ansible.errors import AnsibleParserError, yaml_strings
from ansible.module_utils.six import PY3

from ansible.parsing.dataloader import DataLoader

class TestDataLoader(unittest.TestCase):

def setUp(self):

@@ -39,7 +38,7 @@ class TestDataLoader(unittest.TestCase):
def test_parse_json_from_file(self, mock_def):
mock_def.return_value = (b"""{"a": 1, "b": 2, "c": 3}""", True)
output = self._loader.load_from_file('dummy_json.txt')
self.assertEqual(output, dict(a=1,b=2,c=3))
self.assertEqual(output, dict(a=1, b=2, c=3))

@patch.object(DataLoader, '_get_file_contents')
def test_parse_yaml_from_file(self, mock_def):

@@ -49,7 +48,7 @@ class TestDataLoader(unittest.TestCase):
c: 3
""", True)
output = self._loader.load_from_file('dummy_yaml.txt')
self.assertEqual(output, dict(a=1,b=2,c=3))
self.assertEqual(output, dict(a=1, b=2, c=3))

@patch.object(DataLoader, '_get_file_contents')
def test_parse_fail_from_file(self, mock_def):

@@ -19,10 +19,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.parsing.mod_args import ModuleArgsParser
from ansible.errors import AnsibleParserError

from ansible.compat.tests import unittest
from ansible.errors import AnsibleParserError
from ansible.parsing.mod_args import ModuleArgsParser

class TestModArgsDwim(unittest.TestCase):

@@ -47,9 +47,9 @@ class TestModArgsDwim(unittest.TestCase):
self._debug(mod, args, to)
self.assertEqual(mod, 'command')
self.assertEqual(args, dict(
_raw_params = 'echo hi',
_uses_shell = True,
))
_raw_params='echo hi',
_uses_shell=True,
))
self.assertIsNone(to)

def test_basic_command(self):

@@ -58,8 +58,8 @@ class TestModArgsDwim(unittest.TestCase):
self._debug(mod, args, to)
self.assertEqual(mod, 'command')
self.assertEqual(args, dict(
_raw_params = 'echo hi',
))
_raw_params='echo hi',
))
self.assertIsNone(to)

def test_shell_with_modifiers(self):

@@ -68,11 +68,11 @@ class TestModArgsDwim(unittest.TestCase):
self._debug(mod, args, to)
self.assertEqual(mod, 'command')
self.assertEqual(args, dict(
creates = '/tmp/baz',
removes = '/tmp/bleep',
_raw_params = '/bin/foo',
_uses_shell = True,
))
creates='/tmp/baz',
removes='/tmp/bleep',
_raw_params='/bin/foo',
_uses_shell=True,
))
self.assertIsNone(to)

def test_normal_usage(self):

@@ -127,4 +127,3 @@ class TestModArgsDwim(unittest.TestCase):

m = ModuleArgsParser(dict(ping='data=hi', shell='echo hi'))
self.assertRaises(AnsibleParserError, m.parse)

@@ -4,6 +4,7 @@ import unittest

from ansible.parsing.utils.addresses import parse_address

class TestParseAddress(unittest.TestCase):

tests = {

@@ -22,6 +22,7 @@ __metaclass__ = type
from ansible.compat.tests import unittest
from ansible.parsing.utils.jsonify import jsonify

class TestJsonify(unittest.TestCase):
def test_jsonify_simple(self):
self.assertEqual(jsonify(dict(a=1, b=2, c=3)), '{"a": 1, "b": 2, "c": 3}')
@@ -77,11 +77,11 @@ class TestVaultIsEncrypted(unittest.TestCase):
self.assertTrue(vault.is_encrypted(b_data))

def test_invalid_text_not_ascii(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s"% u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
self.assertFalse(vault.is_encrypted(data))

def test_invalid_bytes_not_ascii(self):
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s"% u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
data = u"$ANSIBLE_VAULT;9.9;TEST\n%s" % u"ァ ア ィ イ ゥ ウ ェ エ ォ オ カ ガ キ ギ ク グ ケ "
b_data = to_bytes(data, encoding='utf-8')
self.assertFalse(vault.is_encrypted(b_data))

@@ -74,7 +74,7 @@ class TestVaultEditor(unittest.TestCase):
def tearDown(self):
if self._test_dir:
pass
#shutil.rmtree(self._test_dir)
# shutil.rmtree(self._test_dir)
self._test_dir = None

def test_methods_exist(self):

@@ -361,7 +361,7 @@ class TestVaultEditor(unittest.TestCase):

self.assertEqual(src_file_plaintext, new_src_contents)

#self.assertEqual(src_file_plaintext, new_src_contents,
# self.assertEqual(src_file_plaintext, new_src_contents,
# 'The decrypted plaintext of the editted file is not the expected contents.')

@patch('ansible.parsing.vault.call')

@@ -26,15 +26,14 @@ try:
except ImportError:
from yaml.parser import ParserError

from ansible.parsing.yaml import dumper
from ansible.compat.tests import unittest
from ansible.parsing import vault
from ansible.parsing.yaml import dumper, objects
from ansible.parsing.yaml.loader import AnsibleLoader

from ansible.compat.tests import unittest
from ansible.parsing.yaml import objects
from ansible.parsing import vault

from units.mock.yaml_helper import YamlTestUtils

class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
def setUp(self):
self.vault_password = "hunter42"

@@ -43,7 +42,7 @@ class TestAnsibleDumper(unittest.TestCase, YamlTestUtils):
self.stream = self._build_stream()
self.dumper = dumper.AnsibleDumper

def _build_stream(self,yaml_text=None):
def _build_stream(self, yaml_text=None):
text = yaml_text or u''
stream = io.StringIO(text)
return stream
@@ -341,16 +341,17 @@ class TestAnsibleLoaderPlay(unittest.TestCase):
self.assertEqual(self.data[0][u'vars'][u'number'], 1)
self.assertEqual(self.data[0][u'vars'][u'string'], u'Ansible')
self.assertEqual(self.data[0][u'vars'][u'utf8_string'], u'Cafè Eñyei')
self.assertEqual(self.data[0][u'vars'][u'dictionary'],
{u'webster': u'daniel',
u'oed': u'oxford'})
self.assertEqual(self.data[0][u'vars'][u'dictionary'], {
u'webster': u'daniel',
u'oed': u'oxford'
})
self.assertEqual(self.data[0][u'vars'][u'list'], [u'a', u'b', 1, 2])

self.assertEqual(self.data[0][u'tasks'],
[{u'name': u'Test case', u'ping': {u'data': u'{{ utf8_string }}'}},
{u'name': u'Test 2', u'ping': {u'data': u'Cafè Eñyei'}},
{u'name': u'Test 3', u'command': u'printf \'Cafè Eñyei\n\''},
])
self.assertEqual(self.data[0][u'tasks'], [
{u'name': u'Test case', u'ping': {u'data': u'{{ utf8_string }}'}},
{u'name': u'Test 2', u'ping': {u'data': u'Cafè Eñyei'}},
{u'name': u'Test 3', u'command': u'printf \'Cafè Eñyei\n\''},
])

def walk(self, data):
# Make sure there's no str in the data

@@ -104,22 +104,22 @@ class TestAnsibleVaultEncryptedUnicode(unittest.TestCase, YamlTestUtils):
def test_empty_string_init_from_plaintext(self):
seq = ''
avu = self._from_plaintext(seq)
self.assert_values(avu,seq)
self.assert_values(avu, seq)

def test_empty_unicode_init_from_plaintext(self):
seq = u''
avu = self._from_plaintext(seq)
self.assert_values(avu,seq)
self.assert_values(avu, seq)

def test_string_from_plaintext(self):
seq = 'some letters'
avu = self._from_plaintext(seq)
self.assert_values(avu,seq)
self.assert_values(avu, seq)

def test_unicode_from_plaintext(self):
seq = u'some letters'
avu = self._from_plaintext(seq)
self.assert_values(avu,seq)
self.assert_values(avu, seq)

def test_unicode_from_plaintext_encode(self):
seq = u'some text here'

@@ -274,4 +274,3 @@ class TestRole(unittest.TestCase):
r = Role.load(i, play=mock_play)

self.assertEqual(r.get_name(), "foo_complex")

@@ -52,4 +52,3 @@ class TestAttribute(unittest.TestCase):
self.assertTrue(self.one >= self.one)
self.assertFalse(self.one >= self.two)
self.assertTrue(self.two >= self.one)
@@ -114,7 +114,7 @@ class TestBase(unittest.TestCase):
data = {'no_log': False,
'remote_user': None,
'vars': self.assorted_vars,
#'check_mode': False,
# 'check_mode': False,
'always_run': False,
'environment': [],
'run_once': False,

@@ -340,8 +340,7 @@ class BaseSubClass(base.Base):
_test_attr_unknown_isa = FieldAttribute(isa='not_a_real_isa', always_post_validate=True)
_test_attr_example = FieldAttribute(isa='string', default='the_default',
always_post_validate=True)
_test_attr_none = FieldAttribute(isa='string',
always_post_validate=True)
_test_attr_none = FieldAttribute(isa='string', always_post_validate=True)
_test_attr_preprocess = FieldAttribute(isa='string', default='the default for preprocess')
_test_attr_method = FieldAttribute(isa='string', default='some attr with a getter',
always_post_validate=True)

@@ -19,9 +19,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.compat.tests import unittest
from ansible.playbook.block import Block
from ansible.playbook.task import Task
from ansible.compat.tests import unittest

class TestBlock(unittest.TestCase):

@@ -39,24 +40,24 @@ class TestBlock(unittest.TestCase):

def test_load_block_simple(self):
ds = dict(
block = [],
rescue = [],
always = [],
#otherwise = [],
block=[],
rescue=[],
always=[],
# otherwise=[],
)
b = Block.load(ds)
self.assertEqual(b.block, [])
self.assertEqual(b.rescue, [])
self.assertEqual(b.always, [])
# not currently used
#self.assertEqual(b.otherwise, [])
# self.assertEqual(b.otherwise, [])

def test_load_block_with_tasks(self):
ds = dict(
block = [dict(action='block')],
rescue = [dict(action='rescue')],
always = [dict(action='always')],
#otherwise = [dict(action='otherwise')],
block=[dict(action='block')],
rescue=[dict(action='rescue')],
always=[dict(action='always')],
# otherwise=[dict(action='otherwise')],
)
b = Block.load(ds)
self.assertEqual(len(b.block), 1)

@@ -66,8 +67,8 @@ class TestBlock(unittest.TestCase):
self.assertEqual(len(b.always), 1)
self.assertIsInstance(b.always[0], Task)
# not currently used
#self.assertEqual(len(b.otherwise), 1)
#self.assertIsInstance(b.otherwise[0], Task)
# self.assertEqual(len(b.otherwise), 1)
# self.assertIsInstance(b.otherwise[0], Task)

def test_load_implicit_block(self):
ds = [dict(action='foo')]

@@ -77,12 +78,11 @@ class TestBlock(unittest.TestCase):

def test_deserialize(self):
ds = dict(
block = [dict(action='block')],
rescue = [dict(action='rescue')],
always = [dict(action='always')],
block=[dict(action='block')],
rescue=[dict(action='rescue')],
always=[dict(action='always')],
)
b = Block.load(ds)
data = dict(parent = ds, parent_type = 'Block')
data = dict(parent=ds, parent_type='Block')
b.deserialize(data)
self.assertIsInstance(b._parent, Block)
@@ -97,11 +97,11 @@ class TestConditional(unittest.TestCase):
}

when = [u"some_dict.some_dict_key1 == hostvars['host3']"]
#self._eval_con(when, variables)
# self._eval_con(when, variables)
self.assertRaisesRegexp(errors.AnsibleError,
"The conditional check 'some_dict.some_dict_key1 == hostvars\['host3'\]' failed",
#"The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed",
#"The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed.",
# "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed",
# "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed.",
self._eval_con,
when, variables)

@@ -222,7 +222,7 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
# TODO/FIXME: do this non deprecated way
def test_one_parent_include_tags(self):
ds = [{'include': '/dev/null/includes/test_include.yml',
#'vars': {'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']}
# 'vars': {'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']}
'tags': ['test_one_parent_include_tags_tag1', 'and_another_tag2']
}
]

@@ -281,7 +281,7 @@ class TestLoadListOfTasks(unittest.TestCase, MixinForMocks):
'include': '/dev/null/includes/static_test_include.yml',
'static': False
}]
#a_block = Block()
# a_block = Block()
ti_ds = {'include': '/dev/null/includes/ssdftatic_test_include.yml'}
a_task_include = TaskInclude()
ti = a_task_include.load(ti_ds)
@@ -21,30 +21,30 @@ __metaclass__ = type

import os

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock

from ansible import constants as C
from ansible.cli import CLI
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils.six.moves import shlex_quote
from ansible.playbook.play_context import PlayContext

from units.mock.loader import DictDataLoader

class TestPlayContext(unittest.TestCase):

def setUp(self):
self._parser = CLI.base_parser(
runas_opts = True,
meta_opts = True,
runtask_opts = True,
vault_opts = True,
async_opts = True,
connect_opts = True,
subset_opts = True,
check_opts = True,
inventory_opts = True,
runas_opts=True,
meta_opts=True,
runtask_opts=True,
vault_opts=True,
async_opts=True,
connect_opts=True,
subset_opts=True,
check_opts=True,
inventory_opts=True,
)

def tearDown(self):

@@ -66,13 +66,13 @@ class TestPlayContext(unittest.TestCase):
self.assertEqual(play_context.no_log, None)

mock_play = MagicMock()
mock_play.connection = 'mock'
mock_play.remote_user = 'mock'
mock_play.port = 1234
mock_play.become = True
mock_play.connection = 'mock'
mock_play.remote_user = 'mock'
mock_play.port = 1234
mock_play.become = True
mock_play.become_method = 'mock'
mock_play.become_user = 'mockroot'
mock_play.no_log = True
mock_play.become_user = 'mockroot'
mock_play.no_log = True

play_context = PlayContext(play=mock_play, options=options)
self.assertEqual(play_context.connection, 'mock')

@@ -84,19 +84,19 @@ class TestPlayContext(unittest.TestCase):
self.assertEqual(play_context.become_user, "mockroot")

mock_task = MagicMock()
mock_task.connection = 'mocktask'
mock_task.remote_user = 'mocktask'
mock_task.no_log = mock_play.no_log
mock_task.become = True
mock_task.connection = 'mocktask'
mock_task.remote_user = 'mocktask'
mock_task.no_log = mock_play.no_log
mock_task.become = True
mock_task.become_method = 'mocktask'
mock_task.become_user = 'mocktaskroot'
mock_task.become_pass = 'mocktaskpass'
mock_task.become_user = 'mocktaskroot'
mock_task.become_pass = 'mocktaskpass'
mock_task._local_action = False
mock_task.delegate_to = None
mock_task.delegate_to = None

all_vars = dict(
ansible_connection = 'mock_inventory',
ansible_ssh_port = 4321,
ansible_connection='mock_inventory',
ansible_ssh_port=4321,
)

mock_templar = MagicMock()

@@ -112,7 +112,7 @@ class TestPlayContext(unittest.TestCase):
self.assertEqual(play_context.become_user, "mocktaskroot")
self.assertEqual(play_context.become_pass, "mocktaskpass")

mock_task.no_log = False
mock_task.no_log = False
play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
self.assertEqual(play_context.no_log, False)

@@ -122,19 +122,19 @@ class TestPlayContext(unittest.TestCase):

default_cmd = "/bin/foo"
default_exe = "/bin/bash"
sudo_exe = C.DEFAULT_SUDO_EXE or 'sudo'
sudo_flags = C.DEFAULT_SUDO_FLAGS
su_exe = C.DEFAULT_SU_EXE or 'su'
su_flags = C.DEFAULT_SU_FLAGS or ''
pbrun_exe = 'pbrun'
sudo_exe = C.DEFAULT_SUDO_EXE or 'sudo'
sudo_flags = C.DEFAULT_SUDO_FLAGS
su_exe = C.DEFAULT_SU_EXE or 'su'
su_flags = C.DEFAULT_SU_FLAGS or ''
pbrun_exe = 'pbrun'
pbrun_flags = ''
pfexec_exe = 'pfexec'
pfexec_exe = 'pfexec'
pfexec_flags = ''
doas_exe = 'doas'
doas_flags = ' -n -u foo '
doas_exe = 'doas'
doas_flags = ' -n -u foo '
ksu_exe = 'ksu'
ksu_flags = ''
dzdo_exe = 'dzdo'
dzdo_exe = 'dzdo'

cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
self.assertEqual(cmd, default_cmd)

@@ -152,7 +152,7 @@ class TestPlayContext(unittest.TestCase):
cmd = play_context.make_become_cmd(cmd=default_cmd, executable=default_exe)
self.assertEqual(
cmd,
"""%s %s -p "%s" -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n',''), play_context.prompt, play_context.become_user, default_exe,
"""%s %s -p "%s" -u %s %s -c 'echo %s; %s'""" % (sudo_exe, sudo_flags.replace('-n', ''), play_context.prompt, play_context.become_user, default_exe,
play_context.success_key, default_cmd)
)
@@ -207,103 +207,129 @@ class TestPlayContext(unittest.TestCase):
class TestTaskAndVariableOverrride(unittest.TestCase):

inventory_vars = (
('preferred_names',
dict(ansible_connection='local',
ansible_user='ansibull',
ansible_become_user='ansibull',
ansible_become_method='su',
ansible_become_pass='ansibullwuzhere',),
dict(connection='local',
remote_user='ansibull',
become_user='ansibull',
become_method='su',
become_pass='ansibullwuzhere',)
(
'preferred_names',
dict(
ansible_connection='local',
ansible_user='ansibull',
ansible_become_user='ansibull',
ansible_become_method='su',
ansible_become_pass='ansibullwuzhere',
),
('alternate_names',
dict(ansible_become_password='ansibullwuzhere',),
dict(become_pass='ansibullwuzhere',)
dict(
connection='local',
remote_user='ansibull',
become_user='ansibull',
become_method='su',
become_pass='ansibullwuzhere',
)
),
(
'alternate_names',
dict(ansible_become_password='ansibullwuzhere',),
dict(become_pass='ansibullwuzhere',)
),
(
'deprecated_names',
dict(
ansible_ssh_user='ansibull',
ansible_sudo_user='ansibull',
ansible_sudo_pass='ansibullwuzhere',
),
('deprecated_names',
dict(ansible_ssh_user='ansibull',
ansible_sudo_user='ansibull',
ansible_sudo_pass='ansibullwuzhere',),
dict(remote_user='ansibull',
become_method='sudo',
become_user='ansibull',
become_pass='ansibullwuzhere',)
dict(
remote_user='ansibull',
become_method='sudo',
become_user='ansibull',
become_pass='ansibullwuzhere',
)
),
(
'deprecated_names2',
dict(
ansible_ssh_user='ansibull',
ansible_su_user='ansibull',
ansible_su_pass='ansibullwuzhere',
),
('deprecated_names2',
dict(ansible_ssh_user='ansibull',
ansible_su_user='ansibull',
ansible_su_pass='ansibullwuzhere',),
dict(remote_user='ansibull',
become_method='su',
become_user='ansibull',
become_pass='ansibullwuzhere',)
),
('deprecated_alt_names',
dict(ansible_sudo_password='ansibullwuzhere',),
dict(become_method='sudo',
become_pass='ansibullwuzhere',)
),
('deprecated_alt_names2',
dict(ansible_su_password='ansibullwuzhere',),
dict(become_method='su',
become_pass='ansibullwuzhere',)
),
('deprecated_and_preferred_names',
dict(ansible_user='ansibull',
ansible_ssh_user='badbull',
ansible_become_user='ansibull',
ansible_sudo_user='badbull',
ansible_become_method='su',
ansible_become_pass='ansibullwuzhere',
ansible_sudo_pass='badbull',
),
dict(connection='local',
remote_user='ansibull',
become_user='ansibull',
become_method='su',
become_pass='ansibullwuzhere',)
dict(
remote_user='ansibull',
become_method='su',
become_user='ansibull',
become_pass='ansibullwuzhere',
)
),
(
'deprecated_alt_names',
dict(ansible_sudo_password='ansibullwuzhere',),
dict(
become_method='sudo',
become_pass='ansibullwuzhere',
)
),
(
'deprecated_alt_names2',
dict(ansible_su_password='ansibullwuzhere',),
dict(
become_method='su',
become_pass='ansibullwuzhere',
)
),
(
'deprecated_and_preferred_names',
dict(
ansible_user='ansibull',
ansible_ssh_user='badbull',
ansible_become_user='ansibull',
ansible_sudo_user='badbull',
ansible_become_method='su',
ansible_become_pass='ansibullwuzhere',
ansible_sudo_pass='badbull',
),
dict(
connection='local',
remote_user='ansibull',
become_user='ansibull',
become_method='su',
become_pass='ansibullwuzhere',
)
),
)

def setUp(self):
parser = CLI.base_parser(
runas_opts = True,
meta_opts = True,
runtask_opts = True,
vault_opts = True,
async_opts = True,
connect_opts = True,
subset_opts = True,
check_opts = True,
inventory_opts = True,
runas_opts=True,
meta_opts=True,
runtask_opts=True,
vault_opts=True,
async_opts=True,
connect_opts=True,
subset_opts=True,
check_opts=True,
inventory_opts=True,
)

(options, args) = parser.parse_args(['-vv', '--check'])

mock_play = MagicMock()
mock_play.connection = 'mock'
mock_play.remote_user = 'mock'
mock_play.port = 1234
mock_play.become = True
mock_play.connection = 'mock'
mock_play.remote_user = 'mock'
mock_play.port = 1234
mock_play.become = True
mock_play.become_method = 'mock'
mock_play.become_user = 'mockroot'
mock_play.no_log = True
mock_play.become_user = 'mockroot'
mock_play.no_log = True

self.play_context = PlayContext(play=mock_play, options=options)

mock_task = MagicMock()
mock_task.connection = mock_play.connection
mock_task.remote_user = mock_play.remote_user
mock_task.no_log = mock_play.no_log
mock_task.become = mock_play.become
mock_task.connection = mock_play.connection
mock_task.remote_user = mock_play.remote_user
mock_task.no_log = mock_play.no_log
mock_task.become = mock_play.become
mock_task.become_method = mock_play.becom_method
mock_task.become_user = mock_play.become_user
mock_task.become_pass = 'mocktaskpass'
mock_task.become_user = mock_play.become_user
mock_task.become_pass = 'mocktaskpass'
mock_task._local_action = False
mock_task.delegate_to = None
mock_task.delegate_to = None

self.mock_task = mock_task

@@ -322,19 +348,19 @@ class TestTaskAndVariableOverrride(unittest.TestCase):
self.assertEqual(play_context.become_user, "mocktaskroot")
self.assertEqual(play_context.become_pass, "mocktaskpass")

mock_task.no_log = False
mock_task.no_log = False
play_context = play_context.set_task_and_variable_override(task=mock_task, variables=all_vars, templar=mock_templar)
self.assertEqual(play_context.no_log, False)

def test_override_magic_variables(self):
play_context = play_context.set_task_and_variable_override(task=self.mock_task, variables=all_vars, templar=self.mock_templar)

mock_play.connection = 'mock'
mock_play.remote_user = 'mock'
mock_play.port = 1234
mock_play.connection = 'mock'
mock_play.remote_user = 'mock'
mock_play.port = 1234
mock_play.become_method = 'mock'
mock_play.become_user = 'mockroot'
mock_task.become_pass = 'mocktaskpass'
mock_play.become_user = 'mockroot'
mock_task.become_pass = 'mocktaskpass'
# Inventory vars override things set from cli vars (--become, -user,
# etc... [notably, not --extravars])
for test_name, all_vars, expected in self.inventory_vars:
@@ -21,13 +21,13 @@ __metaclass__ = type

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock

from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook import Playbook
from ansible.vars.manager import VariableManager

from units.mock.loader import DictDataLoader

class TestPlaybook(unittest.TestCase):

def setUp(self):

@@ -42,7 +42,7 @@ class TestPlaybook(unittest.TestCase):

def test_basic_playbook(self):
fake_loader = DictDataLoader({
"test_file.yml":"""
"test_file.yml": """
- hosts: all
""",
})

@@ -23,6 +23,7 @@ from ansible.compat.tests import unittest
from ansible.playbook.taggable import Taggable
from units.mock.loader import DictDataLoader

class TaggableTestObj(Taggable):

def __init__(self):
@@ -19,18 +19,20 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.playbook.task import Task
from ansible.compat.tests import unittest
from ansible.playbook.task import Task

basic_shell_task = dict(
name = 'Test Task',
shell = 'echo hi'
name='Test Task',
shell='echo hi'
)

kv_shell_task = dict(
action = 'shell echo hi'
action='shell echo hi'
)

class TestTask(unittest.TestCase):

def setUp(self):

@@ -66,7 +68,7 @@ class TestTask(unittest.TestCase):
def test_task_auto_name(self):
assert 'name' not in kv_shell_task
t = Task.load(kv_shell_task)
#self.assertEqual(t.name, 'shell echo hi')
# self.assertEqual(t.name, 'shell echo hi')

def test_task_auto_name_with_role(self):
pass
@@ -436,10 +436,19 @@ class TestActionBase(unittest.TestCase):
action_base._make_tmp_path.return_value = '/the/tmp/path'
action_base._low_level_execute_command.return_value = dict(stdout='{"rc": 0, "stdout": "ok"}')
self.assertEqual(action_base._execute_module(module_name=None, module_args=None), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
self.assertEqual(action_base._execute_module(module_name='foo',
module_args=dict(z=9, y=8, x=7), task_vars=dict(a=1)),
dict(_ansible_parsed=True, rc=0, stdout="ok",
stdout_lines=['ok']))
self.assertEqual(
action_base._execute_module(
module_name='foo',
module_args=dict(z=9, y=8, x=7),
task_vars=dict(a=1)
),
dict(
_ansible_parsed=True,
rc=0,
stdout="ok",
stdout_lines=['ok'],
)
)

# test with needing/removing a remote tmp path
action_base._configure_module.return_value = ('old', '#!/usr/bin/python', 'this is the module data', 'path')

@@ -525,9 +534,9 @@ class TestActionBaseCleanReturnedData(unittest.TestCase):

mock_shared_loader_obj.connection_loader = mock_connection_loader
mock_connection = MagicMock()
#mock_connection._shell.env_prefix.side_effect = env_prefix
# mock_connection._shell.env_prefix.side_effect = env_prefix

#action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
# action_base = DerivedActionBase(mock_task, mock_connection, play_context, None, None, None)
action_base = DerivedActionBase(task=None,
connection=mock_connection,
play_context=None,

@@ -535,7 +544,7 @@ class TestActionBaseCleanReturnedData(unittest.TestCase):
templar=None,
shared_loader_obj=mock_shared_loader_obj)
data = {'ansible_playbook_python': '/usr/bin/python',
#'ansible_rsync_path': '/usr/bin/rsync',
# 'ansible_rsync_path': '/usr/bin/rsync',
'ansible_python_interpreter': '/usr/bin/python',
'ansible_ssh_some_var': 'whatever',
'ansible_ssh_host_key_somehost': 'some key here',

@@ -592,7 +601,7 @@ class TestActionBaseParseReturnedData(unittest.TestCase):
'stdout_lines': stdout.splitlines(),
'stderr': err}
res = action_base._parse_returned_data(returned_data)
del res['_ansible_parsed'] # we always have _ansible_parsed
del res['_ansible_parsed']  # we always have _ansible_parsed
self.assertEqual(len(res), 0)
self.assertFalse(res)

@@ -610,7 +619,7 @@ class TestActionBaseParseReturnedData(unittest.TestCase):
self.assertTrue(res['ansible_facts'])
self.assertIn('ansible_blip', res['ansible_facts'])
# TODO: Should this be an AnsibleUnsafe?
#self.assertIsInstance(res['ansible_facts'], AnsibleUnsafe)
# self.assertIsInstance(res['ansible_facts'], AnsibleUnsafe)

def test_json_facts_add_host(self):
action_base = self._action_base()

@@ -630,4 +639,4 @@ class TestActionBaseParseReturnedData(unittest.TestCase):
self.assertIn('ansible_blip', res['ansible_facts'])
self.assertIn('add_host', res)
# TODO: Should this be an AnsibleUnsafe?
#self.assertIsInstance(res['ansible_facts'], AnsibleUnsafe)
# self.assertIsInstance(res['ansible_facts'], AnsibleUnsafe)
@@ -24,6 +24,7 @@ from ansible.compat.tests.mock import patch, MagicMock, Mock
from ansible.plugins.action.raw import ActionModule
from ansible.playbook.task import Task

class TestCopyResultExclude(unittest.TestCase):

def setUp(self):

@@ -38,7 +39,6 @@ class TestCopyResultExclude(unittest.TestCase):
# Issue: https://github.com/ansible/ansible/issues/16054
# PR: https://github.com/ansible/ansible/pull/16085

def test_raw_executable_is_not_empty_string(self):

play_context = Mock()

@@ -50,7 +50,7 @@ class TestCopyResultExclude(unittest.TestCase):
play_context.check_mode = False

self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
self.mock_am._low_level_execute_command = Mock(return_value = {})
self.mock_am._low_level_execute_command = Mock(return_value={})
self.mock_am.display = Mock()

self.mock_am.run()

@@ -83,7 +83,7 @@ class TestCopyResultExclude(unittest.TestCase):
play_context.check_mode = False

self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
self.mock_am._low_level_execute_command = Mock(return_value = {})
self.mock_am._low_level_execute_command = Mock(return_value={})
self.mock_am.display = Mock()

self.assertEqual(task.environment, None)

@@ -100,10 +100,8 @@ class TestCopyResultExclude(unittest.TestCase):
play_context.check_mode = False

self.mock_am = ActionModule(task, connection, play_context, loader=None, templar=None, shared_loader_obj=None)
self.mock_am._low_level_execute_command = Mock(return_value = {})
self.mock_am._low_level_execute_command = Mock(return_value={})
self.mock_am.display = Mock()

self.mock_am.run(task_vars={'a': 'b'})
self.assertEqual(task.environment, None)
@@ -17,11 +17,12 @@ import unittest
import yaml

from pprint import pprint
from ansible import plugins

import ansible.plugins
from ansible.compat.tests.mock import patch, MagicMock
from ansible.plugins.action.synchronize import ActionModule

# Getting the incoming and outgoing task vars from the plugin's run method

'''

@@ -41,8 +42,6 @@ with open('task_vars.json', 'wb') as f:
'''

class TaskMock(object):
args = {'src': u'/tmp/deleteme',
'dest': '/tmp/deleteme',

@@ -52,16 +51,19 @@ class TaskMock(object):
become_user = None
become_method = None

class StdinMock(object):
shell = None

class ConnectionMock(object):
ismock = True
_play_context = None
#transport = 'ssh'
# transport = 'ssh'
transport = None
_new_stdin = StdinMock()

class PlayContextMock(object):
shell = None
private_key_file = None

@@ -75,13 +77,16 @@ class PlayContextMock(object):
remote_user = None
password = None

class ModuleLoaderMock(object):
def find_plugin(self, module_name, mod_type):
pass

class SharedLoaderMock(object):
module_loader = ModuleLoaderMock()

class SynchronizeTester(object):

''' A wrapper for mocking out synchronize environments '''

@@ -96,7 +101,6 @@ class SynchronizeTester(object):
final_task_vars = None
execute_called = False

def _execute_module(self, module_name, module_args=None, task_vars=None):
self.execute_called = True
self.final_module_args = module_args

@@ -114,7 +118,7 @@ class SynchronizeTester(object):
if '_play_context' in test_meta:
if test_meta['_play_context']:
self.task.args = {}
for k,v in test_meta['_play_context'].items():
for (k, v) in test_meta['_play_context'].items():
if v == 'None':
v = None
setattr(self._play_context, k, v)

@@ -123,8 +127,8 @@ class SynchronizeTester(object):
if '_task' in test_meta:
if test_meta['_task']:
self.task.args = {}
for k,v in test_meta['_task'].items():
#import epdb; epdb.st()
for (k, v) in test_meta['_task'].items():
# import epdb; epdb.st()
if v == 'None':
v = None
setattr(self.task, k, v)

@@ -133,32 +137,30 @@ class SynchronizeTester(object):
if 'task_args' in test_meta:
if test_meta['task_args']:
self.task.args = {}
for k,v in test_meta['task_args'].items():
for (k, v) in test_meta['task_args'].items():
self.task.args[k] = v

# load inital task vars
invarspath = os.path.join(fixturepath,
test_meta.get('fixtures', {}).get('taskvars_in', 'taskvars_in.json'))
invarspath = os.path.join(fixturepath, test_meta.get('fixtures', {}).get('taskvars_in', 'taskvars_in.json'))
with open(invarspath, 'rb') as f:
fdata = f.read()
fdata = fdata.decode("utf-8")
in_task_vars = json.loads(fdata)

# load expected final task vars
outvarspath = os.path.join(fixturepath,
test_meta.get('fixtures', {}).get('taskvars_out', 'taskvars_out.json'))
outvarspath = os.path.join(fixturepath, test_meta.get('fixtures', {}).get('taskvars_out', 'taskvars_out.json'))
with open(outvarspath, 'rb') as f:
fdata = f.read()
fdata = fdata.decode("utf-8")
out_task_vars = json.loads(fdata)

# fixup the connection
for k,v in test_meta['connection'].items():
for (k, v) in test_meta['connection'].items():
setattr(self.connection, k, v)

# fixup the hostvars
if test_meta['hostvars']:
for k,v in test_meta['hostvars'].items():
for (k, v) in test_meta['hostvars'].items():
in_task_vars['hostvars'][k] = v

# initalize and run the module

@@ -170,9 +172,9 @@ class SynchronizeTester(object):
# run assertions
for check in test_meta['asserts']:
value = eval(check)
#if not value:
# print(check, value)
# import epdb; epdb.st()
# if not value:
# print(check, value)
# import epdb; epdb.st()
assert value, check
@@ -190,54 +192,52 @@ class FakePluginLoader(object):

class TestSynchronizeAction(unittest.TestCase):

fixturedir = os.path.dirname(__file__)
fixturedir = os.path.join(fixturedir, 'fixtures', 'synchronize')
#print(basedir)

# print(basedir)

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_basic(self):
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'basic'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_basic_become(self):
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'basic_become'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_become'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_basic_become_cli(self):
# --become on the cli sets _play_context.become
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'basic_become_cli'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_become_cli'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_basic_vagrant(self):
# simple vagrant example
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'basic_vagrant'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_vagrant'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_basic_vagrant_sudo(self):
# vagrant plus sudo
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'basic_vagrant_sudo'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_vagrant_sudo'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_basic_vagrant_become_cli(self):
# vagrant plus sudo
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'basic_vagrant_become_cli'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_vagrant_become_cli'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_delegate_remote(self):
# delegate to other remote host
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'delegate_remote'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'delegate_remote'))

@patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
def test_delegate_remote_su(self):
# delegate to other remote host with su enabled
x = SynchronizeTester()
x.runtest(fixturepath=os.path.join(self.fixturedir,'delegate_remote_su'))
x.runtest(fixturepath=os.path.join(self.fixturedir, 'delegate_remote_su'))
@@ -25,19 +25,18 @@ from ansible.compat.tests import mock
from ansible.compat.tests import unittest
from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext

from ansible.plugins.connection import ConnectionBase
#from ansible.plugins.connection.accelerate import Connection as AccelerateConnection
#from ansible.plugins.connection.chroot import Connection as ChrootConnection
#from ansible.plugins.connection.funcd import Connection as FuncdConnection
#from ansible.plugins.connection.jail import Connection as JailConnection
#from ansible.plugins.connection.libvirt_lxc import Connection as LibvirtLXCConnection
# from ansible.plugins.connection.accelerate import Connection as AccelerateConnection
# from ansible.plugins.connection.chroot import Connection as ChrootConnection
# from ansible.plugins.connection.funcd import Connection as FuncdConnection
# from ansible.plugins.connection.jail import Connection as JailConnection
# from ansible.plugins.connection.libvirt_lxc import Connection as LibvirtLXCConnection
from ansible.plugins.connection.lxc import Connection as LxcConnection
from ansible.plugins.connection.local import Connection as LocalConnection
from ansible.plugins.connection.paramiko_ssh import Connection as ParamikoConnection
from ansible.plugins.connection.ssh import Connection as SSHConnection
from ansible.plugins.connection.docker import Connection as DockerConnection
#from ansible.plugins.connection.winrm import Connection as WinRmConnection
# from ansible.plugins.connection.winrm import Connection as WinRmConnection
from ansible.plugins.connection.network_cli import Connection as NetworkCliConnection

@@ -68,19 +67,26 @@ class TestConnectionBaseClass(unittest.TestCase):

def test_subclass_success(self):
class ConnectionModule3(ConnectionBase):

@property
def transport(self):
pass

def _connect(self):
pass

def exec_command(self):
pass

def put_file(self):
pass

def fetch_file(self):
pass

def close(self):
pass

self.assertIsInstance(ConnectionModule3(self.play_context, self.in_stream), ConnectionModule3)

# def test_accelerate_connection_module(self):

@@ -190,17 +196,23 @@ debug1: Sending command: /bin/sh -c 'sudo -H -S -p "[sudo via ansible, key=ouzm
'''

class ConnectionFoo(ConnectionBase):

@property
def transport(self):
pass

def _connect(self):
pass

def exec_command(self):
pass

def put_file(self):
pass

def fetch_file(self):
pass

def close(self):
pass
@@ -28,7 +28,6 @@ from io import StringIO

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, PropertyMock

from ansible.errors import AnsibleConnectionFailure
from ansible.playbook.play_context import PlayContext

@@ -38,6 +37,7 @@ builtin_import = __import__

mock_ncclient = MagicMock(name='ncclient')

def import_mock(name, *args):
if name.startswith('ncclient'):
return mock_ncclient

@@ -50,6 +50,7 @@ else:
with patch('__builtin__.__import__', side_effect=import_mock):
from ansible.plugins.connection import netconf

class TestNetconfConnectionClass(unittest.TestCase):

def test_netconf_init(self):

@@ -117,5 +118,3 @@ class TestNetconfConnectionClass(unittest.TestCase):
self.assertEqual(1, rc)
self.assertEqual('', out)
self.assertEqual('unable to parse request', err)

@@ -143,7 +143,6 @@ class TestConnectionClass(unittest.TestCase):
self.assertFalse(mock_open_shell.called)
mock_send.assert_called_with({'command': b'command'})

def test_network_cli_send(self):
pc = PlayContext()
new_stdin = StringIO()
@@ -21,19 +21,18 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from io import StringIO

import pytest

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, PropertyMock

from ansible import constants as C
from ansible.compat.selectors import SelectorKey, EVENT_READ
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
from ansible.module_utils._text import to_bytes

class TestConnectionBaseClass(unittest.TestCase):

@@ -91,10 +90,10 @@ class TestConnectionBaseClass(unittest.TestCase):

conn = ssh.Connection(pc, new_stdin)

conn.check_password_prompt = MagicMock()
conn.check_become_success = MagicMock()
conn.check_password_prompt = MagicMock()
conn.check_become_success = MagicMock()
conn.check_incorrect_password = MagicMock()
conn.check_missing_password = MagicMock()
conn.check_missing_password = MagicMock()

def _check_password_prompt(line):
if b'foo' in line:

@@ -116,17 +115,17 @@ class TestConnectionBaseClass(unittest.TestCase):
return True
return False

conn.check_password_prompt.side_effect = _check_password_prompt
conn.check_become_success.side_effect = _check_become_success
conn.check_password_prompt.side_effect = _check_password_prompt
conn.check_become_success.side_effect = _check_become_success
conn.check_incorrect_password.side_effect = _check_incorrect_password
conn.check_missing_password.side_effect = _check_missing_password
conn.check_missing_password.side_effect = _check_missing_password

# test examining output for prompt
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)

pc.prompt = True

@@ -140,10 +139,10 @@ class TestConnectionBaseClass(unittest.TestCase):

# test examining output for become prompt
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)

pc.prompt = False

@@ -158,10 +157,10 @@ class TestConnectionBaseClass(unittest.TestCase):

# test examining output for become failure
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)

pc.prompt = False

@@ -176,10 +175,10 @@ class TestConnectionBaseClass(unittest.TestCase):

# test examining output for missing password
conn._flags = dict(
become_prompt = False,
become_success = False,
become_error = False,
become_nopasswd_error = False,
become_prompt=False,
become_success=False,
become_error=False,
become_nopasswd_error=False,
)

pc.prompt = False

@@ -236,8 +235,8 @@ class TestConnectionBaseClass(unittest.TestCase):
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)

expected_in_data = b' '.join((b'put',
to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)

@@ -292,8 +291,8 @@ class TestConnectionBaseClass(unittest.TestCase):
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)

expected_in_data = b' '.join((b'get',
to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data, checkrc=False)
@@ -29,26 +29,31 @@ class TestINILookup(unittest.TestCase):
    # Currently there isn't a new-style
    old_style_params_data = (
        # Simple case
        dict(term=u'keyA section=sectionA file=/path/to/file',
             expected=[u'keyA', u'section=sectionA', u'file=/path/to/file'],
             ),
        dict(term=u'keyB section=sectionB with space file=/path/with/embedded spaces and/file',
             expected=[u'keyB', u'section=sectionB with space', u'file=/path/with/embedded spaces and/file'],
             ),
        dict(term=u'keyC section=sectionC file=/path/with/equals/cn=com.ansible',
             expected=[u'keyC', u'section=sectionC', u'file=/path/with/equals/cn=com.ansible'],
             ),
        dict(term=u'keyD section=sectionD file=/path/with space and/equals/cn=com.ansible',
             expected=[u'keyD', u'section=sectionD', u'file=/path/with space and/equals/cn=com.ansible'],
             ),
        dict(term=u'keyE section=sectionE file=/path/with/unicode/くらとみ/file',
             expected=[u'keyE', u'section=sectionE', u'file=/path/with/unicode/くらとみ/file'],
             ),
        dict(term=u'keyF section=sectionF file=/path/with/utf 8 and spaces/くらとみ/file',
             expected=[u'keyF', u'section=sectionF', u'file=/path/with/utf 8 and spaces/くらとみ/file'],
             ),
    )

        dict(
            term=u'keyA section=sectionA file=/path/to/file',
            expected=[u'keyA', u'section=sectionA', u'file=/path/to/file'],
        ),
        dict(
            term=u'keyB section=sectionB with space file=/path/with/embedded spaces and/file',
            expected=[u'keyB', u'section=sectionB with space', u'file=/path/with/embedded spaces and/file'],
        ),
        dict(
            term=u'keyC section=sectionC file=/path/with/equals/cn=com.ansible',
            expected=[u'keyC', u'section=sectionC', u'file=/path/with/equals/cn=com.ansible'],
        ),
        dict(
            term=u'keyD section=sectionD file=/path/with space and/equals/cn=com.ansible',
            expected=[u'keyD', u'section=sectionD', u'file=/path/with space and/equals/cn=com.ansible'],
        ),
        dict(
            term=u'keyE section=sectionE file=/path/with/unicode/くらとみ/file',
            expected=[u'keyE', u'section=sectionE', u'file=/path/with/unicode/くらとみ/file'],
        ),
        dict(
            term=u'keyF section=sectionF file=/path/with/utf 8 and spaces/くらとみ/file',
            expected=[u'keyF', u'section=sectionF', u'file=/path/with/utf 8 and spaces/くらとみ/file'],
        ),
    )

    def setUp(self):
        pass

@@ -58,6 +63,6 @@ class TestINILookup(unittest.TestCase):
    def test_parse_parameters(self):
        for testcase in self.old_style_params_data:
            #print(testcase)
            # print(testcase)
            params = _parse_params(testcase['term'])
            self.assertEqual(params, testcase['expected'])

@@ -22,6 +22,7 @@ __metaclass__ = type

import passlib
from passlib.handlers import pbkdf2
from units.mock.loader import DictDataLoader

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import mock_open, patch

@@ -29,11 +30,8 @@ from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
from ansible.plugins import PluginLoader
from ansible.utils import encrypt

from units.mock.loader import DictDataLoader

from ansible.plugins.lookup import password
from ansible.utils import encrypt


DEFAULT_CHARS = sorted([u'ascii_letters', u'digits', u".,:-_"])

@@ -42,128 +40,149 @@ DEFAULT_CANDIDATE_CHARS = u'.,:-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTU
# Currently there isn't a new-style
old_style_params_data = (
    # Simple case
    dict(term=u'/path/to/file',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(
        term=u'/path/to/file',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),

    # Special characters in path
    dict(term=u'/path/with/embedded spaces and/file',
         filename=u'/path/with/embedded spaces and/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(term=u'/path/with/equals/cn=com.ansible',
         filename=u'/path/with/equals/cn=com.ansible',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(term=u'/path/with/unicode/くらとみ/file',
         filename=u'/path/with/unicode/くらとみ/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(
        term=u'/path/with/embedded spaces and/file',
        filename=u'/path/with/embedded spaces and/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    dict(
        term=u'/path/with/equals/cn=com.ansible',
        filename=u'/path/with/equals/cn=com.ansible',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    dict(
        term=u'/path/with/unicode/くらとみ/file',
        filename=u'/path/with/unicode/くらとみ/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    # Mix several special chars
    dict(term=u'/path/with/utf 8 and spaces/くらとみ/file',
         filename=u'/path/with/utf 8 and spaces/くらとみ/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(term=u'/path/with/encoding=unicode/くらとみ/file',
         filename=u'/path/with/encoding=unicode/くらとみ/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(term=u'/path/with/encoding=unicode/くらとみ/and spaces file',
         filename=u'/path/with/encoding=unicode/くらとみ/and spaces file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(
        term=u'/path/with/utf 8 and spaces/くらとみ/file',
        filename=u'/path/with/utf 8 and spaces/くらとみ/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    dict(
        term=u'/path/with/encoding=unicode/くらとみ/file',
        filename=u'/path/with/encoding=unicode/くらとみ/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    dict(
        term=u'/path/with/encoding=unicode/くらとみ/and spaces file',
        filename=u'/path/with/encoding=unicode/くらとみ/and spaces file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),

    # Simple parameters
    dict(term=u'/path/to/file length=42',
         filename=u'/path/to/file',
         params=dict(length=42, encrypt=None, chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(term=u'/path/to/file encrypt=pbkdf2_sha256',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt='pbkdf2_sha256', chars=DEFAULT_CHARS),
         candidate_chars=DEFAULT_CANDIDATE_CHARS,
         ),
    dict(term=u'/path/to/file chars=abcdefghijklmnop',
    dict(
        term=u'/path/to/file length=42',
        filename=u'/path/to/file',
        params=dict(length=42, encrypt=None, chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    dict(
        term=u'/path/to/file encrypt=pbkdf2_sha256',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt='pbkdf2_sha256', chars=DEFAULT_CHARS),
        candidate_chars=DEFAULT_CANDIDATE_CHARS,
    ),
    dict(
        term=u'/path/to/file chars=abcdefghijklmnop',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abcdefghijklmnop']),
        candidate_chars=u'abcdefghijklmnop',
    ),
    dict(term=u'/path/to/file chars=digits,abc,def',
    ),
    dict(
        term=u'/path/to/file chars=digits,abc,def',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc', u'def'])),
        candidate_chars=u'abcdef0123456789',
    ),
    ),

    # Including comma in chars
    dict(term=u'/path/to/file chars=abcdefghijklmnop,,digits',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'abcdefghijklmnop', u',', u'digits'])),
         candidate_chars = u',abcdefghijklmnop0123456789',
         ),
    dict(term=u'/path/to/file chars=,,',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u',']),
         candidate_chars=u',',
         ),
    dict(
        term=u'/path/to/file chars=abcdefghijklmnop,,digits',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'abcdefghijklmnop', u',', u'digits'])),
        candidate_chars=u',abcdefghijklmnop0123456789',
    ),
    dict(
        term=u'/path/to/file chars=,,',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u',']),
        candidate_chars=u',',
    ),

    # Including = in chars
    dict(term=u'/path/to/file chars=digits,=,,',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'=', u','])),
         candidate_chars=u',=0123456789',
         ),
    dict(term=u'/path/to/file chars=digits,abc=def',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc=def'])),
         candidate_chars=u'abc=def0123456789',
         ),
    dict(
        term=u'/path/to/file chars=digits,=,,',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'=', u','])),
        candidate_chars=u',=0123456789',
    ),
    dict(
        term=u'/path/to/file chars=digits,abc=def',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'abc=def'])),
        candidate_chars=u'abc=def0123456789',
    ),

    # Including unicode in chars
    dict(term=u'/path/to/file chars=digits,くらとみ,,',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'くらとみ', u','])),
         candidate_chars=u',0123456789くらとみ',
         ),
    dict(
        term=u'/path/to/file chars=digits,くらとみ,,',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'digits', u'くらとみ', u','])),
        candidate_chars=u',0123456789くらとみ',
    ),
    # Including only unicode in chars
    dict(term=u'/path/to/file chars=くらとみ',
         filename=u'/path/to/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'くらとみ'])),
         candidate_chars=u'くらとみ',
         ),
    dict(
        term=u'/path/to/file chars=くらとみ',
        filename=u'/path/to/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'くらとみ'])),
        candidate_chars=u'くらとみ',
    ),

    # Include ':' in path
    dict(term=u'/path/to/file_with:colon chars=ascii_letters,digits',
         filename=u'/path/to/file_with:colon',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'ascii_letters', u'digits'])),
         candidate_chars=u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
         ),
    dict(
        term=u'/path/to/file_with:colon chars=ascii_letters,digits',
        filename=u'/path/to/file_with:colon',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=sorted([u'ascii_letters', u'digits'])),
        candidate_chars=u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
    ),

    # Including special chars in both path and chars
    # Special characters in path
    dict(term=u'/path/with/embedded spaces and/file chars=abc=def',
         filename=u'/path/with/embedded spaces and/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
         candidate_chars=u'abc=def',
         ),
    dict(term=u'/path/with/equals/cn=com.ansible chars=abc=def',
         filename=u'/path/with/equals/cn=com.ansible',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
         candidate_chars=u'abc=def',
         ),
    dict(term=u'/path/with/unicode/くらとみ/file chars=くらとみ',
         filename=u'/path/with/unicode/くらとみ/file',
         params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'くらとみ']),
         candidate_chars=u'くらとみ',
         ),
    dict(
        term=u'/path/with/embedded spaces and/file chars=abc=def',
        filename=u'/path/with/embedded spaces and/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
        candidate_chars=u'abc=def',
    ),
    dict(
        term=u'/path/with/equals/cn=com.ansible chars=abc=def',
        filename=u'/path/with/equals/cn=com.ansible',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'abc=def']),
        candidate_chars=u'abc=def',
    ),
    dict(
        term=u'/path/with/unicode/くらとみ/file chars=くらとみ',
        filename=u'/path/with/unicode/くらとみ/file',
        params=dict(length=password.DEFAULT_LENGTH, encrypt=None, chars=[u'くらとみ']),
        candidate_chars=u'くらとみ',
    ),
)

@@ -306,27 +325,26 @@ class TestFormatContent(unittest.TestCase):
    def test_no_encrypt(self):
        self.assertEqual(
            password._format_content(password=u'hunter42',
                                     salt=u'87654321',
                                     encrypt=False),
                                     salt=u'87654321',
                                     encrypt=False),
            u'hunter42 salt=87654321')

    def test_no_encrypt_no_salt(self):
        self.assertEqual(
            password._format_content(password=u'hunter42',
                                     salt=None,
                                     encrypt=False),
                                     salt=None,
                                     encrypt=False),
            u'hunter42')

    def test_encrypt(self):
        self.assertEqual(
            password._format_content(password=u'hunter42',
                                     salt=u'87654321',
                                     encrypt='pbkdf2_sha256'),
                                     salt=u'87654321',
                                     encrypt='pbkdf2_sha256'),
            u'hunter42 salt=87654321')

    def test_encrypt_no_salt(self):
        self.assertRaises(AssertionError, password._format_content,
                          u'hunter42', None, 'pbkdf2_sha256')
        self.assertRaises(AssertionError, password._format_content, u'hunter42', None, 'pbkdf2_sha256')


class TestWritePasswordFile(unittest.TestCase):

@@ -351,13 +369,13 @@ class TestWritePasswordFile(unittest.TestCase):

class TestLookupModule(unittest.TestCase):
    def setUp(self):
        self.fake_loader = DictDataLoader({'/path/to/somewhere':'sdfsdf'})
        self.fake_loader = DictDataLoader({'/path/to/somewhere': 'sdfsdf'})
        self.password_lookup = password.LookupModule(loader=self.fake_loader)
        self.os_path_exists = password.os.path.exists

        # Different releases of passlib default to a different number of rounds
        self.sha256 = passlib.registry.get_crypt_handler('pbkdf2_sha256')
        sha256_for_tests = pbkdf2.create_pbkdf2_hash("sha256", 32, 20000)
        sha256_for_tests = pbkdf2.create_pbkdf2_hash("sha256", 32, 20000)
        passlib.registry.register_crypt_handler(sha256_for_tests, force=True)

    def tearDown(self):

@@ -369,8 +387,7 @@ class TestLookupModule(unittest.TestCase):
    def test_no_encrypt(self, mock_get_paths, mock_write_file):
        mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']

        results = self.password_lookup.run([u'/path/to/somewhere'],
                                           None)
        results = self.password_lookup.run([u'/path/to/somewhere'], None)

        # FIXME: assert something useful
        for result in results:

@@ -382,8 +399,7 @@ class TestLookupModule(unittest.TestCase):
    def test_encrypt(self, mock_get_paths, mock_write_file):
        mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']

        results = self.password_lookup.run([u'/path/to/somewhere encrypt=pbkdf2_sha256'],
                                           None)
        results = self.password_lookup.run([u'/path/to/somewhere encrypt=pbkdf2_sha256'], None)

        # pbkdf2 format plus hash
        expected_password_length = 76

@@ -412,8 +428,7 @@ class TestLookupModule(unittest.TestCase):
        password.os.path.exists = lambda x: True

        with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
            results = self.password_lookup.run([u'/path/to/somewhere chars=anything encrypt=pbkdf2_sha256'],
                                               None)
            results = self.password_lookup.run([u'/path/to/somewhere chars=anything encrypt=pbkdf2_sha256'], None)
        for result in results:
            self.assertEqual(result, u'$pbkdf2-sha256$20000$ODc2NTQzMjE$Uikde0cv0BKaRaAXMrUQB.zvG4GmnjClwjghwIRf2gU')

@@ -424,8 +439,7 @@ class TestLookupModule(unittest.TestCase):
        password.os.path.exists = lambda x: True

        with patch.object(builtins, 'open', mock_open(read_data=b'hunter42 salt=87654321\n')) as m:
            results = self.password_lookup.run([u'/path/to/somewhere chars=anything'],
                                               None)
            results = self.password_lookup.run([u'/path/to/somewhere chars=anything'], None)

        for result in results:
            self.assertEqual(result, u'hunter42')

@@ -435,7 +449,6 @@ class TestLookupModule(unittest.TestCase):
    def test_only_a(self, mock_get_paths, mock_write_file):
        mock_get_paths.return_value = ['/path/one', '/path/two', '/path/three']

        results = self.password_lookup.run([u'/path/to/somewhere chars=a'],
                                           None)
        results = self.password_lookup.run([u'/path/to/somewhere chars=a'], None)
        for result in results:
            self.assertEquals(result, u'a' * password.DEFAULT_LENGTH)

@@ -19,11 +19,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from units.mock.loader import DictDataLoader
import uuid

from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock

from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_queue_manager import TaskQueueManager

@@ -34,7 +34,6 @@ from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.plugins.strategy import StrategyBase

from units.mock.loader import DictDataLoader

class TestStrategyBase(unittest.TestCase):

@@ -46,13 +45,16 @@ class TestStrategyBase(unittest.TestCase):

    def test_strategy_base_init(self):
        queue_items = []

        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0

        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()

        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

@@ -71,13 +73,16 @@ class TestStrategyBase(unittest.TestCase):

    def test_strategy_base_run(self):
        queue_items = []

        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0

        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()

        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

@@ -96,7 +101,7 @@ class TestStrategyBase(unittest.TestCase):
        for attr in ('RUN_OK', 'RUN_ERROR', 'RUN_FAILED_HOSTS', 'RUN_UNREACHABLE_HOSTS'):
            setattr(mock_tqm, attr, getattr(TaskQueueManager, attr))

        mock_iterator = MagicMock()
        mock_iterator = MagicMock()
        mock_iterator._play = MagicMock()
        mock_iterator._play.handlers = []

@@ -124,13 +129,16 @@ class TestStrategyBase(unittest.TestCase):

    def test_strategy_base_get_hosts(self):
        queue_items = []

        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0

        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()

        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

@@ -142,7 +150,7 @@ class TestStrategyBase(unittest.TestCase):
        mock_hosts = []
        for i in range(0, 5):
            mock_host = MagicMock()
            mock_host.name = "host%02d" % (i+1)
            mock_host.name = "host%02d" % (i + 1)
            mock_host.has_hostkey = True
            mock_hosts.append(mock_host)

@@ -156,7 +164,7 @@ class TestStrategyBase(unittest.TestCase):
        mock_tqm.get_inventory.return_value = mock_inventory

        mock_play = MagicMock()
        mock_play.hosts = ["host%02d" % (i+1) for i in range(0, 5)]
        mock_play.hosts = ["host%02d" % (i + 1) for i in range(0, 5)]

        strategy_base = StrategyBase(tqm=mock_tqm)

@@ -213,7 +221,6 @@ class TestStrategyBase(unittest.TestCase):
        finally:
            tqm.cleanup()


    def test_strategy_base_process_pending_results(self):
        mock_tqm = MagicMock()
        mock_tqm._terminated = False

@@ -224,13 +231,16 @@ class TestStrategyBase(unittest.TestCase):
        mock_tqm._listening_handlers = {}

        queue_items = []

        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0

        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()

        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

@@ -290,6 +300,7 @@ class TestStrategyBase(unittest.TestCase):
            if host_name == 'test01':
                return mock_host
            return None

        def _get_group(group_name):
            if group_name in ('all', 'foo'):
                return mock_group

@@ -341,8 +352,8 @@ class TestStrategyBase(unittest.TestCase):
        self.assertEqual(results[0], task_result)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)
        #self.assertIn('test01', mock_tqm._failed_hosts)
        #del mock_tqm._failed_hosts['test01']
        # self.assertIn('test01', mock_tqm._failed_hosts)
        # del mock_tqm._failed_hosts['test01']
        mock_iterator.is_failed.return_value = False

        task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"unreachable": true}')

@@ -393,18 +404,18 @@ class TestStrategyBase(unittest.TestCase):
        self.assertIn(mock_handler_task._uuid, strategy_base._notified_handlers)
        self.assertIn(mock_host, strategy_base._notified_handlers[mock_handler_task._uuid])

        #queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
        #results = strategy_base._process_pending_results(iterator=mock_iterator)
        #self.assertEqual(len(results), 0)
        #self.assertEqual(strategy_base._pending_results, 1)
        # queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
        # results = strategy_base._process_pending_results(iterator=mock_iterator)
        # self.assertEqual(len(results), 0)
        # self.assertEqual(strategy_base._pending_results, 1)

        #queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
        #results = strategy_base._process_pending_results(iterator=mock_iterator)
        #self.assertEqual(len(results), 0)
        #self.assertEqual(strategy_base._pending_results, 1)
        # queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
        # results = strategy_base._process_pending_results(iterator=mock_iterator)
        # self.assertEqual(len(results), 0)
        # self.assertEqual(strategy_base._pending_results, 1)

        #queue_items.append(('bad'))
        #self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
        # queue_items.append(('bad'))
        # self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
        strategy_base.cleanup()

    def test_strategy_base_load_included_file(self):

@@ -417,13 +428,16 @@ class TestStrategyBase(unittest.TestCase):
        })

        queue_items = []

        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0

        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()

        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

Some files were not shown because too many files have changed in this diff.