From 3d61f077ec1ba2c0fdd4d493c730a4299e2f883d Mon Sep 17 00:00:00 2001
From: Jordon Replogle
Date: Wed, 30 Jul 2014 10:08:22 -0700
Subject: [PATCH 001/971] Added OpenVZ Inventory python script
---
plugins/inventory/openvz.py | 74 +++++++++++++++++++++++++++++++++++++
1 file changed, 74 insertions(+)
create mode 100644 plugins/inventory/openvz.py
diff --git a/plugins/inventory/openvz.py b/plugins/inventory/openvz.py
new file mode 100644
index 0000000000..1f441a39f5
--- /dev/null
+++ b/plugins/inventory/openvz.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# openvz.py
+#
+# Copyright 2014 jordonr
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
+# MA 02110-1301, USA.
+#
+#
+# Inspired by libvirt_lxc.py inventory script
+# https://github.com/ansible/ansible/blob/e5ef0eca03cbb6c8950c06dc50d0ca22aa8902f4/plugins/inventory/libvirt_lxc.py
+#
+# Groups are determined by the description field of openvz guests
+# multiple groups can be seperated by commas: webserver,dbserver
+
+from subprocess import Popen,PIPE
+import sys
+import json
+
+
+#List openvz hosts
+vzhosts = ['192.168.1.3','192.168.1.2','192.168.1.1']
+#Add openvzhosts to the inventory
+inventory = {'vzhosts': {'hosts': vzhosts}}
+#default group, when description not defined
+default_group = ['vzguest']
+
+def getGuests():
+ #Loop through vzhosts
+ for h in vzhosts:
+ #SSH to vzhost and get the list of guests in json
+ pipe = Popen(['ssh', h,'vzlist','-j'], stdout=PIPE, universal_newlines=True)
+
+ #Load Json info of guests
+ json_data = json.loads(pipe.stdout.read())
+
+ #loop through guests
+ for j in json_data:
+ #determine group from guest description
+ if j['description'] is not None:
+ groups = j['description'].split(",")
+ else:
+ groups = default_group
+
+ #add guest to inventory
+ for g in groups:
+ if g not in inventory:
+ inventory[g] = {'hosts': []}
+
+ for ip in j['ip']:
+ inventory[g]['hosts'].append(ip)
+
+ print json.dumps(inventory)
+
+if len(sys.argv) == 2 and sys.argv[1] == '--list':
+ getGuests()
+elif len(sys.argv) == 3 and sys.argv[1] == '--host':
+ print json.dumps({});
+else:
+ print "Need an argument, either --list or --host "
From df8dfdce06f837c49f230d5e27b513f2bfe27cf1 Mon Sep 17 00:00:00 2001
From: Serge van Ginderachter
Date: Wed, 6 Aug 2014 13:00:14 +0200
Subject: [PATCH 002/971] packaging: add short hash and branch name in package
version for unofficial builds
---
Makefile | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/Makefile b/Makefile
index afd7162f96..56c63903b6 100644
--- a/Makefile
+++ b/Makefile
@@ -39,6 +39,11 @@ VERSION := $(shell cat VERSION)
# Get the branch information from git
ifneq ($(shell which git),)
GIT_DATE := $(shell git log -n 1 --format="%ai")
+GIT_HASH := $(shell git log -n 1 --format="%h")
+GIT_BRANCH := $(shell git rev-parse --abbrev-ref HEAD | sed 's/[-_.]//g')
+GITINFO = .$(GIT_HASH).$(GIT_BRANCH)
+else
+GITINFO = ''
endif
ifeq ($(shell echo $(OS) | egrep -c 'Darwin|FreeBSD|OpenBSD'),1)
@@ -60,7 +65,7 @@ ifeq ($(OFFICIAL),yes)
DEBUILD_OPTS += -k$(DEBSIGN_KEYID)
endif
else
- DEB_RELEASE = 0.git$(DATE)
+ DEB_RELEASE = 0.git$(DATE)$(GITINFO)
# Do not sign unofficial builds
DEBUILD_OPTS += -uc -us
DPUT_OPTS += -u
@@ -76,7 +81,7 @@ RPMSPEC = $(RPMSPECDIR)/ansible.spec
RPMDIST = $(shell rpm --eval '%{?dist}')
RPMRELEASE = 1
ifneq ($(OFFICIAL),yes)
- RPMRELEASE = 0.git$(DATE)
+ RPMRELEASE = 0.git$(DATE)$(GITINFO)
endif
RPMNVR = "$(NAME)-$(VERSION)-$(RPMRELEASE)$(RPMDIST)"
From 0ff2936626afe83e2898e8ccecf59b891e550bf5 Mon Sep 17 00:00:00 2001
From: Jordon Replogle
Date: Wed, 13 Aug 2014 10:28:43 -0700
Subject: [PATCH 003/971] Updated per Revision Request
---
plugins/inventory/openvz.py | 73 +++++++++++++++++++------------------
1 file changed, 38 insertions(+), 35 deletions(-)
diff --git a/plugins/inventory/openvz.py b/plugins/inventory/openvz.py
index 1f441a39f5..fd0bd9ff79 100644
--- a/plugins/inventory/openvz.py
+++ b/plugins/inventory/openvz.py
@@ -5,21 +5,20 @@
#
# Copyright 2014 jordonr
#
-# This program is free software; you can redistribute it and/or modify
+# This file is part of Ansible.
+#
+# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
+# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
-# This program is distributed in the hope that it will be useful,
+# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
-# MA 02110-1301, USA.
-#
+# along with Ansible. If not, see .
#
# Inspired by libvirt_lxc.py inventory script
# https://github.com/ansible/ansible/blob/e5ef0eca03cbb6c8950c06dc50d0ca22aa8902f4/plugins/inventory/libvirt_lxc.py
@@ -33,42 +32,46 @@ import json
#List openvz hosts
-vzhosts = ['192.168.1.3','192.168.1.2','192.168.1.1']
-#Add openvzhosts to the inventory
-inventory = {'vzhosts': {'hosts': vzhosts}}
+vzhosts = ['vzhost1','vzhost2','vzhost3']
+#Add openvz hosts to the inventory and Add "_meta" trick
+inventory = {'vzhosts': {'hosts': vzhosts}, '_meta': {'hostvars': {}}}
#default group, when description not defined
default_group = ['vzguest']
-def getGuests():
- #Loop through vzhosts
- for h in vzhosts:
- #SSH to vzhost and get the list of guests in json
- pipe = Popen(['ssh', h,'vzlist','-j'], stdout=PIPE, universal_newlines=True)
+def get_guests():
+ #Loop through vzhosts
+ for h in vzhosts:
+ #SSH to vzhost and get the list of guests in json
+ pipe = Popen(['ssh', h,'vzlist','-j'], stdout=PIPE, universal_newlines=True)
- #Load Json info of guests
- json_data = json.loads(pipe.stdout.read())
+ #Load Json info of guests
+ json_data = json.loads(pipe.stdout.read())
- #loop through guests
- for j in json_data:
- #determine group from guest description
- if j['description'] is not None:
- groups = j['description'].split(",")
- else:
- groups = default_group
+ #loop through guests
+ for j in json_data:
+ #Add information to host vars
+ inventory['_meta']['hostvars'][j['hostname']] = {'ctid': j['ctid'], 'veid': j['veid'], 'vpsid': j['vpsid'], 'private_path': j['private'], 'root_path': j['root'], 'ip': j['ip']}
- #add guest to inventory
- for g in groups:
- if g not in inventory:
- inventory[g] = {'hosts': []}
+ #determine group from guest description
+ if j['description'] is not None:
+ groups = j['description'].split(",")
+ else:
+ groups = default_group
- for ip in j['ip']:
- inventory[g]['hosts'].append(ip)
+ #add guest to inventory
+ for g in groups:
+ if g not in inventory:
+ inventory[g] = {'hosts': []}
+
+ inventory[g]['hosts'].append(j['hostname'])
+
+ return inventory
- print json.dumps(inventory)
if len(sys.argv) == 2 and sys.argv[1] == '--list':
- getGuests()
+ inv_json = get_guests()
+ print json.dumps(inv_json, sort_keys=True)
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
- print json.dumps({});
+ print json.dumps({});
else:
- print "Need an argument, either --list or --host "
+ print "Need an argument, either --list or --host "
From 76f473cd5d5a8ed1c6c5deb173587ce01e5b8f29 Mon Sep 17 00:00:00 2001
From: Mathieu GAUTHIER-LAFAYE
Date: Mon, 6 Oct 2014 17:12:03 +0200
Subject: [PATCH 004/971] add a proxmox inventory plugin
---
plugins/inventory/proxmox.py | 131 +++++++++++++++++++++++++++++++++++
1 file changed, 131 insertions(+)
create mode 100755 plugins/inventory/proxmox.py
diff --git a/plugins/inventory/proxmox.py b/plugins/inventory/proxmox.py
new file mode 100755
index 0000000000..ceb4111027
--- /dev/null
+++ b/plugins/inventory/proxmox.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2014 Mathieu GAUTHIER-LAFAYE
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+
+import urllib
+import urllib2
+try:
+ import json
+except ImportError:
+ import simplejson as json
+import os
+import sys
+from optparse import OptionParser
+
+class ProxmoxNodeList(list):
+ def get_names(self):
+ return [node['node'] for node in self]
+
+class ProxmoxQemuList(list):
+ def get_names(self):
+ return [qemu['name'] for qemu in self if qemu['template'] != 1]
+
+class ProxmoxPoolList(list):
+ def get_names(self):
+ return [pool['poolid'] for pool in self]
+
+class ProxmoxPool(dict):
+ def get_members_name(self):
+ return [member['name'] for member in self['members'] if member['template'] != 1]
+
+class ProxmoxAPI(object):
+ def __init__(self, options):
+ self.options = options
+ self.credentials = None
+
+ if not options.url:
+ raise Exception('Missing mandatory parameter --url (or PROXMOX_URL).')
+ elif not options.username:
+ raise Exception('Missing mandatory parameter --username (or PROXMOX_USERNAME).')
+ elif not options.password:
+ raise Exception('Missing mandatory parameter --password (or PROXMOX_PASSWORD).')
+
+ def auth(self):
+ request_path = '{}api2/json/access/ticket'.format(self.options.url)
+
+ request_params = urllib.urlencode({
+ 'username': self.options.username,
+ 'password': self.options.password,
+ })
+
+ data = json.load(urllib2.urlopen(request_path, request_params))
+
+ self.credentials = {
+ 'ticket': data['data']['ticket'],
+ 'CSRFPreventionToken': data['data']['CSRFPreventionToken'],
+ }
+
+ def get(self, url, data=None):
+ opener = urllib2.build_opener()
+ opener.addheaders.append(('Cookie', 'PVEAuthCookie={}'.format(self.credentials['ticket'])))
+
+ request_path = '{}{}'.format(self.options.url, url)
+ request = opener.open(request_path, data)
+
+ response = json.load(request)
+ return response['data']
+
+ def nodes(self):
+ return ProxmoxNodeList(self.get('api2/json/nodes'))
+
+ def node_qemu(self, node):
+ return ProxmoxQemuList(self.get('api2/json/nodes/{}/qemu'.format(node)))
+
+ def pools(self):
+ return ProxmoxPoolList(self.get('api2/json/pools'))
+
+ def pool(self, poolid):
+ return ProxmoxPool(self.get('api2/json/pools/{}'.format(poolid)))
+
+def main_list(options):
+ result = {}
+
+ proxmox_api = ProxmoxAPI(options)
+ proxmox_api.auth()
+
+ # all
+ result['all'] = []
+ for node in proxmox_api.nodes().get_names():
+ result['all'] += proxmox_api.node_qemu(node).get_names()
+
+ # pools
+ for pool in proxmox_api.pools().get_names():
+ result[pool] = proxmox_api.pool(pool).get_members_name()
+
+ print json.dumps(result)
+
+def main_host():
+ print json.dumps({})
+
+def main():
+ parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
+ parser.add_option('--list', action="store_true", default=False, dest="list")
+ parser.add_option('--host', dest="host")
+ parser.add_option('--url', default=os.environ.get('PROXMOX_URL'), dest='url')
+ parser.add_option('--username', default=os.environ.get('PROXMOX_USERNAME'), dest='username')
+ parser.add_option('--password', default=os.environ.get('PROXMOX_PASSWORD'), dest='password')
+ (options, args) = parser.parse_args()
+
+ if options.list:
+ main_list(options)
+ elif options.host:
+ main_host()
+ else:
+ parser.print_help()
+ sys.exit(1)
+
+if __name__ == '__main__':
+ main()
From 3d62e55abe14be12292186760413ce641f852c09 Mon Sep 17 00:00:00 2001
From: Mathieu GAUTHIER-LAFAYE
Date: Tue, 7 Oct 2014 13:10:10 +0200
Subject: [PATCH 005/971] add host variables (proxmox_vmid, proxmox_uptime,
proxmox_maxmem, ...)
---
plugins/inventory/proxmox.py | 26 +++++++++++++++++++++++---
1 file changed, 23 insertions(+), 3 deletions(-)
diff --git a/plugins/inventory/proxmox.py b/plugins/inventory/proxmox.py
index ceb4111027..590949a4c6 100755
--- a/plugins/inventory/proxmox.py
+++ b/plugins/inventory/proxmox.py
@@ -33,6 +33,10 @@ class ProxmoxQemuList(list):
def get_names(self):
return [qemu['name'] for qemu in self if qemu['template'] != 1]
+ def get_by_name(self, name):
+ results = [qemu for qemu in self if qemu['name'] == name]
+ return results[0] if len(results) > 0 else None
+
class ProxmoxPoolList(list):
def get_names(self):
return [pool['poolid'] for pool in self]
@@ -107,8 +111,24 @@ def main_list(options):
print json.dumps(result)
-def main_host():
- print json.dumps({})
+def main_host(options):
+ results = {}
+
+ proxmox_api = ProxmoxAPI(options)
+ proxmox_api.auth()
+
+ host = None
+ for node in proxmox_api.nodes().get_names():
+ qemu_list = proxmox_api.node_qemu(node)
+ qemu = qemu_list.get_by_name(options.host)
+ if qemu:
+ break
+
+ if qemu:
+ for key, value in qemu.iteritems():
+ results['proxmox_' + key] = value
+
+ print json.dumps(results)
def main():
parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
@@ -122,7 +142,7 @@ def main():
if options.list:
main_list(options)
elif options.host:
- main_host()
+ main_host(options)
else:
parser.print_help()
sys.exit(1)
From 7c094c93798eeae5af92961031125de83d6ec91d Mon Sep 17 00:00:00 2001
From: Mathieu GAUTHIER-LAFAYE
Date: Tue, 7 Oct 2014 13:45:41 +0200
Subject: [PATCH 006/971] add _meta in the list json
---
plugins/inventory/proxmox.py | 56 +++++++++++++++++++++++++-----------
1 file changed, 39 insertions(+), 17 deletions(-)
diff --git a/plugins/inventory/proxmox.py b/plugins/inventory/proxmox.py
index 590949a4c6..c9d5e82a62 100755
--- a/plugins/inventory/proxmox.py
+++ b/plugins/inventory/proxmox.py
@@ -29,7 +29,18 @@ class ProxmoxNodeList(list):
def get_names(self):
return [node['node'] for node in self]
+class ProxmoxQemu(dict):
+ def get_variables(self):
+ variables = {}
+ for key, value in self.iteritems():
+ variables['proxmox_' + key] = value
+ return variables
+
class ProxmoxQemuList(list):
+ def __init__(self, data=[]):
+ for item in data:
+ self.append(ProxmoxQemu(item))
+
def get_names(self):
return [qemu['name'] for qemu in self if qemu['template'] != 1]
@@ -37,6 +48,13 @@ class ProxmoxQemuList(list):
results = [qemu for qemu in self if qemu['name'] == name]
return results[0] if len(results) > 0 else None
+ def get_variables(self):
+ variables = {}
+ for qemu in self:
+ variables[qemu['name']] = qemu.get_variables()
+
+ return variables
+
class ProxmoxPoolList(list):
def get_names(self):
return [pool['poolid'] for pool in self]
@@ -95,40 +113,42 @@ class ProxmoxAPI(object):
return ProxmoxPool(self.get('api2/json/pools/{}'.format(poolid)))
def main_list(options):
- result = {}
+ results = {
+ 'all': {
+ 'hosts': [],
+ },
+ '_meta': {
+ 'hostvars': {},
+ }
+ }
proxmox_api = ProxmoxAPI(options)
proxmox_api.auth()
- # all
- result['all'] = []
for node in proxmox_api.nodes().get_names():
- result['all'] += proxmox_api.node_qemu(node).get_names()
+ qemu_list = proxmox_api.node_qemu(node)
+ results['all']['hosts'] += qemu_list.get_names()
+ results['_meta']['hostvars'].update(qemu_list.get_variables())
# pools
for pool in proxmox_api.pools().get_names():
- result[pool] = proxmox_api.pool(pool).get_members_name()
+ results[pool] = {
+ 'hosts': proxmox_api.pool(pool).get_members_name(),
+ }
- print json.dumps(result)
+ return json.dumps(results)
def main_host(options):
- results = {}
-
proxmox_api = ProxmoxAPI(options)
proxmox_api.auth()
- host = None
for node in proxmox_api.nodes().get_names():
qemu_list = proxmox_api.node_qemu(node)
qemu = qemu_list.get_by_name(options.host)
if qemu:
- break
+ return json.dumps(qemu.get_variables())
- if qemu:
- for key, value in qemu.iteritems():
- results['proxmox_' + key] = value
-
- print json.dumps(results)
+ print json.dumps({})
def main():
parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
@@ -140,12 +160,14 @@ def main():
(options, args) = parser.parse_args()
if options.list:
- main_list(options)
+ json = main_list(options)
elif options.host:
- main_host(options)
+ json = main_host(options)
else:
parser.print_help()
sys.exit(1)
+ print json
+
if __name__ == '__main__':
main()
From d20ef3a10af5dada0a3e3b3c1f7b15fee3839990 Mon Sep 17 00:00:00 2001
From: Mathieu GAUTHIER-LAFAYE
Date: Tue, 7 Oct 2014 13:58:01 +0200
Subject: [PATCH 007/971] add --pretty for debuging purpose
---
plugins/inventory/proxmox.py | 17 +++++++++++------
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/plugins/inventory/proxmox.py b/plugins/inventory/proxmox.py
index c9d5e82a62..80f6628d97 100755
--- a/plugins/inventory/proxmox.py
+++ b/plugins/inventory/proxmox.py
@@ -136,7 +136,7 @@ def main_list(options):
'hosts': proxmox_api.pool(pool).get_members_name(),
}
- return json.dumps(results)
+ return results
def main_host(options):
proxmox_api = ProxmoxAPI(options)
@@ -146,9 +146,9 @@ def main_host(options):
qemu_list = proxmox_api.node_qemu(node)
qemu = qemu_list.get_by_name(options.host)
if qemu:
- return json.dumps(qemu.get_variables())
+ return qemu.get_variables()
- print json.dumps({})
+ return {}
def main():
parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME')
@@ -157,17 +157,22 @@ def main():
parser.add_option('--url', default=os.environ.get('PROXMOX_URL'), dest='url')
parser.add_option('--username', default=os.environ.get('PROXMOX_USERNAME'), dest='username')
parser.add_option('--password', default=os.environ.get('PROXMOX_PASSWORD'), dest='password')
+ parser.add_option('--pretty', action="store_true", default=False, dest='pretty')
(options, args) = parser.parse_args()
if options.list:
- json = main_list(options)
+ data = main_list(options)
elif options.host:
- json = main_host(options)
+ data = main_host(options)
else:
parser.print_help()
sys.exit(1)
- print json
+ indent = None
+ if options.pretty:
+ indent = 2
+
+ print json.dumps(data, indent=indent)
if __name__ == '__main__':
main()
From 3b7280b364b14e5fd6a7d1bec5fbaabd1fd23640 Mon Sep 17 00:00:00 2001
From: ktosiek
Date: Sun, 9 Nov 2014 22:40:29 +0100
Subject: [PATCH 008/971] guide_rax.rst: fix add_host invocations
change `groupname` to `groups`, as per add_host documentation
---
docsite/rst/guide_rax.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docsite/rst/guide_rax.rst b/docsite/rst/guide_rax.rst
index d00a090fa3..28321ce7fa 100644
--- a/docsite/rst/guide_rax.rst
+++ b/docsite/rst/guide_rax.rst
@@ -131,7 +131,7 @@ The rax module returns data about the nodes it creates, like IP addresses, hostn
hostname: "{{ item.name }}"
ansible_ssh_host: "{{ item.rax_accessipv4 }}"
ansible_ssh_pass: "{{ item.rax_adminpass }}"
- groupname: raxhosts
+ groups: raxhosts
with_items: rax.success
when: rax.action == 'create'
@@ -519,7 +519,7 @@ Build a complete webserver environment with servers, custom networks and load ba
ansible_ssh_host: "{{ item.rax_accessipv4 }}"
ansible_ssh_pass: "{{ item.rax_adminpass }}"
ansible_ssh_user: root
- groupname: web
+ groups: web
with_items: rax.success
when: rax.action == 'create'
From 8146d1fff3a31cf8e801770d49ee1c24b7728806 Mon Sep 17 00:00:00 2001
From: Justin Wyer
Date: Mon, 1 Dec 2014 17:17:54 +0200
Subject: [PATCH 009/971] /sys/block/sdX/queue/physical_block_size does not
correlate with /sys/block/sdX/size for advanced drives larger than 2TB,
/sys/block/sdX/queue/logical_block_size correlates with both see #9549
---
lib/ansible/module_utils/facts.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 5ceeb405d5..57476586ae 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -791,7 +791,7 @@ class LinuxHardware(Hardware):
part['start'] = get_file_content(part_sysdir + "/start",0)
part['sectors'] = get_file_content(part_sysdir + "/size",0)
- part['sectorsize'] = get_file_content(part_sysdir + "/queue/physical_block_size")
+ part['sectorsize'] = get_file_content(part_sysdir + "/queue/logical_block_size")
if not part['sectorsize']:
part['sectorsize'] = get_file_content(part_sysdir + "/queue/hw_sector_size",512)
part['size'] = module.pretty_bytes((float(part['sectors']) * float(part['sectorsize'])))
@@ -808,7 +808,7 @@ class LinuxHardware(Hardware):
d['sectors'] = get_file_content(sysdir + "/size")
if not d['sectors']:
d['sectors'] = 0
- d['sectorsize'] = get_file_content(sysdir + "/queue/physical_block_size")
+ d['sectorsize'] = get_file_content(sysdir + "/queue/logical_block_size")
if not d['sectorsize']:
d['sectorsize'] = get_file_content(sysdir + "/queue/hw_sector_size",512)
d['size'] = module.pretty_bytes(float(d['sectors']) * float(d['sectorsize']))
From 19d40cc54ce65b346901e4f040ec9007a57b3fb7 Mon Sep 17 00:00:00 2001
From: Sebastien Goasguen
Date: Wed, 10 Dec 2014 11:26:21 -0500
Subject: [PATCH 010/971] Add tags for inventory
---
plugins/inventory/apache-libcloud.py | 13 +++++++++----
1 file changed, 9 insertions(+), 4 deletions(-)
diff --git a/plugins/inventory/apache-libcloud.py b/plugins/inventory/apache-libcloud.py
index 95804095da..151daeefe0 100755
--- a/plugins/inventory/apache-libcloud.py
+++ b/plugins/inventory/apache-libcloud.py
@@ -222,12 +222,17 @@ class LibcloudInventory(object):
self.push(self.inventory, self.to_safe('type_' + node.instance_type), dest)
'''
# Inventory: Group by key pair
- if node.extra['keyname']:
- self.push(self.inventory, self.to_safe('key_' + node.extra['keyname']), dest)
+ if node.extra['key_name']:
+ self.push(self.inventory, self.to_safe('key_' + node.extra['key_name']), dest)
# Inventory: Group by security group, quick thing to handle single sg
- if node.extra['securitygroup']:
- self.push(self.inventory, self.to_safe('sg_' + node.extra['securitygroup'][0]), dest)
+ if node.extra['security_group']:
+ self.push(self.inventory, self.to_safe('sg_' + node.extra['security_group'][0]), dest)
+
+ # Inventory: Group by tag
+ if node.extra['tags']:
+ for tagkey in node.extra['tags'].keys():
+ self.push(self.inventory, self.to_safe('tag_' + tagkey + '_' + node.extra['tags'][tagkey]), dest)
def get_host_info(self):
'''
From fce04b1eba5343f0b23c50af24404a2826591345 Mon Sep 17 00:00:00 2001
From: "Federico G. Schwindt"
Date: Sun, 14 Dec 2014 22:39:17 +0000
Subject: [PATCH 011/971] Use command= when we intended to
While here sort register variables and add a comment to signal multiline
testing.
---
.../roles/test_command_shell/tasks/main.yml | 28 ++++++++++---------
1 file changed, 15 insertions(+), 13 deletions(-)
diff --git a/test/integration/roles/test_command_shell/tasks/main.yml b/test/integration/roles/test_command_shell/tasks/main.yml
index b331452b7c..877eb11cd6 100644
--- a/test/integration/roles/test_command_shell/tasks/main.yml
+++ b/test/integration/roles/test_command_shell/tasks/main.yml
@@ -82,7 +82,7 @@
file: path={{output_dir_test}}/afile.txt state=absent
- name: create afile.txt with create_afile.sh via command
- shell: "{{output_dir_test | expanduser}}/create_afile.sh {{output_dir_test | expanduser}}/afile.txt creates={{output_dir_test | expanduser}}/afile.txt"
+ command: "{{output_dir_test | expanduser}}/create_afile.sh {{output_dir_test | expanduser}}/afile.txt creates={{output_dir_test | expanduser}}/afile.txt"
- name: verify that afile.txt is present
file: path={{output_dir_test}}/afile.txt state=file
@@ -90,7 +90,7 @@
# removes
- name: remove afile.txt with remote_afile.sh via command
- shell: "{{output_dir_test | expanduser}}/remove_afile.sh {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.txt"
+ command: "{{output_dir_test | expanduser}}/remove_afile.sh {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.txt"
- name: verify that afile.txt is absent
file: path={{output_dir_test}}/afile.txt state=absent
@@ -161,21 +161,23 @@
- name: remove afile.txt using rm
shell: rm {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.txt
- register: shell_result4
+ register: shell_result3
- name: assert that using rm under shell causes a warning
assert:
that:
- - "shell_result4.warnings"
+ - "shell_result3.warnings"
- name: verify that afile.txt is absent
file: path={{output_dir_test}}/afile.txt state=absent
- register: shell_result5
+ register: shell_result4
- name: assert that the file was removed by the shell
assert:
that:
- - "shell_result5.changed == False"
+ - "shell_result4.changed == False"
+
+# multiline
- name: execute a shell command using a literal multiline block
args:
@@ -189,28 +191,28 @@
| tr -s ' ' \
| cut -f1 -d ' '
echo "this is a second line"
- register: shell_result6
+ register: shell_result5
-- debug: var=shell_result6
+- debug: var=shell_result5
- name: assert the multiline shell command ran as expected
assert:
that:
- - "shell_result6.changed"
- - "shell_result6.stdout == '5575bb6b71c9558db0b6fbbf2f19909eeb4e3b98\nthis is a second line'"
+ - "shell_result5.changed"
+ - "shell_result5.stdout == '5575bb6b71c9558db0b6fbbf2f19909eeb4e3b98\nthis is a second line'"
- name: execute a shell command using a literal multiline block with arguments in it
shell: |
executable=/bin/bash
creates={{output_dir_test | expanduser}}/afile.txt
echo "test"
- register: shell_result7
+ register: shell_result6
- name: assert the multiline shell command with arguments in it run as expected
assert:
that:
- - "shell_result7.changed"
- - "shell_result7.stdout == 'test'"
+ - "shell_result6.changed"
+ - "shell_result6.stdout == 'test'"
- name: remove the previously created file
file: path={{output_dir_test}}/afile.txt state=absent
From 91a73cff81476873d73f112406a1c6dae6793c6f Mon Sep 17 00:00:00 2001
From: "Federico G. Schwindt"
Date: Sun, 14 Dec 2014 22:40:04 +0000
Subject: [PATCH 012/971] Add tests for globbing support
---
.../roles/test_command_shell/tasks/main.yml | 18 +++++++++++++++---
1 file changed, 15 insertions(+), 3 deletions(-)
diff --git a/test/integration/roles/test_command_shell/tasks/main.yml b/test/integration/roles/test_command_shell/tasks/main.yml
index 877eb11cd6..325e76cffe 100644
--- a/test/integration/roles/test_command_shell/tasks/main.yml
+++ b/test/integration/roles/test_command_shell/tasks/main.yml
@@ -87,6 +87,15 @@
- name: verify that afile.txt is present
file: path={{output_dir_test}}/afile.txt state=file
+- name: re-run previous command using creates with globbing
+ command: "{{output_dir_test | expanduser}}/create_afile.sh {{output_dir_test | expanduser}}/afile.txt creates={{output_dir_test | expanduser}}/afile.*"
+ register: command_result3
+
+- name: assert that creates with globbing is working
+ assert:
+ that:
+ - "command_result3.changed != True"
+
# removes
- name: remove afile.txt with remote_afile.sh via command
@@ -94,12 +103,15 @@
- name: verify that afile.txt is absent
file: path={{output_dir_test}}/afile.txt state=absent
- register: command_result3
-- name: assert that the file was removed by the script
+- name: re-run previous command using removes with globbing
+ command: "{{output_dir_test | expanduser}}/remove_afile.sh {{output_dir_test | expanduser}}/afile.txt removes={{output_dir_test | expanduser}}/afile.*"
+ register: command_result4
+
+- name: assert that removes with globbing is working
assert:
that:
- - "command_result3.changed != True"
+ - "command_result4.changed != True"
##
## shell
From 9639f1d8e7b4a756b7343cebd37b015b67a2418f Mon Sep 17 00:00:00 2001
From: axos88
Date: Thu, 18 Dec 2014 12:52:15 +0100
Subject: [PATCH 013/971] Make issue types an enumeration
Easier to copy & paste, and delete all except the correct line.
---
ISSUE_TEMPLATE.md | 9 ++++++++-
1 file changed, 8 insertions(+), 1 deletion(-)
diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md
index 8ce40348ca..511760de26 100644
--- a/ISSUE_TEMPLATE.md
+++ b/ISSUE_TEMPLATE.md
@@ -1,6 +1,13 @@
##### Issue Type:
-Can you help us out in labelling this by telling us what kind of ticket this this? You can say “Bug Report”, “Feature Idea”, “Feature Pull Request”, “New Module Pull Request”, “Bugfix Pull Request”, “Documentation Report”, or “Docs Pull Request”.
+Can you help us out in labelling this by telling us what kind of ticket this this? You can say:
+ - Bug Report
+ - Feature Idea
+ - Feature Pull Request
+ - New Module Pull Request
+ - Bugfix Pull Request
+ - Documentation Report
+ - Docs Pull Request
##### Ansible Version:
From 64141dd78987d19b5b72330c0c456d76e31d609f Mon Sep 17 00:00:00 2001
From: John Barker
Date: Wed, 31 Dec 2014 22:06:15 +0000
Subject: [PATCH 014/971] Correct URL to github so links work when testing
locally
---
docsite/rst/community.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docsite/rst/community.rst b/docsite/rst/community.rst
index 4d2de28ce1..c4c9f52b2e 100644
--- a/docsite/rst/community.rst
+++ b/docsite/rst/community.rst
@@ -66,7 +66,7 @@ Bugs related to the core language should be reported to `github.com/ansible/ansi
signing up for a free github account. Before reporting a bug, please use the bug/issue search
to see if the issue has already been reported.
-MODULE related bugs however should go to `ansible-modules-core `_ or `ansible-modules-extras `_ based on the classification of the module. This is listed on the bottom of the docs page for any module.
+MODULE related bugs however should go to `ansible-modules-core `_ or `ansible-modules-extras `_ based on the classification of the module. This is listed on the bottom of the docs page for any module.
When filing a bug, please use the `issue template `_ to provide all relevant information, regardless of what repo you are filing a ticket against.
From 54f1eebde855d5ee14b97d0cd91ed1b3b54fe49a Mon Sep 17 00:00:00 2001
From: John Barker
Date: Thu, 1 Jan 2015 14:13:59 +0000
Subject: [PATCH 015/971] Strip formatting from lists of modules
---
hacking/module_formatter.py | 21 ++++++++++++++++++++-
1 file changed, 20 insertions(+), 1 deletion(-)
diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py
index 0a7d1c884c..26e403e865 100755
--- a/hacking/module_formatter.py
+++ b/hacking/module_formatter.py
@@ -88,6 +88,24 @@ def html_ify(text):
return t
+#####################################################################################
+
+def strip_formatting(text):
+ ''' Strips formatting
+ In lists of modules, etc, we don't want certain words to be formatted
+ Also due to a bug in RST, you can not easily nest formatting
+ #http://docutils.sourceforge.net/FAQ.html#is-nested-inline-markup-possible
+ '''
+
+ t = cgi.escape(text)
+ t = _ITALIC.sub(r"\1", t)
+ t = _BOLD.sub(r"\1", t)
+ t = _MODULE.sub(r"\1", t)
+ t = _URL.sub(r"\1", t)
+ t = _CONST.sub(r"\1", t)
+
+ return t
+
#####################################################################################
@@ -310,7 +328,8 @@ def print_modules(module, category_file, deprecated, core, options, env, templat
result = process_module(modname, options, env, template, outputname, module_map, aliases)
if result != "SKIPPED":
- category_file.write(" %s - %s <%s_module>\n" % (modstring, result, module))
+ # Some of the module descriptions have formatting in them, this is noisy in lists, so remove it
+ category_file.write(" %s - %s <%s_module>\n" % (modstring, strip_formatting(result), module))
def process_category(category, categories, options, env, template, outputname):
From dc6e8bff34e1305a79febca44722c4345512d6ad Mon Sep 17 00:00:00 2001
From: John Barker
Date: Sat, 3 Jan 2015 11:42:44 +0000
Subject: [PATCH 016/971] Fix some mistakes in CHANGELOG.md
---
CHANGELOG.md | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a989cdcd44..70e1c8dc9b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -97,7 +97,7 @@ And various other bug fixes and improvements ...
- Fixes a bug in vault where the password file option was not being used correctly internally.
- Improved multi-line parsing when using YAML literal blocks (using > or |).
- Fixed a bug with the file module and the creation of relative symlinks.
-- Fixed a bug where checkmode was not being honored during the templating of files.
+- Fixed a bug where checkmode was not being honoured during the templating of files.
- Other various bug fixes.
## 1.7.1 "Summer Nights" - Aug 14, 2014
@@ -140,7 +140,7 @@ New Modules:
Other notable changes:
* Security fixes
- - Prevent the use of lookups when using legaxy "{{ }}" syntax around variables and with_* loops.
+ - Prevent the use of lookups when using legacy "{{ }}" syntax around variables and with_* loops.
- Remove relative paths in TAR-archived file names used by ansible-galaxy.
* Inventory speed improvements for very large inventories.
* Vault password files can now be executable, to support scripts that fetch the vault password.
@@ -319,7 +319,7 @@ Major features/changes:
* ec2 module now accepts 'exact_count' and 'count_tag' as a way to enforce a running number of nodes by tags.
* all ec2 modules that work with Eucalyptus also now support a 'validate_certs' option, which can be set to 'off' for installations using self-signed certs.
* Start of new integration test infrastructure (WIP, more details TBD)
-* if repoquery is unavailble, the yum module will automatically attempt to install yum-utils
+* if repoquery is unavailable, the yum module will automatically attempt to install yum-utils
* ansible-vault: a framework for encrypting your playbooks and variable files
* added support for privilege escalation via 'su' into bin/ansible and bin/ansible-playbook and associated keywords 'su', 'su_user', 'su_pass' for tasks/plays
@@ -782,7 +782,7 @@ Bugfixes and Misc Changes:
* misc fixes to the Riak module
* make template module slightly more efficient
* base64encode / decode filters are now available to templates
-* libvirt module can now work with multiple different libvirt connecton URIs
+* libvirt module can now work with multiple different libvirt connection URIs
* fix for postgresql password escaping
* unicode fix for shlex.split in some cases
* apt module upgrade logic improved
@@ -817,7 +817,7 @@ the variable is still registered for the host, with the attribute skipped: True.
* service pattern argument now correctly read for BSD services
* fetch location can now be controlled more directly via the 'flat' parameter.
* added basename and dirname as Jinja2 filters available to all templates
-* pip works better when sudoing from unpriveledged users
+* pip works better when sudoing from unprivileged users
* fix for user creation with groups specification reporting 'changed' incorrectly in some cases
* fix for some unicode encoding errors in outputing some data in verbose mode
* improved FreeBSD, NetBSD and Solaris facts
From 64e61197f970f1602243f84cbfe9da2761b46a7c Mon Sep 17 00:00:00 2001
From: John Barker
Date: Mon, 5 Jan 2015 20:57:05 +0000
Subject: [PATCH 017/971] Revert accidental changes
---
hacking/module_formatter.py | 21 +--------------------
1 file changed, 1 insertion(+), 20 deletions(-)
diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py
index 26e403e865..0a7d1c884c 100755
--- a/hacking/module_formatter.py
+++ b/hacking/module_formatter.py
@@ -88,24 +88,6 @@ def html_ify(text):
return t
-#####################################################################################
-
-def strip_formatting(text):
- ''' Strips formatting
- In lists of modules, etc, we don't want certain words to be formatted
- Also due to a bug in RST, you can not easily nest formatting
- #http://docutils.sourceforge.net/FAQ.html#is-nested-inline-markup-possible
- '''
-
- t = cgi.escape(text)
- t = _ITALIC.sub(r"\1", t)
- t = _BOLD.sub(r"\1", t)
- t = _MODULE.sub(r"\1", t)
- t = _URL.sub(r"\1", t)
- t = _CONST.sub(r"\1", t)
-
- return t
-
#####################################################################################
@@ -328,8 +310,7 @@ def print_modules(module, category_file, deprecated, core, options, env, templat
result = process_module(modname, options, env, template, outputname, module_map, aliases)
if result != "SKIPPED":
- # Some of the module descriptions have formatting in them, this is noisy in lists, so remove it
- category_file.write(" %s - %s <%s_module>\n" % (modstring, strip_formatting(result), module))
+ category_file.write(" %s - %s <%s_module>\n" % (modstring, result, module))
def process_category(category, categories, options, env, template, outputname):
From e213fdb15dfc6964705c0b5d1567cd0872a26497 Mon Sep 17 00:00:00 2001
From: volanja
Date: Fri, 9 Jan 2015 01:24:41 +0900
Subject: [PATCH 018/971] to replace `running` with `started`
---
docsite/rst/test_strategies.rst | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docsite/rst/test_strategies.rst b/docsite/rst/test_strategies.rst
index a3abf16090..be1b80550d 100644
--- a/docsite/rst/test_strategies.rst
+++ b/docsite/rst/test_strategies.rst
@@ -19,16 +19,16 @@ also very easy to run the steps on the localhost or testing servers. Ansible let
The Right Level of Testing
``````````````````````````
-Ansible resources are models of desired-state. As such, it should not be necessary to test that services are running, packages are
+Ansible resources are models of desired-state. As such, it should not be necessary to test that services are started, packages are
installed, or other such things. Ansible is the system that will ensure these things are declaratively true. Instead, assert these
things in your playbooks.
.. code-block:: yaml
tasks:
- - service: name=foo state=running enabled=yes
+ - service: name=foo state=started enabled=yes
-If you think the service may not be running, the best thing to do is request it to be running. If the service fails to start, Ansible
+If you think the service may not be started, the best thing to do is request it to be started. If the service fails to start, Ansible
will yell appropriately. (This should not be confused with whether the service is doing something functional, which we'll show more about how to
do later).
From 4c661e2b93ad9a7b51de196287b9da7c6b7467d6 Mon Sep 17 00:00:00 2001
From: pdelared
Date: Tue, 10 Feb 2015 17:33:29 +0100
Subject: [PATCH 019/971] Update facts.py
Added support for HPUX network fact
---
lib/ansible/module_utils/facts.py | 51 +++++++++++++++++++++++++++++++
1 file changed, 51 insertions(+)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 6d602af736..323c0c0d05 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -2048,6 +2048,57 @@ class GenericBsdIfconfigNetwork(Network):
for item in ifinfo[ip_type][0].keys():
defaults[item] = ifinfo[ip_type][0][item]
+class HPUX(Network):
+ """
+ HP-UX-specific subclass of Network. Defines networking facts:
+ - default_interface
+ - interfaces (a list of interface names)
+ - interface_ dictionary of ipv4 address information.
+ """
+ platform = 'HP-UX'
+
+ def __init__(self, module):
+ Network.__init__(self, module)
+
+ def populate(self):
+ netstat_path = self.module.get_bin_path('netstat')
+ if netstat_path is None:
+ return self.facts
+ self.get_default_interfaces()
+ interfaces = self.get_interfaces_info()
+ self.facts['interfaces'] = interfaces.keys()
+ for iface in interfaces:
+ self.facts[iface] = interfaces[iface]
+ return self.facts
+
+ def get_default_interfaces(self):
+ rc, out, err = module.run_command("/usr/bin/netstat -nr", use_unsafe_shell=True)
+ lines = out.split('\n')
+ for line in lines:
+ words = line.split()
+ if len(words) > 1:
+ if words[0] == 'default':
+ self.facts['default_interface'] = words[4]
+ self.facts['default_gateway'] = words[1]
+
+ def get_interfaces_info(self):
+ interfaces = {}
+ rc, out, err = module.run_command("/usr/bin/netstat -ni", use_unsafe_shell=True)
+ lines = out.split('\n')
+ for line in lines:
+ words = line.split()
+ for i in range(len(words) - 1):
+ if words[i][:3] == 'lan':
+ device = words[i]
+ interfaces[device] = { 'device': device }
+ address = words[i+3]
+ interfaces[device]['ipv4'] = { 'address': address }
+ network = words[i+2]
+ interfaces[device]['ipv4'] = { 'network': network,
+ 'interface': device,
+ 'address': address }
+ return interfaces
+
class DarwinNetwork(GenericBsdIfconfigNetwork, Network):
"""
This is the Mac OS X/Darwin Network Class.
From c6942578bfb8ecf79850f418ca94d2655b3cef12 Mon Sep 17 00:00:00 2001
From: Henrik Danielsson
Date: Tue, 24 Mar 2015 11:27:12 +0100
Subject: [PATCH 020/971] Added installation instructions for Arch Linux.
---
docsite/rst/intro_installation.rst | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/docsite/rst/intro_installation.rst b/docsite/rst/intro_installation.rst
index 303880cac1..450d125e5f 100644
--- a/docsite/rst/intro_installation.rst
+++ b/docsite/rst/intro_installation.rst
@@ -261,6 +261,17 @@ Ansible is available for Solaris as `SysV package from OpenCSW `_.
+
.. _from_pip:
Latest Releases Via Pip
From a0c34da779f583915a945f4ec039dd5f7b6e422c Mon Sep 17 00:00:00 2001
From: Simon Gomizelj
Date: Wed, 8 Apr 2015 13:57:56 -0400
Subject: [PATCH 021/971] Support querying systemd container information
systemd writes a /run/systemd/container file in any container it starts
to make it really easy to detect the container type. This adds support
for detecting systemd-nspawn containers (and any other container format
that will write data there for compatibility).
---
lib/ansible/module_utils/facts.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 628d1dd267..300ed3ad2e 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -2394,6 +2394,12 @@ class LinuxVirtual(Virtual):
self.facts['virtualization_role'] = 'guest'
return
+ systemd_container = get_file_content('/run/systemd/container')
+ if systemd_container:
+ self.facts['virtualization_type'] = systemd_container
+ self.facts['virtualization_role'] = 'guest'
+ return
+
if os.path.exists('/proc/1/cgroup'):
for line in get_file_lines('/proc/1/cgroup'):
if re.search(r'/docker(/|-[0-9a-f]+\.scope)', line):
From 1bf5224f8210141b24f98c8e432ae28b6a9a6eb5 Mon Sep 17 00:00:00 2001
From: Devin Christensen
Date: Wed, 26 Nov 2014 17:58:45 -0700
Subject: [PATCH 022/971] Enable writing plugins for jinja2 tests
---
lib/ansible/constants.py | 1 +
lib/ansible/runner/filter_plugins/core.py | 86 -------------
.../runner/filter_plugins/mathstuff.py | 8 --
lib/ansible/runner/test_plugins/__init__.py | 0
lib/ansible/runner/test_plugins/core.py | 113 ++++++++++++++++++
lib/ansible/runner/test_plugins/math.py | 36 ++++++
lib/ansible/utils/__init__.py | 6 +-
lib/ansible/utils/plugins.py | 7 ++
lib/ansible/utils/template.py | 19 +++
v2/ansible/constants.py | 1 +
v2/ansible/plugins/__init__.py | 7 ++
v2/ansible/template/__init__.py | 23 +++-
v2/ansible/template/safe_eval.py | 8 +-
13 files changed, 216 insertions(+), 99 deletions(-)
create mode 100644 lib/ansible/runner/test_plugins/__init__.py
create mode 100644 lib/ansible/runner/test_plugins/core.py
create mode 100644 lib/ansible/runner/test_plugins/math.py
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 089de5b7c5..5dbb9e2383 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -156,6 +156,7 @@ DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', '
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '~/.ansible/plugins/lookup_plugins:/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '~/.ansible/plugins/vars_plugins:/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '~/.ansible/plugins/filter_plugins:/usr/share/ansible_plugins/filter_plugins')
+DEFAULT_TEST_PLUGIN_PATH = get_config(p, DEFAULTS, 'test_plugins', 'ANSIBLE_TEST_PLUGINS', '~/.ansible/plugins/test_plugins:/usr/share/ansible_plugins/test_plugins')
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'fact_caching_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
diff --git a/lib/ansible/runner/filter_plugins/core.py b/lib/ansible/runner/filter_plugins/core.py
index bdf45509c3..c527bc529f 100644
--- a/lib/ansible/runner/filter_plugins/core.py
+++ b/lib/ansible/runner/filter_plugins/core.py
@@ -74,55 +74,6 @@ def to_nice_json(a, *args, **kw):
return to_json(a, *args, **kw)
return json.dumps(a, indent=4, sort_keys=True, *args, **kw)
-def failed(*a, **kw):
- ''' Test if task result yields failed '''
- item = a[0]
- if type(item) != dict:
- raise errors.AnsibleFilterError("|failed expects a dictionary")
- rc = item.get('rc',0)
- failed = item.get('failed',False)
- if rc != 0 or failed:
- return True
- else:
- return False
-
-def success(*a, **kw):
- ''' Test if task result yields success '''
- return not failed(*a, **kw)
-
-def changed(*a, **kw):
- ''' Test if task result yields changed '''
- item = a[0]
- if type(item) != dict:
- raise errors.AnsibleFilterError("|changed expects a dictionary")
- if not 'changed' in item:
- changed = False
- if ('results' in item # some modules return a 'results' key
- and type(item['results']) == list
- and type(item['results'][0]) == dict):
- for result in item['results']:
- changed = changed or result.get('changed', False)
- else:
- changed = item.get('changed', False)
- return changed
-
-def skipped(*a, **kw):
- ''' Test if task result yields skipped '''
- item = a[0]
- if type(item) != dict:
- raise errors.AnsibleFilterError("|skipped expects a dictionary")
- skipped = item.get('skipped', False)
- return skipped
-
-def mandatory(a):
- ''' Make a variable mandatory '''
- try:
- a
- except NameError:
- raise errors.AnsibleFilterError('Mandatory variable not defined.')
- else:
- return a
-
def bool(a):
''' return a bool for the arg '''
if a is None or type(a) == bool:
@@ -142,27 +93,6 @@ def fileglob(pathname):
''' return list of matched files for glob '''
return glob.glob(pathname)
-def regex(value='', pattern='', ignorecase=False, match_type='search'):
- ''' Expose `re` as a boolean filter using the `search` method by default.
- This is likely only useful for `search` and `match` which already
- have their own filters.
- '''
- if ignorecase:
- flags = re.I
- else:
- flags = 0
- _re = re.compile(pattern, flags=flags)
- _bool = __builtins__.get('bool')
- return _bool(getattr(_re, match_type, 'search')(value))
-
-def match(value, pattern='', ignorecase=False):
- ''' Perform a `re.match` returning a boolean '''
- return regex(value, pattern, ignorecase, 'match')
-
-def search(value, pattern='', ignorecase=False):
- ''' Perform a `re.search` returning a boolean '''
- return regex(value, pattern, ignorecase, 'search')
-
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
''' Perform a `re.sub` returning a string '''
@@ -299,19 +229,6 @@ class FilterModule(object):
'realpath': partial(unicode_wrap, os.path.realpath),
'relpath': partial(unicode_wrap, os.path.relpath),
- # failure testing
- 'failed' : failed,
- 'success' : success,
-
- # changed testing
- 'changed' : changed,
-
- # skip testing
- 'skipped' : skipped,
-
- # variable existence
- 'mandatory': mandatory,
-
# value as boolean
'bool': bool,
@@ -333,9 +250,6 @@ class FilterModule(object):
'fileglob': fileglob,
# regex
- 'match': match,
- 'search': search,
- 'regex': regex,
'regex_replace': regex_replace,
# ? : ;
diff --git a/lib/ansible/runner/filter_plugins/mathstuff.py b/lib/ansible/runner/filter_plugins/mathstuff.py
index c6a49485a4..a841c6e457 100644
--- a/lib/ansible/runner/filter_plugins/mathstuff.py
+++ b/lib/ansible/runner/filter_plugins/mathstuff.py
@@ -67,13 +67,6 @@ def max(a):
_max = __builtins__.get('max')
return _max(a);
-def isnotanumber(x):
- try:
- return math.isnan(x)
- except TypeError:
- return False
-
-
def logarithm(x, base=math.e):
try:
if base == 10:
@@ -107,7 +100,6 @@ class FilterModule(object):
def filters(self):
return {
# general math
- 'isnan': isnotanumber,
'min' : min,
'max' : max,
diff --git a/lib/ansible/runner/test_plugins/__init__.py b/lib/ansible/runner/test_plugins/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/lib/ansible/runner/test_plugins/core.py b/lib/ansible/runner/test_plugins/core.py
new file mode 100644
index 0000000000..cc8c702d75
--- /dev/null
+++ b/lib/ansible/runner/test_plugins/core.py
@@ -0,0 +1,113 @@
+# (c) 2012, Jeroen Hoekx
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+import re
+from ansible import errors
+
+def failed(*a, **kw):
+ ''' Test if task result yields failed '''
+ item = a[0]
+ if type(item) != dict:
+ raise errors.AnsibleFilterError("|failed expects a dictionary")
+ rc = item.get('rc',0)
+ failed = item.get('failed',False)
+ if rc != 0 or failed:
+ return True
+ else:
+ return False
+
+def success(*a, **kw):
+ ''' Test if task result yields success '''
+ return not failed(*a, **kw)
+
+def changed(*a, **kw):
+ ''' Test if task result yields changed '''
+ item = a[0]
+ if type(item) != dict:
+ raise errors.AnsibleFilterError("|changed expects a dictionary")
+ if not 'changed' in item:
+ changed = False
+ if ('results' in item # some modules return a 'results' key
+ and type(item['results']) == list
+ and type(item['results'][0]) == dict):
+ for result in item['results']:
+ changed = changed or result.get('changed', False)
+ else:
+ changed = item.get('changed', False)
+ return changed
+
+def skipped(*a, **kw):
+ ''' Test if task result yields skipped '''
+ item = a[0]
+ if type(item) != dict:
+ raise errors.AnsibleFilterError("|skipped expects a dictionary")
+ skipped = item.get('skipped', False)
+ return skipped
+
+def mandatory(a):
+ ''' Make a variable mandatory '''
+ try:
+ a
+ except NameError:
+ raise errors.AnsibleFilterError('Mandatory variable not defined.')
+ else:
+ return a
+
+def regex(value='', pattern='', ignorecase=False, match_type='search'):
+ ''' Expose `re` as a boolean filter using the `search` method by default.
+ This is likely only useful for `search` and `match` which already
+ have their own filters.
+ '''
+ if ignorecase:
+ flags = re.I
+ else:
+ flags = 0
+ _re = re.compile(pattern, flags=flags)
+ _bool = __builtins__.get('bool')
+ return _bool(getattr(_re, match_type, 'search')(value))
+
+def match(value, pattern='', ignorecase=False):
+ ''' Perform a `re.match` returning a boolean '''
+ return regex(value, pattern, ignorecase, 'match')
+
+def search(value, pattern='', ignorecase=False):
+ ''' Perform a `re.search` returning a boolean '''
+ return regex(value, pattern, ignorecase, 'search')
+
+class TestModule(object):
+ ''' Ansible core jinja2 tests '''
+
+ def tests(self):
+ return {
+ # failure testing
+ 'failed' : failed,
+ 'success' : success,
+
+ # changed testing
+ 'changed' : changed,
+
+ # skip testing
+ 'skipped' : skipped,
+
+ # variable existence
+ 'mandatory': mandatory,
+
+ # regex
+ 'match': match,
+ 'search': search,
+ 'regex': regex,
+ }
diff --git a/lib/ansible/runner/test_plugins/math.py b/lib/ansible/runner/test_plugins/math.py
new file mode 100644
index 0000000000..3ac871c435
--- /dev/null
+++ b/lib/ansible/runner/test_plugins/math.py
@@ -0,0 +1,36 @@
+# (c) 2014, Brian Coca
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+from __future__ import absolute_import
+
+import math
+from ansible import errors
+
+def isnotanumber(x):
+ try:
+ return math.isnan(x)
+ except TypeError:
+ return False
+
+class TestModule(object):
+ ''' Ansible math jinja2 tests '''
+
+ def tests(self):
+ return {
+ # general math
+ 'isnan': isnotanumber,
+ }
diff --git a/lib/ansible/utils/__init__.py b/lib/ansible/utils/__init__.py
index 7ed07a54c8..17790d63c5 100644
--- a/lib/ansible/utils/__init__.py
+++ b/lib/ansible/utils/__init__.py
@@ -1403,7 +1403,11 @@ def safe_eval(expr, locals={}, include_exceptions=False):
for filter in filter_loader.all():
filter_list.extend(filter.filters().keys())
- CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
+ test_list = []
+ for test in test_loader.all():
+ test_list.extend(test.tests().keys())
+
+ CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list + test_list
class CleansingNodeVisitor(ast.NodeVisitor):
def generic_visit(self, node, inside_call=False):
diff --git a/lib/ansible/utils/plugins.py b/lib/ansible/utils/plugins.py
index 14953d8f44..c50ebcb9ce 100644
--- a/lib/ansible/utils/plugins.py
+++ b/lib/ansible/utils/plugins.py
@@ -296,6 +296,13 @@ filter_loader = PluginLoader(
'filter_plugins'
)
+test_loader = PluginLoader(
+ 'TestModule',
+ 'ansible.runner.test_plugins',
+ C.DEFAULT_TEST_PLUGIN_PATH,
+ 'test_plugins'
+)
+
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
diff --git a/lib/ansible/utils/template.py b/lib/ansible/utils/template.py
index 5f712b2675..043ad0c419 100644
--- a/lib/ansible/utils/template.py
+++ b/lib/ansible/utils/template.py
@@ -39,6 +39,7 @@ from ansible.utils import to_bytes, to_unicode
class Globals(object):
FILTERS = None
+ TESTS = None
def __init__(self):
pass
@@ -54,10 +55,26 @@ def _get_filters():
filters = {}
for fp in plugins:
filters.update(fp.filters())
+ filters.update(_get_tests())
Globals.FILTERS = filters
return Globals.FILTERS
+def _get_tests():
+ ''' return test plugin instances '''
+
+ if Globals.TESTS is not None:
+ return Globals.TESTS
+
+ from ansible import utils
+ plugins = [ x for x in utils.plugins.test_loader.all()]
+ tests = {}
+ for tp in plugins:
+ tests.update(tp.tests())
+ Globals.TESTS = tests
+
+ return Globals.TESTS
+
def _get_extensions():
''' return jinja2 extensions to load '''
@@ -237,6 +254,7 @@ def template_from_file(basedir, path, vars, vault_password=None):
environment = jinja2.Environment(loader=loader, trim_blocks=True, extensions=_get_extensions())
environment.filters.update(_get_filters())
+ environment.tests.update(_get_tests())
environment.globals['lookup'] = my_lookup
environment.globals['finalize'] = my_finalize
if fail_on_undefined:
@@ -351,6 +369,7 @@ def template_from_string(basedir, data, vars, fail_on_undefined=False):
environment = jinja2.Environment(trim_blocks=True, undefined=StrictUndefined, extensions=_get_extensions(), finalize=my_finalize)
environment.filters.update(_get_filters())
+ environment.tests.update(_get_tests())
environment.template_class = J2Template
if '_original_file' in vars:
diff --git a/v2/ansible/constants.py b/v2/ansible/constants.py
index 913df310c1..2fbb4d39c5 100644
--- a/v2/ansible/constants.py
+++ b/v2/ansible/constants.py
@@ -162,6 +162,7 @@ DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', '
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '~/.ansible/plugins/lookup_plugins:/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '~/.ansible/plugins/vars_plugins:/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '~/.ansible/plugins/filter_plugins:/usr/share/ansible_plugins/filter_plugins')
+DEFAULT_TEST_PLUGIN_PATH = get_config(p, DEFAULTS, 'test_plugins', 'ANSIBLE_TEST_PLUGINS', '~/.ansible/plugins/test_plugins:/usr/share/ansible_plugins/test_plugins')
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'fact_caching_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
diff --git a/v2/ansible/plugins/__init__.py b/v2/ansible/plugins/__init__.py
index d16eecd3c3..1c445c3f5a 100644
--- a/v2/ansible/plugins/__init__.py
+++ b/v2/ansible/plugins/__init__.py
@@ -311,6 +311,13 @@ filter_loader = PluginLoader(
'filter_plugins'
)
+test_loader = PluginLoader(
+ 'TestModule',
+ 'ansible.plugins.test',
+ C.DEFAULT_TEST_PLUGIN_PATH,
+ 'test_plugins'
+)
+
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
diff --git a/v2/ansible/template/__init__.py b/v2/ansible/template/__init__.py
index 6c41ad3cf4..9e15bb3bd8 100644
--- a/v2/ansible/template/__init__.py
+++ b/v2/ansible/template/__init__.py
@@ -28,7 +28,7 @@ from jinja2.runtime import StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
-from ansible.plugins import filter_loader, lookup_loader
+from ansible.plugins import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
@@ -57,6 +57,7 @@ class Templar:
self._loader = loader
self._basedir = loader.get_basedir()
self._filters = None
+ self._tests = None
self._available_variables = variables
# flags to determine whether certain failures during templating
@@ -93,11 +94,28 @@ class Templar:
self._filters = dict()
for fp in plugins:
self._filters.update(fp.filters())
+ self._filters.update(self._get_tests())
return self._filters.copy()
+ def _get_tests(self):
+ '''
+ Returns tests plugins, after loading and caching them if need be
+ '''
+
+ if self._tests is not None:
+ return self._tests.copy()
+
+ plugins = [x for x in test_loader.all()]
+
+ self._tests = dict()
+ for fp in plugins:
+ self._tests.update(fp.tests())
+
+ return self._tests.copy()
+
def _get_extensions(self):
- '''
+ '''
Return jinja2 extensions to load.
If some extensions are set via jinja_extensions in ansible.cfg, we try
@@ -229,6 +247,7 @@ class Templar:
environment = Environment(trim_blocks=True, undefined=StrictUndefined, extensions=self._get_extensions(), finalize=self._finalize)
environment.filters.update(self._get_filters())
+ environment.tests.update(self._get_tests())
environment.template_class = AnsibleJ2Template
# FIXME: may not be required anymore, as the basedir stuff will
diff --git a/v2/ansible/template/safe_eval.py b/v2/ansible/template/safe_eval.py
index 2689949504..5e2d1e1fe3 100644
--- a/v2/ansible/template/safe_eval.py
+++ b/v2/ansible/template/safe_eval.py
@@ -23,7 +23,7 @@ import sys
from six.moves import builtins
from ansible import constants as C
-from ansible.plugins import filter_loader
+from ansible.plugins import filter_loader, test_loader
def safe_eval(expr, locals={}, include_exceptions=False):
'''
@@ -77,7 +77,11 @@ def safe_eval(expr, locals={}, include_exceptions=False):
for filter in filter_loader.all():
filter_list.extend(filter.filters().keys())
- CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
+ test_list = []
+ for test in test_loader.all():
+ test_list.extend(test.tests().keys())
+
+ CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list + test_list
class CleansingNodeVisitor(ast.NodeVisitor):
def generic_visit(self, node, inside_call=False):
From 47c3d75c3cac67875e6711e992a3d95c4351cad3 Mon Sep 17 00:00:00 2001
From: Jeff Bachtel
Date: Tue, 28 Apr 2015 14:17:53 -0400
Subject: [PATCH 023/971] Add test for
https://github.com/ansible/ansible/issues/9851
---
.../roles/test_filters/files/9851.txt | 3 +++
.../roles/test_filters/tasks/main.yml | 19 +++++++++++++++++++
2 files changed, 22 insertions(+)
create mode 100644 test/integration/roles/test_filters/files/9851.txt
diff --git a/test/integration/roles/test_filters/files/9851.txt b/test/integration/roles/test_filters/files/9851.txt
new file mode 100644
index 0000000000..70b12793e1
--- /dev/null
+++ b/test/integration/roles/test_filters/files/9851.txt
@@ -0,0 +1,3 @@
+ [{
+ "k": "Quotes \"'\n"
+}]
diff --git a/test/integration/roles/test_filters/tasks/main.yml b/test/integration/roles/test_filters/tasks/main.yml
index 3d1ee322e3..c4872b5037 100644
--- a/test/integration/roles/test_filters/tasks/main.yml
+++ b/test/integration/roles/test_filters/tasks/main.yml
@@ -25,6 +25,25 @@
- name: Verify that we workaround a py26 json bug
template: src=py26json.j2 dest={{output_dir}}/py26json.templated mode=0644
+- name: 9851 - Verify that we don't trigger https://github.com/ansible/ansible/issues/9851
+ copy:
+ content: " [{{item|to_nice_json}}]"
+ dest: "{{output_dir}}/9851.out"
+ with_items:
+ - {"k": "Quotes \"'\n"}
+
+- name: 9851 - copy known good output into place
+ copy: src=9851.txt dest={{output_dir}}/9851.txt
+
+- name: 9851 - Compare generated json to known good
+ shell: diff {{output_dir}}/9851.out {{output_dir}}/9851.txt
+ register: 9851_diff_result
+
+- name: 9851 - verify generated file matches known good
+ assert:
+ that:
+ - '9851_diff_result.stdout == ""'
+
- name: fill in a basic template
template: src=foo.j2 dest={{output_dir}}/foo.templated mode=0644
register: template_result
From cf3f7b0043bed07415b6fab9578894a91cdf75b4 Mon Sep 17 00:00:00 2001
From: Daniel Farrell
Date: Tue, 28 Apr 2015 18:24:01 -0400
Subject: [PATCH 024/971] Correct minor grammar error in Playbook intro docs
Signed-off-by: Daniel Farrell
---
docsite/rst/playbooks_intro.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docsite/rst/playbooks_intro.rst b/docsite/rst/playbooks_intro.rst
index a27285b4a9..3899502ed4 100644
--- a/docsite/rst/playbooks_intro.rst
+++ b/docsite/rst/playbooks_intro.rst
@@ -148,7 +148,7 @@ Remote users can also be defined per task::
The `remote_user` parameter for tasks was added in 1.4.
-Support for running things from as another user is also available (see :doc:`become`)::
+Support for running things as another user is also available (see :doc:`become`)::
---
- hosts: webservers
From ce3ef7f4c16e47d5a0b5600e1c56c177b7c93f0d Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sun, 3 May 2015 21:47:26 -0500
Subject: [PATCH 025/971] Making the switch to v2
---
.gitmodules | 16 -
bin/ansible | 202 +-
bin/ansible-doc | 338 +---
bin/ansible-galaxy | 958 +---------
bin/ansible-playbook | 331 +---
bin/ansible-pull | 258 +--
bin/ansible-vault | 242 +--
lib/ansible/__init__.py | 8 +-
{v2 => lib}/ansible/cli/__init__.py | 0
{v2 => lib}/ansible/cli/adhoc.py | 0
{v2 => lib}/ansible/cli/doc.py | 0
{v2 => lib}/ansible/cli/galaxy.py | 0
{v2 => lib}/ansible/cli/playbook.py | 0
{v2 => lib}/ansible/cli/pull.py | 0
{v2 => lib}/ansible/cli/vault.py | 0
{v2 => lib}/ansible/compat/__init__.py | 0
{v2 => lib}/ansible/compat/tests/__init__.py | 0
{v2 => lib}/ansible/compat/tests/mock.py | 0
{v2 => lib}/ansible/compat/tests/unittest.py | 0
{v2 => lib}/ansible/config/__init__.py | 0
lib/ansible/constants.py | 47 +-
{v2 => lib}/ansible/errors/__init__.py | 0
{v2 => lib}/ansible/errors/yaml_strings.py | 0
{v2 => lib}/ansible/executor/__init__.py | 0
.../ansible/executor/connection_info.py | 0
{v2 => lib}/ansible/executor/module_common.py | 0
{v2 => lib}/ansible/executor/play_iterator.py | 0
.../ansible/executor/playbook_executor.py | 0
.../ansible/executor/process/__init__.py | 0
.../ansible/executor/process/result.py | 0
.../ansible/executor/process/worker.py | 0
{v2 => lib}/ansible/executor/stats.py | 0
{v2 => lib}/ansible/executor/task_executor.py | 0
.../ansible/executor/task_queue_manager.py | 0
.../ansible/executor/task_queue_manager.py: | 0
{v2 => lib}/ansible/executor/task_result.py | 0
{v2 => lib}/ansible/galaxy/__init__.py | 0
{v2 => lib}/ansible/galaxy/api.py | 0
.../ansible/galaxy/data/metadata_template.j2 | 0
{v2 => lib}/ansible/galaxy/data/readme | 0
{v2 => lib}/ansible/galaxy/role.py | 0
lib/ansible/inventory/__init__.py | 96 +-
lib/ansible/inventory/dir.py | 31 +-
lib/ansible/inventory/expand_hosts.py | 3 +
lib/ansible/inventory/group.py | 54 +-
lib/ansible/inventory/host.py | 87 +-
lib/ansible/inventory/ini.py | 58 +-
lib/ansible/inventory/script.py | 36 +-
lib/ansible/inventory/vars_plugins/noop.py | 2 +
lib/ansible/module_utils/basic.py | 68 +-
lib/ansible/module_utils/powershell.ps1 | 6 +-
lib/ansible/modules/__init__.py | 20 +
lib/ansible/modules/core | 1 -
lib/ansible/modules/extras | 1 -
{v2 => lib}/ansible/new_inventory/__init__.py | 0
{v2 => lib}/ansible/new_inventory/group.py | 0
{v2 => lib}/ansible/new_inventory/host.py | 0
{v2 => lib}/ansible/parsing/__init__.py | 0
{v2 => lib}/ansible/parsing/mod_args.py | 0
{v2 => lib}/ansible/parsing/splitter.py | 0
{v2 => lib}/ansible/parsing/utils/__init__.py | 0
{v2 => lib}/ansible/parsing/utils/jsonify.py | 0
{v2 => lib}/ansible/parsing/vault/__init__.py | 0
{v2 => lib}/ansible/parsing/yaml/__init__.py | 0
.../ansible/parsing/yaml/constructor.py | 0
{v2 => lib}/ansible/parsing/yaml/loader.py | 0
{v2 => lib}/ansible/parsing/yaml/objects.py | 0
lib/ansible/playbook/__init__.py | 887 +--------
{v2 => lib}/ansible/playbook/attribute.py | 0
{v2 => lib}/ansible/playbook/base.py | 0
{v2 => lib}/ansible/playbook/become.py | 0
{v2 => lib}/ansible/playbook/block.py | 0
{v2 => lib}/ansible/playbook/conditional.py | 0
{v2 => lib}/ansible/playbook/handler.py | 0
{v2 => lib}/ansible/playbook/helpers.py | 0
lib/ansible/playbook/play.py | 1080 ++---------
.../ansible/playbook/playbook_include.py | 0
{v2 => lib}/ansible/playbook/role/__init__.py | 0
.../ansible/playbook/role/definition.py | 0
{v2 => lib}/ansible/playbook/role/include.py | 0
{v2 => lib}/ansible/playbook/role/metadata.py | 0
.../ansible/playbook/role/requirement.py | 0
{v2 => lib}/ansible/playbook/taggable.py | 0
lib/ansible/playbook/task.py | 558 +++---
{v2 => lib}/ansible/playbook/vars.py | 0
{v2 => lib}/ansible/playbook/vars_file.py | 0
{v2 => lib}/ansible/plugins/__init__.py | 0
.../ansible/plugins/action/__init__.py | 0
.../ansible/plugins/action/add_host.py | 0
.../ansible/plugins/action/assemble.py | 0
{v2 => lib}/ansible/plugins/action/assert.py | 0
{v2 => lib}/ansible/plugins/action/async.py | 0
{v2 => lib}/ansible/plugins/action/copy.py | 0
{v2 => lib}/ansible/plugins/action/debug.py | 0
{v2 => lib}/ansible/plugins/action/fail.py | 0
{v2 => lib}/ansible/plugins/action/fetch.py | 0
.../ansible/plugins/action/group_by.py | 0
.../ansible/plugins/action/include_vars.py | 0
{v2 => lib}/ansible/plugins/action/normal.py | 0
{v2 => lib}/ansible/plugins/action/patch.py | 0
{v2 => lib}/ansible/plugins/action/pause.py | 0
{v2 => lib}/ansible/plugins/action/raw.py | 0
{v2 => lib}/ansible/plugins/action/script.py | 0
.../ansible/plugins/action/set_fact.py | 0
.../ansible/plugins/action/synchronize.py | 0
.../ansible/plugins/action/template.py | 0
.../ansible/plugins/action/unarchive.py | 0
{v2 => lib}/ansible/plugins/cache/__init__.py | 0
{v2 => lib}/ansible/plugins/cache/base.py | 0
.../ansible/plugins/cache/memcached.py | 0
{v2 => lib}/ansible/plugins/cache/memory.py | 0
{v2 => lib}/ansible/plugins/cache/redis.py | 0
.../ansible/plugins/callback/__init__.py | 0
.../ansible/plugins/callback/default.py | 0
.../ansible/plugins/callback/minimal.py | 0
.../ansible/plugins/connections/__init__.py | 0
.../ansible/plugins/connections/accelerate.py | 0
.../ansible/plugins/connections/chroot.py | 0
.../ansible/plugins/connections/funcd.py | 0
.../ansible/plugins/connections/jail.py | 0
.../plugins/connections/libvirt_lxc.py | 0
.../ansible/plugins/connections/local.py | 0
.../plugins/connections/paramiko_ssh.py | 0
.../ansible/plugins/connections/ssh.py | 0
.../ansible/plugins/connections/winrm.py | 0
.../ansible/plugins/connections/zone.py | 0
{v2 => lib}/ansible/plugins/filter | 0
.../ansible/plugins/inventory/__init__.py | 0
.../ansible/plugins/inventory/aggregate.py | 0
.../ansible/plugins/inventory/directory.py | 0
{v2 => lib}/ansible/plugins/inventory/ini.py | 0
.../ansible/plugins/lookup/__init__.py | 0
.../ansible/plugins/lookup/cartesian.py | 0
{v2 => lib}/ansible/plugins/lookup/csvfile.py | 0
{v2 => lib}/ansible/plugins/lookup/dict.py | 0
{v2 => lib}/ansible/plugins/lookup/dnstxt.py | 0
{v2 => lib}/ansible/plugins/lookup/env.py | 0
{v2 => lib}/ansible/plugins/lookup/etcd.py | 0
{v2 => lib}/ansible/plugins/lookup/file.py | 0
.../ansible/plugins/lookup/fileglob.py | 0
.../ansible/plugins/lookup/first_found.py | 0
.../ansible/plugins/lookup/flattened.py | 0
.../ansible/plugins/lookup/indexed_items.py | 0
.../plugins/lookup/inventory_hostnames.py | 0
{v2 => lib}/ansible/plugins/lookup/items.py | 0
{v2 => lib}/ansible/plugins/lookup/lines.py | 0
{v2 => lib}/ansible/plugins/lookup/nested.py | 0
.../ansible/plugins/lookup/password.py | 0
{v2 => lib}/ansible/plugins/lookup/pipe.py | 0
.../ansible/plugins/lookup/random_choice.py | 0
.../ansible/plugins/lookup/redis_kv.py | 0
.../ansible/plugins/lookup/sequence.py | 0
.../ansible/plugins/lookup/subelements.py | 0
.../ansible/plugins/lookup/template.py | 0
.../ansible/plugins/lookup/together.py | 0
{v2 => lib}/ansible/plugins/lookup/url.py | 0
{v2 => lib}/ansible/plugins/shell/__init__.py | 0
{v2 => lib}/ansible/plugins/shell/csh.py | 0
{v2 => lib}/ansible/plugins/shell/fish.py | 0
.../ansible/plugins/shell/powershell.py | 0
{v2 => lib}/ansible/plugins/shell/sh.py | 0
.../ansible/plugins/strategies/__init__.py | 0
.../ansible/plugins/strategies/free.py | 0
.../ansible/plugins/strategies/linear.py | 0
{v2 => lib}/ansible/plugins/vars/__init__.py | 0
{v2 => lib}/ansible/template/__init__.py | 0
{v2 => lib}/ansible/template/safe_eval.py | 0
{v2 => lib}/ansible/template/template.py | 0
{v2 => lib}/ansible/template/vars.py | 0
{v2 => lib/ansible}/test-requirements.txt | 0
lib/ansible/utils/__init__.py | 1646 +---------------
{v2 => lib}/ansible/utils/boolean.py | 0
{v2 => lib}/ansible/utils/color.py | 0
{v2 => lib}/ansible/utils/debug.py | 0
{v2 => lib}/ansible/utils/display.py | 0
{v2 => lib}/ansible/utils/encrypt.py | 0
lib/ansible/utils/hashing.py | 7 +-
{v2 => lib}/ansible/utils/listify.py | 0
lib/ansible/utils/module_docs.py | 4 +-
.../ansible/utils/module_docs_fragments | 0
{v2 => lib}/ansible/utils/path.py | 0
lib/ansible/utils/unicode.py | 37 +-
{v2 => lib}/ansible/utils/vars.py | 0
lib/ansible/utils/vault.py | 597 +-----
{v2 => lib}/ansible/vars/__init__.py | 0
{v2 => lib}/ansible/vars/hostvars.py | 0
{v2/samples => samples}/README.md | 0
{v2/samples => samples}/common_include.yml | 0
{v2/samples => samples}/hosts | 0
{v2/samples => samples}/ignore_errors.yml | 0
{v2/samples => samples}/include.yml | 0
{v2/samples => samples}/inv_lg | 0
{v2/samples => samples}/inv_md | 0
{v2/samples => samples}/inv_sm | 0
{v2/samples => samples}/l1_include.yml | 0
{v2/samples => samples}/l2_include.yml | 0
{v2/samples => samples}/l3_include.yml | 0
{v2/samples => samples}/localhost_include.yml | 0
{v2/samples => samples}/localhosts | 0
{v2/samples => samples}/lookup_file.yml | 0
{v2/samples => samples}/lookup_password.yml | 0
{v2/samples => samples}/lookup_pipe.py | 0
{v2/samples => samples}/lookup_template.yml | 0
{v2/samples => samples}/multi.py | 0
{v2/samples => samples}/multi_queues.py | 0
.../roles/common/meta/main.yml | 0
.../roles/common/tasks/main.yml | 0
.../roles/role_a/meta/main.yml | 0
.../roles/role_a/tasks/main.yml | 0
.../roles/role_b/meta/main.yml | 0
.../roles/role_b/tasks/main.yml | 0
.../roles/test_become_r1/meta/main.yml | 0
.../roles/test_become_r1/tasks/main.yml | 0
.../roles/test_become_r2/meta/main.yml | 0
.../roles/test_become_r2/tasks/main.yml | 0
.../roles/test_role/meta/main.yml | 0
.../roles/test_role/tasks/main.yml | 0
.../roles/test_role_dep/tasks/main.yml | 0
{v2/samples => samples}/src | 0
{v2/samples => samples}/template.j2 | 0
{v2/samples => samples}/test_become.yml | 0
{v2/samples => samples}/test_big_debug.yml | 0
{v2/samples => samples}/test_big_ping.yml | 0
{v2/samples => samples}/test_block.yml | 0
.../test_blocks_of_blocks.yml | 0
{v2/samples => samples}/test_fact_gather.yml | 0
{v2/samples => samples}/test_free.yml | 0
{v2/samples => samples}/test_include.yml | 0
{v2/samples => samples}/test_pb.yml | 0
{v2/samples => samples}/test_role.yml | 0
.../test_roles_complex.yml | 0
{v2/samples => samples}/test_run_once.yml | 0
{v2/samples => samples}/test_sudo.yml | 0
{v2/samples => samples}/test_tags.yml | 0
.../testing/extra_vars.yml | 0
{v2/samples => samples}/testing/frag1 | 0
{v2/samples => samples}/testing/frag2 | 0
{v2/samples => samples}/testing/frag3 | 0
{v2/samples => samples}/testing/vars.yml | 0
{v2/samples => samples}/with_dict.yml | 0
{v2/samples => samples}/with_env.yml | 0
{v2/samples => samples}/with_fileglob.yml | 0
{v2/samples => samples}/with_first_found.yml | 0
{v2/samples => samples}/with_flattened.yml | 0
.../with_indexed_items.yml | 0
{v2/samples => samples}/with_items.yml | 0
{v2/samples => samples}/with_lines.yml | 0
{v2/samples => samples}/with_nested.yml | 0
.../with_random_choice.yml | 0
{v2/samples => samples}/with_sequence.yml | 0
{v2/samples => samples}/with_subelements.yml | 0
{v2/samples => samples}/with_together.yml | 0
{v2/test => test/units}/__init__.py | 0
{v2/test => test/units}/errors/__init__.py | 0
{v2/test => test/units}/errors/test_errors.py | 0
{v2/test => test/units}/executor/__init__.py | 0
.../units}/executor/test_play_iterator.py | 0
.../modules => test/units/mock}/__init__.py | 0
{v2/test => test/units}/mock/loader.py | 0
{v2/test => test/units}/parsing/__init__.py | 0
.../units}/parsing/test_data_loader.py | 0
.../units}/parsing/test_mod_args.py | 0
.../units}/parsing/test_splitter.py | 0
.../units}/parsing/vault/__init__.py | 0
.../units}/parsing/vault/test_vault.py | 0
.../units}/parsing/vault/test_vault_editor.py | 0
.../units/parsing/yaml}/__init__.py | 0
.../units}/parsing/yaml/test_loader.py | 0
{v2/test => test/units}/playbook/__init__.py | 0
.../units}/playbook/test_block.py | 0
{v2/test => test/units}/playbook/test_play.py | 0
.../units}/playbook/test_playbook.py | 0
{v2/test => test/units}/playbook/test_role.py | 0
{v2/test => test/units}/playbook/test_task.py | 0
{v2/test => test/units}/plugins/__init__.py | 0
{v2/test => test/units}/plugins/test_cache.py | 0
.../units}/plugins/test_connection.py | 0
.../units}/plugins/test_plugins.py | 0
{v2/test => test/units}/vars/__init__.py | 0
.../units}/vars/test_variable_manager.py | 0
{v2/ansible/utils => v1/ansible}/__init__.py | 6 +-
{lib => v1}/ansible/cache/__init__.py | 0
{lib => v1}/ansible/cache/base.py | 0
{lib => v1}/ansible/cache/jsonfile.py | 0
{lib => v1}/ansible/cache/memcached.py | 0
{lib => v1}/ansible/cache/memory.py | 0
{lib => v1}/ansible/cache/redis.py | 0
.../ansible/callback_plugins}/__init__.py | 0
{lib => v1}/ansible/callback_plugins/noop.py | 0
{lib => v1}/ansible/callbacks.py | 0
{lib => v1}/ansible/color.py | 0
{v2 => v1}/ansible/constants.py | 47 +-
{lib => v1}/ansible/errors.py | 0
{v2 => v1}/ansible/inventory/__init__.py | 96 +-
{v2 => v1}/ansible/inventory/dir.py | 31 +-
{v2 => v1}/ansible/inventory/expand_hosts.py | 3 -
{v2 => v1}/ansible/inventory/group.py | 54 +-
v1/ansible/inventory/host.py | 67 +
{v2 => v1}/ansible/inventory/ini.py | 58 +-
{v2 => v1}/ansible/inventory/script.py | 36 +-
.../inventory/vars_plugins}/__init__.py | 0
.../ansible/inventory/vars_plugins/noop.py | 2 -
{lib => v1}/ansible/module_common.py | 0
{v2 => v1}/ansible/module_utils/__init__.py | 0
{v2 => v1}/ansible/module_utils/a10.py | 0
{v2 => v1}/ansible/module_utils/basic.py | 68 +-
{v2 => v1}/ansible/module_utils/cloudstack.py | 0
{v2 => v1}/ansible/module_utils/database.py | 0
{v2 => v1}/ansible/module_utils/ec2.py | 0
{v2 => v1}/ansible/module_utils/facts.py | 0
{v2 => v1}/ansible/module_utils/gce.py | 0
.../ansible/module_utils/known_hosts.py | 0
{v2 => v1}/ansible/module_utils/openstack.py | 0
.../ansible/module_utils/powershell.ps1 | 6 +-
{v2 => v1}/ansible/module_utils/rax.py | 0
{v2 => v1}/ansible/module_utils/redhat.py | 0
{v2 => v1}/ansible/module_utils/splitter.py | 0
{v2 => v1}/ansible/module_utils/urls.py | 0
{lib => v1}/ansible/module_utils/vmware.py | 0
.../ansible/modules}/__init__.py | 0
v1/ansible/playbook/__init__.py | 874 +++++++++
v1/ansible/playbook/play.py | 949 ++++++++++
v1/ansible/playbook/task.py | 346 ++++
{lib => v1}/ansible/runner/__init__.py | 0
.../runner/action_plugins}/__init__.py | 0
.../ansible/runner/action_plugins/add_host.py | 0
.../ansible/runner/action_plugins/assemble.py | 0
.../ansible/runner/action_plugins/assert.py | 0
.../ansible/runner/action_plugins/async.py | 0
.../ansible/runner/action_plugins/copy.py | 0
.../ansible/runner/action_plugins/debug.py | 0
.../ansible/runner/action_plugins/fail.py | 0
.../ansible/runner/action_plugins/fetch.py | 0
.../ansible/runner/action_plugins/group_by.py | 0
.../runner/action_plugins/include_vars.py | 0
.../ansible/runner/action_plugins/normal.py | 0
.../ansible/runner/action_plugins/patch.py | 0
.../ansible/runner/action_plugins/pause.py | 0
.../ansible/runner/action_plugins/raw.py | 0
.../ansible/runner/action_plugins/script.py | 0
.../ansible/runner/action_plugins/set_fact.py | 0
.../runner/action_plugins/synchronize.py | 0
.../ansible/runner/action_plugins/template.py | 0
.../runner/action_plugins/unarchive.py | 0
.../ansible/runner/action_plugins/win_copy.py | 0
.../runner/action_plugins/win_template.py | 0
{lib => v1}/ansible/runner/connection.py | 0
.../runner/connection_plugins}/__init__.py | 0
.../runner/connection_plugins/accelerate.py | 0
.../runner/connection_plugins/chroot.py | 0
.../runner/connection_plugins/fireball.py | 0
.../runner/connection_plugins/funcd.py | 0
.../ansible/runner/connection_plugins/jail.py | 0
.../runner/connection_plugins/libvirt_lxc.py | 0
.../runner/connection_plugins/local.py | 0
.../runner/connection_plugins/paramiko_ssh.py | 0
.../ansible/runner/connection_plugins/ssh.py | 0
.../runner/connection_plugins/winrm.py | 0
.../ansible/runner/connection_plugins/zone.py | 0
.../runner/filter_plugins}/__init__.py | 0
.../ansible/runner/filter_plugins/core.py | 0
.../ansible/runner/filter_plugins/ipaddr.py | 0
.../runner/filter_plugins/mathstuff.py | 0
.../runner/lookup_plugins}/__init__.py | 0
.../runner/lookup_plugins/cartesian.py | 0
.../runner/lookup_plugins/consul_kv.py | 0
.../ansible/runner/lookup_plugins/csvfile.py | 0
.../ansible/runner/lookup_plugins/dict.py | 0
.../ansible/runner/lookup_plugins/dig.py | 0
.../ansible/runner/lookup_plugins/dnstxt.py | 0
.../ansible/runner/lookup_plugins/env.py | 0
.../ansible/runner/lookup_plugins/etcd.py | 0
.../ansible/runner/lookup_plugins/file.py | 0
.../ansible/runner/lookup_plugins/fileglob.py | 0
.../runner/lookup_plugins/first_found.py | 0
.../runner/lookup_plugins/flattened.py | 0
.../runner/lookup_plugins/indexed_items.py | 0
.../lookup_plugins/inventory_hostnames.py | 0
.../ansible/runner/lookup_plugins/items.py | 0
.../ansible/runner/lookup_plugins/lines.py | 0
.../ansible/runner/lookup_plugins/nested.py | 0
.../ansible/runner/lookup_plugins/password.py | 0
.../ansible/runner/lookup_plugins/pipe.py | 0
.../runner/lookup_plugins/random_choice.py | 0
.../ansible/runner/lookup_plugins/redis_kv.py | 0
.../ansible/runner/lookup_plugins/sequence.py | 0
.../runner/lookup_plugins/subelements.py | 0
.../ansible/runner/lookup_plugins/template.py | 0
.../ansible/runner/lookup_plugins/together.py | 0
.../ansible/runner/lookup_plugins/url.py | 0
{lib => v1}/ansible/runner/poller.py | 0
{lib => v1}/ansible/runner/return_data.py | 0
.../ansible/runner/shell_plugins}/__init__.py | 0
.../ansible/runner/shell_plugins/csh.py | 0
.../ansible/runner/shell_plugins/fish.py | 0
.../runner/shell_plugins/powershell.py | 0
.../ansible/runner/shell_plugins/sh.py | 0
v1/ansible/utils/__init__.py | 1660 +++++++++++++++++
{lib => v1}/ansible/utils/cmd_functions.py | 0
.../ansible/utils/display_functions.py | 0
{v2 => v1}/ansible/utils/hashing.py | 7 +-
{v2 => v1}/ansible/utils/module_docs.py | 4 +-
.../utils/module_docs_fragments/__init__.py | 0
.../utils/module_docs_fragments/aws.py | 0
.../utils/module_docs_fragments/cloudstack.py | 0
.../utils/module_docs_fragments/files.py | 0
.../utils/module_docs_fragments/openstack.py | 0
.../utils/module_docs_fragments/rackspace.py | 0
{lib => v1}/ansible/utils/plugins.py | 0
{lib => v1}/ansible/utils/string_functions.py | 0
{lib => v1}/ansible/utils/su_prompts.py | 0
{lib => v1}/ansible/utils/template.py | 0
{v2 => v1}/ansible/utils/unicode.py | 37 +-
v1/ansible/utils/vault.py | 585 ++++++
v1/bin/ansible | 207 ++
v1/bin/ansible-doc | 337 ++++
v1/bin/ansible-galaxy | 957 ++++++++++
v1/bin/ansible-playbook | 330 ++++
v1/bin/ansible-pull | 257 +++
v1/bin/ansible-vault | 241 +++
{test/units => v1/tests}/README.md | 0
{test/units => v1/tests}/TestConstants.py | 0
{test/units => v1/tests}/TestFilters.py | 0
{test/units => v1/tests}/TestInventory.py | 0
.../tests}/TestModuleUtilsBasic.py | 0
.../tests}/TestModuleUtilsDatabase.py | 0
{test/units => v1/tests}/TestModules.py | 0
{test/units => v1/tests}/TestPlayVarsFiles.py | 0
{test/units => v1/tests}/TestSynchronize.py | 0
{test/units => v1/tests}/TestUtils.py | 0
.../tests}/TestUtilsStringFunctions.py | 0
{test/units => v1/tests}/TestVault.py | 0
{test/units => v1/tests}/TestVaultEditor.py | 0
{test/units => v1/tests}/ansible.cfg | 0
.../tests}/inventory_test_data/ansible_hosts | 0
.../tests}/inventory_test_data/broken.yml | 0
.../inventory_test_data/common_vars.yml | 0
.../tests}/inventory_test_data/complex_hosts | 0
.../tests}/inventory_test_data/encrypted.yml | 0
.../tests}/inventory_test_data/hosts_list.yml | 0
.../inventory/test_alpha_end_before_beg | 0
.../inventory/test_combined_range | 0
.../inventory/test_incorrect_format | 0
.../inventory/test_incorrect_range | 0
.../inventory/test_leading_range | 0
.../inventory/test_missing_end | 0
.../inventory_test_data/inventory_api.py | 0
.../inventory_test_data/inventory_dir/0hosts | 0
.../inventory_dir/1mythology | 0
.../inventory_test_data/inventory_dir/2levels | 0
.../inventory_dir/3comments | 0
.../inventory_dir/4skip_extensions.ini | 0
.../tests}/inventory_test_data/large_range | 0
.../inventory_test_data/restrict_pattern | 0
.../tests}/inventory_test_data/simple_hosts | 0
.../tests}/module_tests/TestApt.py | 0
.../tests}/module_tests/TestDocker.py | 0
.../vault_test_data/foo-ansible-1.0.yml | 0
...oo-ansible-1.1-ansible-newline-ansible.yml | 0
.../vault_test_data/foo-ansible-1.1.yml | 0
v2/README-tests.md | 33 -
v2/ansible/__init__.py | 22 -
v2/ansible/inventory/host.py | 130 --
v2/ansible/modules/core | 1 -
v2/ansible/modules/extras | 1 -
v2/ansible/playbook/__init__.py | 85 -
v2/ansible/playbook/play.py | 263 ---
v2/ansible/playbook/task.py | 310 ---
v2/ansible/utils/vault.py | 56 -
v2/bin/ansible | 79 -
v2/bin/ansible-doc | 1 -
v2/bin/ansible-galaxy | 1 -
v2/bin/ansible-playbook | 1 -
v2/bin/ansible-pull | 1 -
v2/bin/ansible-vault | 1 -
v2/hacking/README.md | 48 -
v2/hacking/authors.sh | 14 -
v2/hacking/env-setup | 78 -
v2/hacking/env-setup.fish | 57 -
v2/hacking/get_library.py | 29 -
v2/hacking/module_formatter.py | 442 -----
v2/hacking/templates/rst.j2 | 153 --
v2/hacking/test-module | 192 --
v2/scripts/ansible | 20 -
v2/setup.py | 36 -
v2/test/mock/__init__.py | 20 -
486 files changed, 7948 insertions(+), 9070 deletions(-)
mode change 100755 => 120000 bin/ansible-doc
mode change 100755 => 120000 bin/ansible-galaxy
mode change 100755 => 120000 bin/ansible-playbook
mode change 100755 => 120000 bin/ansible-pull
mode change 100755 => 120000 bin/ansible-vault
rename {v2 => lib}/ansible/cli/__init__.py (100%)
rename {v2 => lib}/ansible/cli/adhoc.py (100%)
rename {v2 => lib}/ansible/cli/doc.py (100%)
rename {v2 => lib}/ansible/cli/galaxy.py (100%)
rename {v2 => lib}/ansible/cli/playbook.py (100%)
rename {v2 => lib}/ansible/cli/pull.py (100%)
rename {v2 => lib}/ansible/cli/vault.py (100%)
rename {v2 => lib}/ansible/compat/__init__.py (100%)
rename {v2 => lib}/ansible/compat/tests/__init__.py (100%)
rename {v2 => lib}/ansible/compat/tests/mock.py (100%)
rename {v2 => lib}/ansible/compat/tests/unittest.py (100%)
rename {v2 => lib}/ansible/config/__init__.py (100%)
rename {v2 => lib}/ansible/errors/__init__.py (100%)
rename {v2 => lib}/ansible/errors/yaml_strings.py (100%)
rename {v2 => lib}/ansible/executor/__init__.py (100%)
rename {v2 => lib}/ansible/executor/connection_info.py (100%)
rename {v2 => lib}/ansible/executor/module_common.py (100%)
rename {v2 => lib}/ansible/executor/play_iterator.py (100%)
rename {v2 => lib}/ansible/executor/playbook_executor.py (100%)
rename {v2 => lib}/ansible/executor/process/__init__.py (100%)
rename {v2 => lib}/ansible/executor/process/result.py (100%)
rename {v2 => lib}/ansible/executor/process/worker.py (100%)
rename {v2 => lib}/ansible/executor/stats.py (100%)
rename {v2 => lib}/ansible/executor/task_executor.py (100%)
rename {v2 => lib}/ansible/executor/task_queue_manager.py (100%)
rename {v2 => lib}/ansible/executor/task_queue_manager.py: (100%)
rename {v2 => lib}/ansible/executor/task_result.py (100%)
rename {v2 => lib}/ansible/galaxy/__init__.py (100%)
rename {v2 => lib}/ansible/galaxy/api.py (100%)
rename {v2 => lib}/ansible/galaxy/data/metadata_template.j2 (100%)
rename {v2 => lib}/ansible/galaxy/data/readme (100%)
rename {v2 => lib}/ansible/galaxy/role.py (100%)
delete mode 160000 lib/ansible/modules/core
delete mode 160000 lib/ansible/modules/extras
rename {v2 => lib}/ansible/new_inventory/__init__.py (100%)
rename {v2 => lib}/ansible/new_inventory/group.py (100%)
rename {v2 => lib}/ansible/new_inventory/host.py (100%)
rename {v2 => lib}/ansible/parsing/__init__.py (100%)
rename {v2 => lib}/ansible/parsing/mod_args.py (100%)
rename {v2 => lib}/ansible/parsing/splitter.py (100%)
rename {v2 => lib}/ansible/parsing/utils/__init__.py (100%)
rename {v2 => lib}/ansible/parsing/utils/jsonify.py (100%)
rename {v2 => lib}/ansible/parsing/vault/__init__.py (100%)
rename {v2 => lib}/ansible/parsing/yaml/__init__.py (100%)
rename {v2 => lib}/ansible/parsing/yaml/constructor.py (100%)
rename {v2 => lib}/ansible/parsing/yaml/loader.py (100%)
rename {v2 => lib}/ansible/parsing/yaml/objects.py (100%)
rename {v2 => lib}/ansible/playbook/attribute.py (100%)
rename {v2 => lib}/ansible/playbook/base.py (100%)
rename {v2 => lib}/ansible/playbook/become.py (100%)
rename {v2 => lib}/ansible/playbook/block.py (100%)
rename {v2 => lib}/ansible/playbook/conditional.py (100%)
rename {v2 => lib}/ansible/playbook/handler.py (100%)
rename {v2 => lib}/ansible/playbook/helpers.py (100%)
rename {v2 => lib}/ansible/playbook/playbook_include.py (100%)
rename {v2 => lib}/ansible/playbook/role/__init__.py (100%)
rename {v2 => lib}/ansible/playbook/role/definition.py (100%)
rename {v2 => lib}/ansible/playbook/role/include.py (100%)
rename {v2 => lib}/ansible/playbook/role/metadata.py (100%)
rename {v2 => lib}/ansible/playbook/role/requirement.py (100%)
rename {v2 => lib}/ansible/playbook/taggable.py (100%)
rename {v2 => lib}/ansible/playbook/vars.py (100%)
rename {v2 => lib}/ansible/playbook/vars_file.py (100%)
rename {v2 => lib}/ansible/plugins/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/action/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/action/add_host.py (100%)
rename {v2 => lib}/ansible/plugins/action/assemble.py (100%)
rename {v2 => lib}/ansible/plugins/action/assert.py (100%)
rename {v2 => lib}/ansible/plugins/action/async.py (100%)
rename {v2 => lib}/ansible/plugins/action/copy.py (100%)
rename {v2 => lib}/ansible/plugins/action/debug.py (100%)
rename {v2 => lib}/ansible/plugins/action/fail.py (100%)
rename {v2 => lib}/ansible/plugins/action/fetch.py (100%)
rename {v2 => lib}/ansible/plugins/action/group_by.py (100%)
rename {v2 => lib}/ansible/plugins/action/include_vars.py (100%)
rename {v2 => lib}/ansible/plugins/action/normal.py (100%)
rename {v2 => lib}/ansible/plugins/action/patch.py (100%)
rename {v2 => lib}/ansible/plugins/action/pause.py (100%)
rename {v2 => lib}/ansible/plugins/action/raw.py (100%)
rename {v2 => lib}/ansible/plugins/action/script.py (100%)
rename {v2 => lib}/ansible/plugins/action/set_fact.py (100%)
rename {v2 => lib}/ansible/plugins/action/synchronize.py (100%)
rename {v2 => lib}/ansible/plugins/action/template.py (100%)
rename {v2 => lib}/ansible/plugins/action/unarchive.py (100%)
rename {v2 => lib}/ansible/plugins/cache/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/cache/base.py (100%)
rename {v2 => lib}/ansible/plugins/cache/memcached.py (100%)
rename {v2 => lib}/ansible/plugins/cache/memory.py (100%)
rename {v2 => lib}/ansible/plugins/cache/redis.py (100%)
rename {v2 => lib}/ansible/plugins/callback/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/callback/default.py (100%)
rename {v2 => lib}/ansible/plugins/callback/minimal.py (100%)
rename {v2 => lib}/ansible/plugins/connections/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/connections/accelerate.py (100%)
rename {v2 => lib}/ansible/plugins/connections/chroot.py (100%)
rename {v2 => lib}/ansible/plugins/connections/funcd.py (100%)
rename {v2 => lib}/ansible/plugins/connections/jail.py (100%)
rename {v2 => lib}/ansible/plugins/connections/libvirt_lxc.py (100%)
rename {v2 => lib}/ansible/plugins/connections/local.py (100%)
rename {v2 => lib}/ansible/plugins/connections/paramiko_ssh.py (100%)
rename {v2 => lib}/ansible/plugins/connections/ssh.py (100%)
rename {v2 => lib}/ansible/plugins/connections/winrm.py (100%)
rename {v2 => lib}/ansible/plugins/connections/zone.py (100%)
rename {v2 => lib}/ansible/plugins/filter (100%)
rename {v2 => lib}/ansible/plugins/inventory/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/inventory/aggregate.py (100%)
rename {v2 => lib}/ansible/plugins/inventory/directory.py (100%)
rename {v2 => lib}/ansible/plugins/inventory/ini.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/cartesian.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/csvfile.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/dict.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/dnstxt.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/env.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/etcd.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/file.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/fileglob.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/first_found.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/flattened.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/indexed_items.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/inventory_hostnames.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/items.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/lines.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/nested.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/password.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/pipe.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/random_choice.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/redis_kv.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/sequence.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/subelements.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/template.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/together.py (100%)
rename {v2 => lib}/ansible/plugins/lookup/url.py (100%)
rename {v2 => lib}/ansible/plugins/shell/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/shell/csh.py (100%)
rename {v2 => lib}/ansible/plugins/shell/fish.py (100%)
rename {v2 => lib}/ansible/plugins/shell/powershell.py (100%)
rename {v2 => lib}/ansible/plugins/shell/sh.py (100%)
rename {v2 => lib}/ansible/plugins/strategies/__init__.py (100%)
rename {v2 => lib}/ansible/plugins/strategies/free.py (100%)
rename {v2 => lib}/ansible/plugins/strategies/linear.py (100%)
rename {v2 => lib}/ansible/plugins/vars/__init__.py (100%)
rename {v2 => lib}/ansible/template/__init__.py (100%)
rename {v2 => lib}/ansible/template/safe_eval.py (100%)
rename {v2 => lib}/ansible/template/template.py (100%)
rename {v2 => lib}/ansible/template/vars.py (100%)
rename {v2 => lib/ansible}/test-requirements.txt (100%)
rename {v2 => lib}/ansible/utils/boolean.py (100%)
rename {v2 => lib}/ansible/utils/color.py (100%)
rename {v2 => lib}/ansible/utils/debug.py (100%)
rename {v2 => lib}/ansible/utils/display.py (100%)
rename {v2 => lib}/ansible/utils/encrypt.py (100%)
rename {v2 => lib}/ansible/utils/listify.py (100%)
rename {v2 => lib}/ansible/utils/module_docs_fragments (100%)
rename {v2 => lib}/ansible/utils/path.py (100%)
rename {v2 => lib}/ansible/utils/vars.py (100%)
rename {v2 => lib}/ansible/vars/__init__.py (100%)
rename {v2 => lib}/ansible/vars/hostvars.py (100%)
rename {v2/samples => samples}/README.md (100%)
rename {v2/samples => samples}/common_include.yml (100%)
rename {v2/samples => samples}/hosts (100%)
rename {v2/samples => samples}/ignore_errors.yml (100%)
rename {v2/samples => samples}/include.yml (100%)
rename {v2/samples => samples}/inv_lg (100%)
rename {v2/samples => samples}/inv_md (100%)
rename {v2/samples => samples}/inv_sm (100%)
rename {v2/samples => samples}/l1_include.yml (100%)
rename {v2/samples => samples}/l2_include.yml (100%)
rename {v2/samples => samples}/l3_include.yml (100%)
rename {v2/samples => samples}/localhost_include.yml (100%)
rename {v2/samples => samples}/localhosts (100%)
rename {v2/samples => samples}/lookup_file.yml (100%)
rename {v2/samples => samples}/lookup_password.yml (100%)
rename {v2/samples => samples}/lookup_pipe.py (100%)
rename {v2/samples => samples}/lookup_template.yml (100%)
rename {v2/samples => samples}/multi.py (100%)
rename {v2/samples => samples}/multi_queues.py (100%)
rename {v2/samples => samples}/roles/common/meta/main.yml (100%)
rename {v2/samples => samples}/roles/common/tasks/main.yml (100%)
rename {v2/samples => samples}/roles/role_a/meta/main.yml (100%)
rename {v2/samples => samples}/roles/role_a/tasks/main.yml (100%)
rename {v2/samples => samples}/roles/role_b/meta/main.yml (100%)
rename {v2/samples => samples}/roles/role_b/tasks/main.yml (100%)
rename {v2/samples => samples}/roles/test_become_r1/meta/main.yml (100%)
rename {v2/samples => samples}/roles/test_become_r1/tasks/main.yml (100%)
rename {v2/samples => samples}/roles/test_become_r2/meta/main.yml (100%)
rename {v2/samples => samples}/roles/test_become_r2/tasks/main.yml (100%)
rename {v2/samples => samples}/roles/test_role/meta/main.yml (100%)
rename {v2/samples => samples}/roles/test_role/tasks/main.yml (100%)
rename {v2/samples => samples}/roles/test_role_dep/tasks/main.yml (100%)
rename {v2/samples => samples}/src (100%)
rename {v2/samples => samples}/template.j2 (100%)
rename {v2/samples => samples}/test_become.yml (100%)
rename {v2/samples => samples}/test_big_debug.yml (100%)
rename {v2/samples => samples}/test_big_ping.yml (100%)
rename {v2/samples => samples}/test_block.yml (100%)
rename {v2/samples => samples}/test_blocks_of_blocks.yml (100%)
rename {v2/samples => samples}/test_fact_gather.yml (100%)
rename {v2/samples => samples}/test_free.yml (100%)
rename {v2/samples => samples}/test_include.yml (100%)
rename {v2/samples => samples}/test_pb.yml (100%)
rename {v2/samples => samples}/test_role.yml (100%)
rename {v2/samples => samples}/test_roles_complex.yml (100%)
rename {v2/samples => samples}/test_run_once.yml (100%)
rename {v2/samples => samples}/test_sudo.yml (100%)
rename {v2/samples => samples}/test_tags.yml (100%)
rename {v2/samples => samples}/testing/extra_vars.yml (100%)
rename {v2/samples => samples}/testing/frag1 (100%)
rename {v2/samples => samples}/testing/frag2 (100%)
rename {v2/samples => samples}/testing/frag3 (100%)
rename {v2/samples => samples}/testing/vars.yml (100%)
rename {v2/samples => samples}/with_dict.yml (100%)
rename {v2/samples => samples}/with_env.yml (100%)
rename {v2/samples => samples}/with_fileglob.yml (100%)
rename {v2/samples => samples}/with_first_found.yml (100%)
rename {v2/samples => samples}/with_flattened.yml (100%)
rename {v2/samples => samples}/with_indexed_items.yml (100%)
rename {v2/samples => samples}/with_items.yml (100%)
rename {v2/samples => samples}/with_lines.yml (100%)
rename {v2/samples => samples}/with_nested.yml (100%)
rename {v2/samples => samples}/with_random_choice.yml (100%)
rename {v2/samples => samples}/with_sequence.yml (100%)
rename {v2/samples => samples}/with_subelements.yml (100%)
rename {v2/samples => samples}/with_together.yml (100%)
rename {v2/test => test/units}/__init__.py (100%)
rename {v2/test => test/units}/errors/__init__.py (100%)
rename {v2/test => test/units}/errors/test_errors.py (100%)
rename {v2/test => test/units}/executor/__init__.py (100%)
rename {v2/test => test/units}/executor/test_play_iterator.py (100%)
rename {v2/ansible/modules => test/units/mock}/__init__.py (100%)
rename {v2/test => test/units}/mock/loader.py (100%)
rename {v2/test => test/units}/parsing/__init__.py (100%)
rename {v2/test => test/units}/parsing/test_data_loader.py (100%)
rename {v2/test => test/units}/parsing/test_mod_args.py (100%)
rename {v2/test => test/units}/parsing/test_splitter.py (100%)
rename {v2/test => test/units}/parsing/vault/__init__.py (100%)
rename {v2/test => test/units}/parsing/vault/test_vault.py (100%)
rename {v2/test => test/units}/parsing/vault/test_vault_editor.py (100%)
rename {lib/ansible/callback_plugins => test/units/parsing/yaml}/__init__.py (100%)
rename {v2/test => test/units}/parsing/yaml/test_loader.py (100%)
rename {v2/test => test/units}/playbook/__init__.py (100%)
rename {v2/test => test/units}/playbook/test_block.py (100%)
rename {v2/test => test/units}/playbook/test_play.py (100%)
rename {v2/test => test/units}/playbook/test_playbook.py (100%)
rename {v2/test => test/units}/playbook/test_role.py (100%)
rename {v2/test => test/units}/playbook/test_task.py (100%)
rename {v2/test => test/units}/plugins/__init__.py (100%)
rename {v2/test => test/units}/plugins/test_cache.py (100%)
rename {v2/test => test/units}/plugins/test_connection.py (100%)
rename {v2/test => test/units}/plugins/test_plugins.py (100%)
rename {v2/test => test/units}/vars/__init__.py (100%)
rename {v2/test => test/units}/vars/test_variable_manager.py (100%)
rename {v2/ansible/utils => v1/ansible}/__init__.py (85%)
rename {lib => v1}/ansible/cache/__init__.py (100%)
rename {lib => v1}/ansible/cache/base.py (100%)
rename {lib => v1}/ansible/cache/jsonfile.py (100%)
rename {lib => v1}/ansible/cache/memcached.py (100%)
rename {lib => v1}/ansible/cache/memory.py (100%)
rename {lib => v1}/ansible/cache/redis.py (100%)
rename {lib/ansible/runner/action_plugins => v1/ansible/callback_plugins}/__init__.py (100%)
rename {lib => v1}/ansible/callback_plugins/noop.py (100%)
rename {lib => v1}/ansible/callbacks.py (100%)
rename {lib => v1}/ansible/color.py (100%)
rename {v2 => v1}/ansible/constants.py (89%)
rename {lib => v1}/ansible/errors.py (100%)
rename {v2 => v1}/ansible/inventory/__init__.py (88%)
rename {v2 => v1}/ansible/inventory/dir.py (91%)
rename {v2 => v1}/ansible/inventory/expand_hosts.py (97%)
rename {v2 => v1}/ansible/inventory/group.py (69%)
create mode 100644 v1/ansible/inventory/host.py
rename {v2 => v1}/ansible/inventory/ini.py (82%)
rename {v2 => v1}/ansible/inventory/script.py (82%)
rename {lib/ansible/runner/connection_plugins => v1/ansible/inventory/vars_plugins}/__init__.py (100%)
rename {v2 => v1}/ansible/inventory/vars_plugins/noop.py (94%)
rename {lib => v1}/ansible/module_common.py (100%)
rename {v2 => v1}/ansible/module_utils/__init__.py (100%)
rename {v2 => v1}/ansible/module_utils/a10.py (100%)
rename {v2 => v1}/ansible/module_utils/basic.py (97%)
rename {v2 => v1}/ansible/module_utils/cloudstack.py (100%)
rename {v2 => v1}/ansible/module_utils/database.py (100%)
rename {v2 => v1}/ansible/module_utils/ec2.py (100%)
rename {v2 => v1}/ansible/module_utils/facts.py (100%)
rename {v2 => v1}/ansible/module_utils/gce.py (100%)
rename {v2 => v1}/ansible/module_utils/known_hosts.py (100%)
rename {v2 => v1}/ansible/module_utils/openstack.py (100%)
rename {v2 => v1}/ansible/module_utils/powershell.ps1 (97%)
rename {v2 => v1}/ansible/module_utils/rax.py (100%)
rename {v2 => v1}/ansible/module_utils/redhat.py (100%)
rename {v2 => v1}/ansible/module_utils/splitter.py (100%)
rename {v2 => v1}/ansible/module_utils/urls.py (100%)
rename {lib => v1}/ansible/module_utils/vmware.py (100%)
rename {lib/ansible/runner/filter_plugins => v1/ansible/modules}/__init__.py (100%)
create mode 100644 v1/ansible/playbook/__init__.py
create mode 100644 v1/ansible/playbook/play.py
create mode 100644 v1/ansible/playbook/task.py
rename {lib => v1}/ansible/runner/__init__.py (100%)
rename {lib/ansible/runner/lookup_plugins => v1/ansible/runner/action_plugins}/__init__.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/add_host.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/assemble.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/assert.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/async.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/copy.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/debug.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/fail.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/fetch.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/group_by.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/include_vars.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/normal.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/patch.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/pause.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/raw.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/script.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/set_fact.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/synchronize.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/template.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/unarchive.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/win_copy.py (100%)
rename {lib => v1}/ansible/runner/action_plugins/win_template.py (100%)
rename {lib => v1}/ansible/runner/connection.py (100%)
rename {lib/ansible/runner/shell_plugins => v1/ansible/runner/connection_plugins}/__init__.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/accelerate.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/chroot.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/fireball.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/funcd.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/jail.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/libvirt_lxc.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/local.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/paramiko_ssh.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/ssh.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/winrm.py (100%)
rename {lib => v1}/ansible/runner/connection_plugins/zone.py (100%)
rename {lib/ansible/utils/module_docs_fragments => v1/ansible/runner/filter_plugins}/__init__.py (100%)
rename {lib => v1}/ansible/runner/filter_plugins/core.py (100%)
rename {lib => v1}/ansible/runner/filter_plugins/ipaddr.py (100%)
rename {lib => v1}/ansible/runner/filter_plugins/mathstuff.py (100%)
rename {v2/ansible/inventory/vars_plugins => v1/ansible/runner/lookup_plugins}/__init__.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/cartesian.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/consul_kv.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/csvfile.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/dict.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/dig.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/dnstxt.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/env.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/etcd.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/file.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/fileglob.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/first_found.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/flattened.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/indexed_items.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/inventory_hostnames.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/items.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/lines.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/nested.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/password.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/pipe.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/random_choice.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/redis_kv.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/sequence.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/subelements.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/template.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/together.py (100%)
rename {lib => v1}/ansible/runner/lookup_plugins/url.py (100%)
rename {lib => v1}/ansible/runner/poller.py (100%)
rename {lib => v1}/ansible/runner/return_data.py (100%)
rename {v2/test/parsing/yaml => v1/ansible/runner/shell_plugins}/__init__.py (100%)
rename {lib => v1}/ansible/runner/shell_plugins/csh.py (100%)
rename {lib => v1}/ansible/runner/shell_plugins/fish.py (100%)
rename {lib => v1}/ansible/runner/shell_plugins/powershell.py (100%)
rename {lib => v1}/ansible/runner/shell_plugins/sh.py (100%)
create mode 100644 v1/ansible/utils/__init__.py
rename {lib => v1}/ansible/utils/cmd_functions.py (100%)
rename {lib => v1}/ansible/utils/display_functions.py (100%)
rename {v2 => v1}/ansible/utils/hashing.py (92%)
rename {v2 => v1}/ansible/utils/module_docs.py (96%)
create mode 100644 v1/ansible/utils/module_docs_fragments/__init__.py
rename {lib => v1}/ansible/utils/module_docs_fragments/aws.py (100%)
rename {lib => v1}/ansible/utils/module_docs_fragments/cloudstack.py (100%)
rename {lib => v1}/ansible/utils/module_docs_fragments/files.py (100%)
rename {lib => v1}/ansible/utils/module_docs_fragments/openstack.py (100%)
rename {lib => v1}/ansible/utils/module_docs_fragments/rackspace.py (100%)
rename {lib => v1}/ansible/utils/plugins.py (100%)
rename {lib => v1}/ansible/utils/string_functions.py (100%)
rename {lib => v1}/ansible/utils/su_prompts.py (100%)
rename {lib => v1}/ansible/utils/template.py (100%)
rename {v2 => v1}/ansible/utils/unicode.py (93%)
create mode 100644 v1/ansible/utils/vault.py
create mode 100755 v1/bin/ansible
create mode 100755 v1/bin/ansible-doc
create mode 100755 v1/bin/ansible-galaxy
create mode 100755 v1/bin/ansible-playbook
create mode 100755 v1/bin/ansible-pull
create mode 100755 v1/bin/ansible-vault
rename {test/units => v1/tests}/README.md (100%)
rename {test/units => v1/tests}/TestConstants.py (100%)
rename {test/units => v1/tests}/TestFilters.py (100%)
rename {test/units => v1/tests}/TestInventory.py (100%)
rename {test/units => v1/tests}/TestModuleUtilsBasic.py (100%)
rename {test/units => v1/tests}/TestModuleUtilsDatabase.py (100%)
rename {test/units => v1/tests}/TestModules.py (100%)
rename {test/units => v1/tests}/TestPlayVarsFiles.py (100%)
rename {test/units => v1/tests}/TestSynchronize.py (100%)
rename {test/units => v1/tests}/TestUtils.py (100%)
rename {test/units => v1/tests}/TestUtilsStringFunctions.py (100%)
rename {test/units => v1/tests}/TestVault.py (100%)
rename {test/units => v1/tests}/TestVaultEditor.py (100%)
rename {test/units => v1/tests}/ansible.cfg (100%)
rename {test/units => v1/tests}/inventory_test_data/ansible_hosts (100%)
rename {test/units => v1/tests}/inventory_test_data/broken.yml (100%)
rename {test/units => v1/tests}/inventory_test_data/common_vars.yml (100%)
rename {test/units => v1/tests}/inventory_test_data/complex_hosts (100%)
rename {test/units => v1/tests}/inventory_test_data/encrypted.yml (100%)
rename {test/units => v1/tests}/inventory_test_data/hosts_list.yml (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory/test_alpha_end_before_beg (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory/test_combined_range (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory/test_incorrect_format (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory/test_incorrect_range (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory/test_leading_range (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory/test_missing_end (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory_api.py (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory_dir/0hosts (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory_dir/1mythology (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory_dir/2levels (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory_dir/3comments (100%)
rename {test/units => v1/tests}/inventory_test_data/inventory_dir/4skip_extensions.ini (100%)
rename {test/units => v1/tests}/inventory_test_data/large_range (100%)
rename {test/units => v1/tests}/inventory_test_data/restrict_pattern (100%)
rename {test/units => v1/tests}/inventory_test_data/simple_hosts (100%)
rename {test/units => v1/tests}/module_tests/TestApt.py (100%)
rename {test/units => v1/tests}/module_tests/TestDocker.py (100%)
rename {test/units => v1/tests}/vault_test_data/foo-ansible-1.0.yml (100%)
rename {test/units => v1/tests}/vault_test_data/foo-ansible-1.1-ansible-newline-ansible.yml (100%)
rename {test/units => v1/tests}/vault_test_data/foo-ansible-1.1.yml (100%)
delete mode 100644 v2/README-tests.md
delete mode 100644 v2/ansible/__init__.py
delete mode 100644 v2/ansible/inventory/host.py
delete mode 160000 v2/ansible/modules/core
delete mode 160000 v2/ansible/modules/extras
delete mode 100644 v2/ansible/playbook/__init__.py
delete mode 100644 v2/ansible/playbook/play.py
delete mode 100644 v2/ansible/playbook/task.py
delete mode 100644 v2/ansible/utils/vault.py
delete mode 100755 v2/bin/ansible
delete mode 120000 v2/bin/ansible-doc
delete mode 120000 v2/bin/ansible-galaxy
delete mode 120000 v2/bin/ansible-playbook
delete mode 120000 v2/bin/ansible-pull
delete mode 120000 v2/bin/ansible-vault
delete mode 100644 v2/hacking/README.md
delete mode 100755 v2/hacking/authors.sh
delete mode 100644 v2/hacking/env-setup
delete mode 100644 v2/hacking/env-setup.fish
delete mode 100755 v2/hacking/get_library.py
delete mode 100755 v2/hacking/module_formatter.py
delete mode 100644 v2/hacking/templates/rst.j2
delete mode 100755 v2/hacking/test-module
delete mode 100644 v2/scripts/ansible
delete mode 100644 v2/setup.py
delete mode 100644 v2/test/mock/__init__.py
diff --git a/.gitmodules b/.gitmodules
index 3f14953ec8..e69de29bb2 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,16 +0,0 @@
-[submodule "lib/ansible/modules/core"]
- path = lib/ansible/modules/core
- url = https://github.com/ansible/ansible-modules-core.git
- branch = devel
-[submodule "lib/ansible/modules/extras"]
- path = lib/ansible/modules/extras
- url = https://github.com/ansible/ansible-modules-extras.git
- branch = devel
-[submodule "v2/ansible/modules/core"]
- path = v2/ansible/modules/core
- url = https://github.com/ansible/ansible-modules-core.git
- branch = devel
-[submodule "v2/ansible/modules/extras"]
- path = v2/ansible/modules/extras
- url = https://github.com/ansible/ansible-modules-extras.git
- branch = devel
diff --git a/bin/ansible b/bin/ansible
index 7fec34ec81..467dd505a2 100755
--- a/bin/ansible
+++ b/bin/ansible
@@ -18,6 +18,8 @@
# along with Ansible. If not, see .
########################################################
+from __future__ import (absolute_import)
+__metaclass__ = type
__requires__ = ['ansible']
try:
@@ -33,175 +35,45 @@ except Exception:
import os
import sys
-from ansible.runner import Runner
-import ansible.constants as C
-from ansible import utils
-from ansible import errors
-from ansible import callbacks
-from ansible import inventory
-########################################################
-
-class Cli(object):
- ''' code behind bin/ansible '''
-
- # ----------------------------------------------
-
- def __init__(self):
- self.stats = callbacks.AggregateStats()
- self.callbacks = callbacks.CliRunnerCallbacks()
- if C.DEFAULT_LOAD_CALLBACK_PLUGINS:
- callbacks.load_callback_plugins()
-
- # ----------------------------------------------
-
- def parse(self):
- ''' create an options parser for bin/ansible '''
-
- parser = utils.base_parser(
- constants=C,
- runas_opts=True,
- subset_opts=True,
- async_opts=True,
- output_opts=True,
- connect_opts=True,
- check_opts=True,
- diff_opts=False,
- usage='%prog [options]'
- )
-
- parser.add_option('-a', '--args', dest='module_args',
- help="module arguments", default=C.DEFAULT_MODULE_ARGS)
- parser.add_option('-m', '--module-name', dest='module_name',
- help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
- default=C.DEFAULT_MODULE_NAME)
-
- options, args = parser.parse_args()
- self.callbacks.options = options
-
- if len(args) == 0 or len(args) > 1:
- parser.print_help()
- sys.exit(1)
-
- # privlege escalation command line arguments need to be mutually exclusive
- utils.check_mutually_exclusive_privilege(options, parser)
-
- if (options.ask_vault_pass and options.vault_password_file):
- parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")
-
- return (options, args)
-
- # ----------------------------------------------
-
- def run(self, options, args):
- ''' use Runner lib to do SSH things '''
-
- pattern = args[0]
-
- sshpass = becomepass = vault_pass = become_method = None
-
- # Never ask for an SSH password when we run with local connection
- if options.connection == "local":
- options.ask_pass = False
- else:
- options.ask_pass = options.ask_pass or C.DEFAULT_ASK_PASS
-
- options.ask_vault_pass = options.ask_vault_pass or C.DEFAULT_ASK_VAULT_PASS
-
- # become
- utils.normalize_become_options(options)
- prompt_method = utils.choose_pass_prompt(options)
- (sshpass, becomepass, vault_pass) = utils.ask_passwords(ask_pass=options.ask_pass, become_ask_pass=options.become_ask_pass, ask_vault_pass=options.ask_vault_pass, become_method=prompt_method)
-
- # read vault_pass from a file
- if not options.ask_vault_pass and options.vault_password_file:
- vault_pass = utils.read_vault_file(options.vault_password_file)
-
- extra_vars = utils.parse_extra_vars(options.extra_vars, vault_pass)
-
- inventory_manager = inventory.Inventory(options.inventory, vault_password=vault_pass)
- if options.subset:
- inventory_manager.subset(options.subset)
- hosts = inventory_manager.list_hosts(pattern)
-
- if len(hosts) == 0:
- callbacks.display("No hosts matched", stderr=True)
- sys.exit(0)
-
- if options.listhosts:
- for host in hosts:
- callbacks.display(' %s' % host)
- sys.exit(0)
-
- if options.module_name in ['command','shell'] and not options.module_args:
- callbacks.display("No argument passed to %s module" % options.module_name, color='red', stderr=True)
- sys.exit(1)
-
- if options.tree:
- utils.prepare_writeable_dir(options.tree)
-
- runner = Runner(
- module_name=options.module_name,
- module_path=options.module_path,
- module_args=options.module_args,
- remote_user=options.remote_user,
- remote_pass=sshpass,
- inventory=inventory_manager,
- timeout=options.timeout,
- private_key_file=options.private_key_file,
- forks=options.forks,
- pattern=pattern,
- callbacks=self.callbacks,
- transport=options.connection,
- subset=options.subset,
- check=options.check,
- diff=options.check,
- vault_pass=vault_pass,
- become=options.become,
- become_method=options.become_method,
- become_pass=becomepass,
- become_user=options.become_user,
- extra_vars=extra_vars,
- )
-
- if options.seconds:
- callbacks.display("background launch...\n\n", color='cyan')
- results, poller = runner.run_async(options.seconds)
- results = self.poll_while_needed(poller, options)
- else:
- results = runner.run()
-
- return (runner, results)
-
- # ----------------------------------------------
-
- def poll_while_needed(self, poller, options):
- ''' summarize results from Runner '''
-
- # BACKGROUND POLL LOGIC when -B and -P are specified
- if options.seconds and options.poll_interval > 0:
- poller.wait(options.seconds, options.poll_interval)
-
- return poller.results
-
+from ansible.errors import AnsibleError, AnsibleOptionsError
+from ansible.utils.display import Display
########################################################
if __name__ == '__main__':
- callbacks.display("", log_only=True)
- callbacks.display(" ".join(sys.argv), log_only=True)
- callbacks.display("", log_only=True)
- cli = Cli()
- (options, args) = cli.parse()
+ cli = None
+ display = Display()
+ me = os.path.basename(__file__)
+
try:
- (runner, results) = cli.run(options, args)
- for result in results['contacted'].values():
- if 'failed' in result or result.get('rc', 0) != 0:
- sys.exit(2)
- if results['dark']:
- sys.exit(3)
- except errors.AnsibleError, e:
- # Generic handler for ansible specific errors
- callbacks.display("ERROR: %s" % str(e), stderr=True, color='red')
- sys.exit(1)
+ if me == 'ansible-playbook':
+ from ansible.cli.playbook import PlaybookCLI as mycli
+ elif me == 'ansible':
+ from ansible.cli.adhoc import AdHocCLI as mycli
+ elif me == 'ansible-pull':
+ from ansible.cli.pull import PullCLI as mycli
+ elif me == 'ansible-doc':
+ from ansible.cli.doc import DocCLI as mycli
+ elif me == 'ansible-vault':
+ from ansible.cli.vault import VaultCLI as mycli
+ elif me == 'ansible-galaxy':
+ from ansible.cli.galaxy import GalaxyCLI as mycli
+ cli = mycli(sys.argv, display=display)
+ if cli:
+ cli.parse()
+ sys.exit(cli.run())
+ else:
+ raise AnsibleError("Program not implemented: %s" % me)
+
+ except AnsibleOptionsError as e:
+ cli.parser.print_help()
+ display.display(str(e), stderr=True, color='red')
+ sys.exit(1)
+ except AnsibleError as e:
+ display.display(str(e), stderr=True, color='red')
+ sys.exit(2)
+ except KeyboardInterrupt:
+ display.error("interrupted")
+ sys.exit(4)
diff --git a/bin/ansible-doc b/bin/ansible-doc
deleted file mode 100755
index dff7cecce7..0000000000
--- a/bin/ansible-doc
+++ /dev/null
@@ -1,337 +0,0 @@
-#!/usr/bin/env python
-
-# (c) 2012, Jan-Piet Mens
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-
-import os
-import sys
-import textwrap
-import re
-import optparse
-import datetime
-import subprocess
-import fcntl
-import termios
-import struct
-
-from ansible import utils
-from ansible.utils import module_docs
-import ansible.constants as C
-from ansible.utils import version
-import traceback
-
-MODULEDIR = C.DEFAULT_MODULE_PATH
-
-BLACKLIST_EXTS = ('.pyc', '.swp', '.bak', '~', '.rpm')
-IGNORE_FILES = [ "COPYING", "CONTRIBUTING", "LICENSE", "README", "VERSION"]
-
-_ITALIC = re.compile(r"I\(([^)]+)\)")
-_BOLD = re.compile(r"B\(([^)]+)\)")
-_MODULE = re.compile(r"M\(([^)]+)\)")
-_URL = re.compile(r"U\(([^)]+)\)")
-_CONST = re.compile(r"C\(([^)]+)\)")
-PAGER = 'less'
-LESS_OPTS = 'FRSX' # -F (quit-if-one-screen) -R (allow raw ansi control chars)
- # -S (chop long lines) -X (disable termcap init and de-init)
-
-def pager_print(text):
- ''' just print text '''
- print text
-
-def pager_pipe(text, cmd):
- ''' pipe text through a pager '''
- if 'LESS' not in os.environ:
- os.environ['LESS'] = LESS_OPTS
- try:
- cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
- cmd.communicate(input=text)
- except IOError:
- pass
- except KeyboardInterrupt:
- pass
-
-def pager(text):
- ''' find reasonable way to display text '''
- # this is a much simpler form of what is in pydoc.py
- if not sys.stdout.isatty():
- pager_print(text)
- elif 'PAGER' in os.environ:
- if sys.platform == 'win32':
- pager_print(text)
- else:
- pager_pipe(text, os.environ['PAGER'])
- elif subprocess.call('(less --version) 2> /dev/null', shell = True) == 0:
- pager_pipe(text, 'less')
- else:
- pager_print(text)
-
-def tty_ify(text):
-
- t = _ITALIC.sub("`" + r"\1" + "'", text) # I(word) => `word'
- t = _BOLD.sub("*" + r"\1" + "*", t) # B(word) => *word*
- t = _MODULE.sub("[" + r"\1" + "]", t) # M(word) => [word]
- t = _URL.sub(r"\1", t) # U(word) => word
- t = _CONST.sub("`" + r"\1" + "'", t) # C(word) => `word'
-
- return t
-
-def get_man_text(doc):
-
- opt_indent=" "
- text = []
- text.append("> %s\n" % doc['module'].upper())
-
- desc = " ".join(doc['description'])
-
- text.append("%s\n" % textwrap.fill(tty_ify(desc), initial_indent=" ", subsequent_indent=" "))
-
- if 'option_keys' in doc and len(doc['option_keys']) > 0:
- text.append("Options (= is mandatory):\n")
-
- for o in sorted(doc['option_keys']):
- opt = doc['options'][o]
-
- if opt.get('required', False):
- opt_leadin = "="
- else:
- opt_leadin = "-"
-
- text.append("%s %s" % (opt_leadin, o))
-
- desc = " ".join(opt['description'])
-
- if 'choices' in opt:
- choices = ", ".join(str(i) for i in opt['choices'])
- desc = desc + " (Choices: " + choices + ")"
- if 'default' in opt:
- default = str(opt['default'])
- desc = desc + " [Default: " + default + "]"
- text.append("%s\n" % textwrap.fill(tty_ify(desc), initial_indent=opt_indent,
- subsequent_indent=opt_indent))
-
- if 'notes' in doc and len(doc['notes']) > 0:
- notes = " ".join(doc['notes'])
- text.append("Notes:%s\n" % textwrap.fill(tty_ify(notes), initial_indent=" ",
- subsequent_indent=opt_indent))
-
-
- if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
- req = ", ".join(doc['requirements'])
- text.append("Requirements:%s\n" % textwrap.fill(tty_ify(req), initial_indent=" ",
- subsequent_indent=opt_indent))
-
- if 'examples' in doc and len(doc['examples']) > 0:
- text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
- for ex in doc['examples']:
- text.append("%s\n" % (ex['code']))
-
- if 'plainexamples' in doc and doc['plainexamples'] is not None:
- text.append("EXAMPLES:")
- text.append(doc['plainexamples'])
- if 'returndocs' in doc and doc['returndocs'] is not None:
- text.append("RETURN VALUES:")
- text.append(doc['returndocs'])
- text.append('')
-
- return "\n".join(text)
-
-
-def get_snippet_text(doc):
-
- text = []
- desc = tty_ify(" ".join(doc['short_description']))
- text.append("- name: %s" % (desc))
- text.append(" action: %s" % (doc['module']))
-
- for o in sorted(doc['options'].keys()):
- opt = doc['options'][o]
- desc = tty_ify(" ".join(opt['description']))
-
- if opt.get('required', False):
- s = o + "="
- else:
- s = o
-
- text.append(" %-20s # %s" % (s, desc))
- text.append('')
-
- return "\n".join(text)
-
-def get_module_list_text(module_list):
- tty_size = 0
- if os.isatty(0):
- tty_size = struct.unpack('HHHH',
- fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack('HHHH', 0, 0, 0, 0)))[1]
- columns = max(60, tty_size)
- displace = max(len(x) for x in module_list)
- linelimit = columns - displace - 5
- text = []
- deprecated = []
- for module in sorted(set(module_list)):
-
- if module in module_docs.BLACKLIST_MODULES:
- continue
-
- filename = utils.plugins.module_finder.find_plugin(module)
-
- if filename is None:
- continue
- if filename.endswith(".ps1"):
- continue
- if os.path.isdir(filename):
- continue
-
- try:
- doc, plainexamples, returndocs = module_docs.get_docstring(filename)
- desc = tty_ify(doc.get('short_description', '?')).strip()
- if len(desc) > linelimit:
- desc = desc[:linelimit] + '...'
-
- if module.startswith('_'): # Handle deprecated
- deprecated.append("%-*s %-*.*s" % (displace, module[1:], linelimit, len(desc), desc))
- else:
- text.append("%-*s %-*.*s" % (displace, module, linelimit, len(desc), desc))
- except:
- traceback.print_exc()
- sys.stderr.write("ERROR: module %s has a documentation error formatting or is missing documentation\n" % module)
-
- if len(deprecated) > 0:
- text.append("\nDEPRECATED:")
- text.extend(deprecated)
- return "\n".join(text)
-
-def find_modules(path, module_list):
-
- if os.path.isdir(path):
- for module in os.listdir(path):
- if module.startswith('.'):
- continue
- elif os.path.isdir(module):
- find_modules(module, module_list)
- elif any(module.endswith(x) for x in BLACKLIST_EXTS):
- continue
- elif module.startswith('__'):
- continue
- elif module in IGNORE_FILES:
- continue
- elif module.startswith('_'):
- fullpath = '/'.join([path,module])
- if os.path.islink(fullpath): # avoids aliases
- continue
-
- module = os.path.splitext(module)[0] # removes the extension
- module_list.append(module)
-
-def main():
-
- p = optparse.OptionParser(
- version=version("%prog"),
- usage='usage: %prog [options] [module...]',
- description='Show Ansible module documentation',
- )
-
- p.add_option("-M", "--module-path",
- action="store",
- dest="module_path",
- default=MODULEDIR,
- help="Ansible modules/ directory")
- p.add_option("-l", "--list",
- action="store_true",
- default=False,
- dest='list_dir',
- help='List available modules')
- p.add_option("-s", "--snippet",
- action="store_true",
- default=False,
- dest='show_snippet',
- help='Show playbook snippet for specified module(s)')
- p.add_option('-v', action='version', help='Show version number and exit')
-
- (options, args) = p.parse_args()
-
- if options.module_path is not None:
- for i in options.module_path.split(os.pathsep):
- utils.plugins.module_finder.add_directory(i)
-
- if options.list_dir:
- # list modules
- paths = utils.plugins.module_finder._get_paths()
- module_list = []
- for path in paths:
- find_modules(path, module_list)
-
- pager(get_module_list_text(module_list))
- sys.exit()
-
- if len(args) == 0:
- p.print_help()
-
- def print_paths(finder):
- ''' Returns a string suitable for printing of the search path '''
-
- # Uses a list to get the order right
- ret = []
- for i in finder._get_paths():
- if i not in ret:
- ret.append(i)
- return os.pathsep.join(ret)
-
- text = ''
- for module in args:
-
- filename = utils.plugins.module_finder.find_plugin(module)
- if filename is None:
- sys.stderr.write("module %s not found in %s\n" % (module, print_paths(utils.plugins.module_finder)))
- continue
-
- if any(filename.endswith(x) for x in BLACKLIST_EXTS):
- continue
-
- try:
- doc, plainexamples, returndocs = module_docs.get_docstring(filename)
- except:
- traceback.print_exc()
- sys.stderr.write("ERROR: module %s has a documentation error formatting or is missing documentation\n" % module)
- continue
-
- if doc is not None:
-
- all_keys = []
- for (k,v) in doc['options'].iteritems():
- all_keys.append(k)
- all_keys = sorted(all_keys)
- doc['option_keys'] = all_keys
-
- doc['filename'] = filename
- doc['docuri'] = doc['module'].replace('_', '-')
- doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
- doc['plainexamples'] = plainexamples
- doc['returndocs'] = returndocs
-
- if options.show_snippet:
- text += get_snippet_text(doc)
- else:
- text += get_man_text(doc)
- else:
- # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
- # probably a quoting issue.
- sys.stderr.write("ERROR: module %s missing documentation (or could not parse documentation)\n" % module)
- pager(text)
-
-if __name__ == '__main__':
- main()
diff --git a/bin/ansible-doc b/bin/ansible-doc
new file mode 120000
index 0000000000..cabb1f519a
--- /dev/null
+++ b/bin/ansible-doc
@@ -0,0 +1 @@
+ansible
\ No newline at end of file
diff --git a/bin/ansible-galaxy b/bin/ansible-galaxy
deleted file mode 100755
index a6d625671e..0000000000
--- a/bin/ansible-galaxy
+++ /dev/null
@@ -1,957 +0,0 @@
-#!/usr/bin/env python
-
-########################################################################
-#
-# (C) 2013, James Cammarata
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-########################################################################
-
-import datetime
-import json
-import os
-import os.path
-import shutil
-import subprocess
-import sys
-import tarfile
-import tempfile
-import urllib
-import urllib2
-import yaml
-
-from collections import defaultdict
-from distutils.version import LooseVersion
-from jinja2 import Environment
-from optparse import OptionParser
-
-import ansible.constants as C
-import ansible.utils
-from ansible.errors import AnsibleError
-
-default_meta_template = """---
-galaxy_info:
- author: {{ author }}
- description: {{description}}
- company: {{ company }}
- # If the issue tracker for your role is not on github, uncomment the
- # next line and provide a value
- # issue_tracker_url: {{ issue_tracker_url }}
- # Some suggested licenses:
- # - BSD (default)
- # - MIT
- # - GPLv2
- # - GPLv3
- # - Apache
- # - CC-BY
- license: {{ license }}
- min_ansible_version: {{ min_ansible_version }}
- #
- # Below are all platforms currently available. Just uncomment
- # the ones that apply to your role. If you don't see your
- # platform on this list, let us know and we'll get it added!
- #
- #platforms:
- {%- for platform,versions in platforms.iteritems() %}
- #- name: {{ platform }}
- # versions:
- # - all
- {%- for version in versions %}
- # - {{ version }}
- {%- endfor %}
- {%- endfor %}
- #
- # Below are all categories currently available. Just as with
- # the platforms above, uncomment those that apply to your role.
- #
- #categories:
- {%- for category in categories %}
- #- {{ category.name }}
- {%- endfor %}
-dependencies: []
- # List your role dependencies here, one per line.
- # Be sure to remove the '[]' above if you add dependencies
- # to this list.
- {% for dependency in dependencies %}
- #- {{ dependency }}
- {% endfor %}
-
-"""
-
-default_readme_template = """Role Name
-=========
-
-A brief description of the role goes here.
-
-Requirements
-------------
-
-Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
-
-Role Variables
---------------
-
-A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well.
-
-Dependencies
-------------
-
-A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
-
-Example Playbook
-----------------
-
-Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
-
- - hosts: servers
- roles:
- - { role: username.rolename, x: 42 }
-
-License
--------
-
-BSD
-
-Author Information
-------------------
-
-An optional section for the role authors to include contact information, or a website (HTML is not allowed).
-"""
-
-#-------------------------------------------------------------------------------------
-# Utility functions for parsing actions/options
-#-------------------------------------------------------------------------------------
-
-VALID_ACTIONS = ("init", "info", "install", "list", "remove")
-SKIP_INFO_KEYS = ("platforms","readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" )
-
-def get_action(args):
- """
- Get the action the user wants to execute from the
- sys argv list.
- """
- for i in range(0,len(args)):
- arg = args[i]
- if arg in VALID_ACTIONS:
- del args[i]
- return arg
- return None
-
-def build_option_parser(action):
- """
- Builds an option parser object based on the action
- the user wants to execute.
- """
-
- usage = "usage: %%prog [%s] [--help] [options] ..." % "|".join(VALID_ACTIONS)
- epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
- OptionParser.format_epilog = lambda self, formatter: self.epilog
- parser = OptionParser(usage=usage, epilog=epilog)
-
- if not action:
- parser.print_help()
- sys.exit()
-
- # options for all actions
- # - none yet
-
- # options specific to actions
- if action == "info":
- parser.set_usage("usage: %prog info [options] role_name[,version]")
- elif action == "init":
- parser.set_usage("usage: %prog init [options] role_name")
- parser.add_option(
- '-p', '--init-path', dest='init_path', default="./",
- help='The path in which the skeleton role will be created. '
- 'The default is the current working directory.')
- parser.add_option(
- '--offline', dest='offline', default=False, action='store_true',
- help="Don't query the galaxy API when creating roles")
- elif action == "install":
- parser.set_usage("usage: %prog install [options] [-r FILE | role_name(s)[,version] | scm+role_repo_url[,version] | tar_file(s)]")
- parser.add_option(
- '-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
- help='Ignore errors and continue with the next specified role.')
- parser.add_option(
- '-n', '--no-deps', dest='no_deps', action='store_true', default=False,
- help='Don\'t download roles listed as dependencies')
- parser.add_option(
- '-r', '--role-file', dest='role_file',
- help='A file containing a list of roles to be imported')
- elif action == "remove":
- parser.set_usage("usage: %prog remove role1 role2 ...")
- elif action == "list":
- parser.set_usage("usage: %prog list [role_name]")
-
- # options that apply to more than one action
- if action != "init":
- parser.add_option(
- '-p', '--roles-path', dest='roles_path', default=C.DEFAULT_ROLES_PATH,
- help='The path to the directory containing your roles. '
- 'The default is the roles_path configured in your '
- 'ansible.cfg file (/etc/ansible/roles if not configured)')
-
- if action in ("info","init","install"):
- parser.add_option(
- '-s', '--server', dest='api_server', default="galaxy.ansible.com",
- help='The API server destination')
-
- if action in ("init","install"):
- parser.add_option(
- '-f', '--force', dest='force', action='store_true', default=False,
- help='Force overwriting an existing role')
- # done, return the parser
- return parser
-
-def get_opt(options, k, defval=""):
- """
- Returns an option from an Optparse values instance.
- """
- try:
- data = getattr(options, k)
- except:
- return defval
- if k == "roles_path":
- if os.pathsep in data:
- data = data.split(os.pathsep)[0]
- return data
-
-def exit_without_ignore(options, rc=1):
- """
- Exits with the specified return code unless the
- option --ignore-errors was specified
- """
-
- if not get_opt(options, "ignore_errors", False):
- print '- you can use --ignore-errors to skip failed roles.'
- sys.exit(rc)
-
-
-#-------------------------------------------------------------------------------------
-# Galaxy API functions
-#-------------------------------------------------------------------------------------
-
-def api_get_config(api_server):
- """
- Fetches the Galaxy API current version to ensure
- the API server is up and reachable.
- """
-
- try:
- url = 'https://%s/api/' % api_server
- data = json.load(urllib2.urlopen(url))
- if not data.get("current_version",None):
- return None
- else:
- return data
- except:
- return None
-
-def api_lookup_role_by_name(api_server, role_name, notify=True):
- """
- Uses the Galaxy API to do a lookup on the role owner/name.
- """
-
- role_name = urllib.quote(role_name)
-
- try:
- parts = role_name.split(".")
- user_name = ".".join(parts[0:-1])
- role_name = parts[-1]
- if notify:
- print "- downloading role '%s', owned by %s" % (role_name, user_name)
- except:
- parser.print_help()
- print "- invalid role name (%s). Specify role as format: username.rolename" % role_name
- sys.exit(1)
-
- url = 'https://%s/api/v1/roles/?owner__username=%s&name=%s' % (api_server,user_name,role_name)
- try:
- data = json.load(urllib2.urlopen(url))
- if len(data["results"]) == 0:
- return None
- else:
- return data["results"][0]
- except:
- return None
-
-def api_fetch_role_related(api_server, related, role_id):
- """
- Uses the Galaxy API to fetch the list of related items for
- the given role. The url comes from the 'related' field of
- the role.
- """
-
- try:
- url = 'https://%s/api/v1/roles/%d/%s/?page_size=50' % (api_server, int(role_id), related)
- data = json.load(urllib2.urlopen(url))
- results = data['results']
- done = (data.get('next', None) == None)
- while not done:
- url = 'https://%s%s' % (api_server, data['next'])
- print url
- data = json.load(urllib2.urlopen(url))
- results += data['results']
- done = (data.get('next', None) == None)
- return results
- except:
- return None
-
-def api_get_list(api_server, what):
- """
- Uses the Galaxy API to fetch the list of items specified.
- """
-
- try:
- url = 'https://%s/api/v1/%s/?page_size' % (api_server, what)
- data = json.load(urllib2.urlopen(url))
- if "results" in data:
- results = data['results']
- else:
- results = data
- done = True
- if "next" in data:
- done = (data.get('next', None) == None)
- while not done:
- url = 'https://%s%s' % (api_server, data['next'])
- print url
- data = json.load(urllib2.urlopen(url))
- results += data['results']
- done = (data.get('next', None) == None)
- return results
- except:
- print "- failed to download the %s list" % what
- return None
-
-#-------------------------------------------------------------------------------------
-# scm repo utility functions
-#-------------------------------------------------------------------------------------
-
-def scm_archive_role(scm, role_url, role_version, role_name):
- if scm not in ['hg', 'git']:
- print "- scm %s is not currently supported" % scm
- return False
- tempdir = tempfile.mkdtemp()
- clone_cmd = [scm, 'clone', role_url, role_name]
- with open('/dev/null', 'w') as devnull:
- try:
- print "- executing: %s" % " ".join(clone_cmd)
- popen = subprocess.Popen(clone_cmd, cwd=tempdir, stdout=devnull, stderr=devnull)
- except:
- raise AnsibleError("error executing: %s" % " ".join(clone_cmd))
- rc = popen.wait()
- if rc != 0:
- print "- command %s failed" % ' '.join(clone_cmd)
- print " in directory %s" % tempdir
- return False
-
- temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar')
- if scm == 'hg':
- archive_cmd = ['hg', 'archive', '--prefix', "%s/" % role_name]
- if role_version:
- archive_cmd.extend(['-r', role_version])
- archive_cmd.append(temp_file.name)
- if scm == 'git':
- archive_cmd = ['git', 'archive', '--prefix=%s/' % role_name, '--output=%s' % temp_file.name]
- if role_version:
- archive_cmd.append(role_version)
- else:
- archive_cmd.append('HEAD')
-
- with open('/dev/null', 'w') as devnull:
- print "- executing: %s" % " ".join(archive_cmd)
- popen = subprocess.Popen(archive_cmd, cwd=os.path.join(tempdir, role_name),
- stderr=devnull, stdout=devnull)
- rc = popen.wait()
- if rc != 0:
- print "- command %s failed" % ' '.join(archive_cmd)
- print " in directory %s" % tempdir
- return False
-
- shutil.rmtree(tempdir, ignore_errors=True)
-
- return temp_file.name
-
-
-#-------------------------------------------------------------------------------------
-# Role utility functions
-#-------------------------------------------------------------------------------------
-
-def get_role_path(role_name, options):
- """
- Returns the role path based on the roles_path option
- and the role name.
- """
- roles_path = get_opt(options,'roles_path')
- roles_path = os.path.join(roles_path, role_name)
- roles_path = os.path.expanduser(roles_path)
- return roles_path
-
-def get_role_metadata(role_name, options):
- """
- Returns the metadata as YAML, if the file 'meta/main.yml'
- exists in the specified role_path
- """
- role_path = os.path.join(get_role_path(role_name, options), 'meta/main.yml')
- try:
- if os.path.isfile(role_path):
- f = open(role_path, 'r')
- meta_data = yaml.safe_load(f)
- f.close()
- return meta_data
- else:
- return None
- except:
- return None
-
-def get_galaxy_install_info(role_name, options):
- """
- Returns the YAML data contained in 'meta/.galaxy_install_info',
- if it exists.
- """
-
- try:
- info_path = os.path.join(get_role_path(role_name, options), 'meta/.galaxy_install_info')
- if os.path.isfile(info_path):
- f = open(info_path, 'r')
- info_data = yaml.safe_load(f)
- f.close()
- return info_data
- else:
- return None
- except:
- return None
-
-def write_galaxy_install_info(role_name, role_version, options):
- """
- Writes a YAML-formatted file to the role's meta/ directory
- (named .galaxy_install_info) which contains some information
- we can use later for commands like 'list' and 'info'.
- """
-
- info = dict(
- version = role_version,
- install_date = datetime.datetime.utcnow().strftime("%c"),
- )
- try:
- info_path = os.path.join(get_role_path(role_name, options), 'meta/.galaxy_install_info')
- f = open(info_path, 'w+')
- info_data = yaml.safe_dump(info, f)
- f.close()
- except:
- return False
- return True
-
-
-def remove_role(role_name, options):
- """
- Removes the specified role from the roles path. There is a
- sanity check to make sure there's a meta/main.yml file at this
- path so the user doesn't blow away random directories
- """
- if get_role_metadata(role_name, options):
- role_path = get_role_path(role_name, options)
- shutil.rmtree(role_path)
- return True
- else:
- return False
-
-def fetch_role(role_name, target, role_data, options):
- """
- Downloads the archived role from github to a temp location, extracts
- it, and then copies the extracted role to the role library path.
- """
-
- # first grab the file and save it to a temp location
- if '://' in role_name:
- archive_url = role_name
- else:
- archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], target)
- print "- downloading role from %s" % archive_url
-
- try:
- url_file = urllib2.urlopen(archive_url)
- temp_file = tempfile.NamedTemporaryFile(delete=False)
- data = url_file.read()
- while data:
- temp_file.write(data)
- data = url_file.read()
- temp_file.close()
- return temp_file.name
- except Exception, e:
- # TODO: better urllib2 error handling for error
- # messages that are more exact
- print "- error: failed to download the file."
- return False
-
-def install_role(role_name, role_version, role_filename, options):
- # the file is a tar, so open it that way and extract it
- # to the specified (or default) roles directory
-
- if not tarfile.is_tarfile(role_filename):
- print "- error: the file downloaded was not a tar.gz"
- return False
- else:
- if role_filename.endswith('.gz'):
- role_tar_file = tarfile.open(role_filename, "r:gz")
- else:
- role_tar_file = tarfile.open(role_filename, "r")
- # verify the role's meta file
- meta_file = None
- members = role_tar_file.getmembers()
- # next find the metadata file
- for member in members:
- if "/meta/main.yml" in member.name:
- meta_file = member
- break
- if not meta_file:
- print "- error: this role does not appear to have a meta/main.yml file."
- return False
- else:
- try:
- meta_file_data = yaml.safe_load(role_tar_file.extractfile(meta_file))
- except:
- print "- error: this role does not appear to have a valid meta/main.yml file."
- return False
-
- # we strip off the top-level directory for all of the files contained within
- # the tar file here, since the default is 'github_repo-target', and change it
- # to the specified role's name
- role_path = os.path.join(get_opt(options, 'roles_path'), role_name)
- role_path = os.path.expanduser(role_path)
- print "- extracting %s to %s" % (role_name, role_path)
- try:
- if os.path.exists(role_path):
- if not os.path.isdir(role_path):
- print "- error: the specified roles path exists and is not a directory."
- return False
- elif not get_opt(options, "force", False):
- print "- error: the specified role %s appears to already exist. Use --force to replace it." % role_name
- return False
- else:
- # using --force, remove the old path
- if not remove_role(role_name, options):
- print "- error: %s doesn't appear to contain a role." % role_path
- print " please remove this directory manually if you really want to put the role here."
- return False
- else:
- os.makedirs(role_path)
-
- # now we do the actual extraction to the role_path
- for member in members:
- # we only extract files, and remove any relative path
- # bits that might be in the file for security purposes
- # and drop the leading directory, as mentioned above
- if member.isreg() or member.issym():
- parts = member.name.split("/")[1:]
- final_parts = []
- for part in parts:
- if part != '..' and '~' not in part and '$' not in part:
- final_parts.append(part)
- member.name = os.path.join(*final_parts)
- role_tar_file.extract(member, role_path)
-
- # write out the install info file for later use
- write_galaxy_install_info(role_name, role_version, options)
- except OSError, e:
- print "- error: you do not have permission to modify files in %s" % role_path
- return False
-
- # return the parsed yaml metadata
- print "- %s was installed successfully" % role_name
- return meta_file_data
-
-#-------------------------------------------------------------------------------------
-# Action functions
-#-------------------------------------------------------------------------------------
-
-def execute_init(args, options, parser):
- """
- Executes the init action, which creates the skeleton framework
- of a role that complies with the galaxy metadata format.
- """
-
- init_path = get_opt(options, 'init_path', './')
- api_server = get_opt(options, "api_server", "galaxy.ansible.com")
- force = get_opt(options, 'force', False)
- offline = get_opt(options, 'offline', False)
-
- if not offline:
- api_config = api_get_config(api_server)
- if not api_config:
- print "- the API server (%s) is not responding, please try again later." % api_server
- sys.exit(1)
-
- try:
- role_name = args.pop(0).strip()
- if role_name == "":
- raise Exception("")
- role_path = os.path.join(init_path, role_name)
- if os.path.exists(role_path):
- if os.path.isfile(role_path):
- print "- the path %s already exists, but is a file - aborting" % role_path
- sys.exit(1)
- elif not force:
- print "- the directory %s already exists." % role_path
- print " you can use --force to re-initialize this directory,\n" + \
- " however it will reset any main.yml files that may have\n" + \
- " been modified there already."
- sys.exit(1)
- except Exception, e:
- parser.print_help()
- print "- no role name specified for init"
- sys.exit(1)
-
- ROLE_DIRS = ('defaults','files','handlers','meta','tasks','templates','vars')
-
- # create the default README.md
- if not os.path.exists(role_path):
- os.makedirs(role_path)
- readme_path = os.path.join(role_path, "README.md")
- f = open(readme_path, "wb")
- f.write(default_readme_template)
- f.close
-
- for dir in ROLE_DIRS:
- dir_path = os.path.join(init_path, role_name, dir)
- main_yml_path = os.path.join(dir_path, 'main.yml')
- # create the directory if it doesn't exist already
- if not os.path.exists(dir_path):
- os.makedirs(dir_path)
-
- # now create the main.yml file for that directory
- if dir == "meta":
- # create a skeleton meta/main.yml with a valid galaxy_info
- # datastructure in place, plus with all of the available
- # tags/platforms included (but commented out) and the
- # dependencies section
- platforms = []
- if not offline:
- platforms = api_get_list(api_server, "platforms") or []
- categories = []
- if not offline:
- categories = api_get_list(api_server, "categories") or []
-
- # group the list of platforms from the api based
- # on their names, with the release field being
- # appended to a list of versions
- platform_groups = defaultdict(list)
- for platform in platforms:
- platform_groups[platform['name']].append(platform['release'])
- platform_groups[platform['name']].sort()
-
- inject = dict(
- author = 'your name',
- company = 'your company (optional)',
- license = 'license (GPLv2, CC-BY, etc)',
- issue_tracker_url = 'http://example.com/issue/tracker',
- min_ansible_version = '1.2',
- platforms = platform_groups,
- categories = categories,
- )
- rendered_meta = Environment().from_string(default_meta_template).render(inject)
- f = open(main_yml_path, 'w')
- f.write(rendered_meta)
- f.close()
- pass
- elif dir not in ('files','templates'):
- # just write a (mostly) empty YAML file for main.yml
- f = open(main_yml_path, 'w')
- f.write('---\n# %s file for %s\n' % (dir,role_name))
- f.close()
- print "- %s was created successfully" % role_name
-
-def execute_info(args, options, parser):
- """
- Executes the info action. This action prints out detailed
- information about an installed role as well as info available
- from the galaxy API.
- """
-
- if len(args) == 0:
- # the user needs to specify a role
- parser.print_help()
- print "- you must specify a user/role name"
- sys.exit(1)
-
- api_server = get_opt(options, "api_server", "galaxy.ansible.com")
- api_config = api_get_config(api_server)
- roles_path = get_opt(options, "roles_path")
-
- for role in args:
-
- role_info = {}
-
- install_info = get_galaxy_install_info(role, options)
- if install_info:
- if 'version' in install_info:
- install_info['intalled_version'] = install_info['version']
- del install_info['version']
- role_info.update(install_info)
-
- remote_data = api_lookup_role_by_name(api_server, role, False)
- if remote_data:
- role_info.update(remote_data)
-
- metadata = get_role_metadata(role, options)
- if metadata:
- role_info.update(metadata)
-
- role_spec = ansible.utils.role_spec_parse(role)
- if role_spec:
- role_info.update(role_spec)
-
- if role_info:
- print "- %s:" % (role)
- for k in sorted(role_info.keys()):
-
- if k in SKIP_INFO_KEYS:
- continue
-
- if isinstance(role_info[k], dict):
- print "\t%s: " % (k)
- for key in sorted(role_info[k].keys()):
- if key in SKIP_INFO_KEYS:
- continue
- print "\t\t%s: %s" % (key, role_info[k][key])
- else:
- print "\t%s: %s" % (k, role_info[k])
- else:
- print "- the role %s was not found" % role
-
-def execute_install(args, options, parser):
- """
- Executes the installation action. The args list contains the
- roles to be installed, unless -f was specified. The list of roles
- can be a name (which will be downloaded via the galaxy API and github),
- or it can be a local .tar.gz file.
- """
-
- role_file = get_opt(options, "role_file", None)
-
- if len(args) == 0 and role_file is None:
- # the user needs to specify one of either --role-file
- # or specify a single user/role name
- parser.print_help()
- print "- you must specify a user/role name or a roles file"
- sys.exit()
- elif len(args) == 1 and not role_file is None:
- # using a role file is mutually exclusive of specifying
- # the role name on the command line
- parser.print_help()
- print "- please specify a user/role name, or a roles file, but not both"
- sys.exit(1)
-
- api_server = get_opt(options, "api_server", "galaxy.ansible.com")
- no_deps = get_opt(options, "no_deps", False)
- roles_path = get_opt(options, "roles_path")
-
- roles_done = []
- if role_file:
- f = open(role_file, 'r')
- if role_file.endswith('.yaml') or role_file.endswith('.yml'):
- roles_left = map(ansible.utils.role_yaml_parse, yaml.safe_load(f))
- else:
- # roles listed in a file, one per line
- roles_left = map(ansible.utils.role_spec_parse, f.readlines())
- f.close()
- else:
- # roles were specified directly, so we'll just go out grab them
- # (and their dependencies, unless the user doesn't want us to).
- roles_left = map(ansible.utils.role_spec_parse, args)
-
- while len(roles_left) > 0:
- # query the galaxy API for the role data
- role_data = None
- role = roles_left.pop(0)
- role_src = role.get("src")
- role_scm = role.get("scm")
- role_path = role.get("path")
-
- if role_path:
- options.roles_path = role_path
- else:
- options.roles_path = roles_path
-
- if os.path.isfile(role_src):
- # installing a local tar.gz
- tmp_file = role_src
- else:
- if role_scm:
- # create tar file from scm url
- tmp_file = scm_archive_role(role_scm, role_src, role.get("version"), role.get("name"))
- elif '://' in role_src:
- # just download a URL - version will probably be in the URL
- tmp_file = fetch_role(role_src, None, None, options)
- else:
- # installing from galaxy
- api_config = api_get_config(api_server)
- if not api_config:
- print "- the API server (%s) is not responding, please try again later." % api_server
- sys.exit(1)
-
- role_data = api_lookup_role_by_name(api_server, role_src)
- if not role_data:
- print "- sorry, %s was not found on %s." % (role_src, api_server)
- exit_without_ignore(options)
- continue
-
- role_versions = api_fetch_role_related(api_server, 'versions', role_data['id'])
- if "version" not in role or role['version'] == '':
- # convert the version names to LooseVersion objects
- # and sort them to get the latest version. If there
- # are no versions in the list, we'll grab the head
- # of the master branch
- if len(role_versions) > 0:
- loose_versions = [LooseVersion(a.get('name',None)) for a in role_versions]
- loose_versions.sort()
- role["version"] = str(loose_versions[-1])
- else:
- role["version"] = 'master'
- elif role['version'] != 'master':
- if role_versions and role["version"] not in [a.get('name', None) for a in role_versions]:
- print 'role is %s' % role
- print "- the specified version (%s) was not found in the list of available versions (%s)." % (role['version'], role_versions)
- exit_without_ignore(options)
- continue
-
- # download the role. if --no-deps was specified, we stop here,
- # otherwise we recursively grab roles and all of their deps.
- tmp_file = fetch_role(role_src, role["version"], role_data, options)
- installed = False
- if tmp_file:
- installed = install_role(role.get("name"), role.get("version"), tmp_file, options)
- # we're done with the temp file, clean it up
- if tmp_file != role_src:
- os.unlink(tmp_file)
- # install dependencies, if we want them
- if not no_deps and installed:
- if not role_data:
- role_data = get_role_metadata(role.get("name"), options)
- role_dependencies = role_data['dependencies']
- else:
- role_dependencies = role_data['summary_fields']['dependencies'] # api_fetch_role_related(api_server, 'dependencies', role_data['id'])
- for dep in role_dependencies:
- if isinstance(dep, basestring):
- dep = ansible.utils.role_spec_parse(dep)
- else:
- dep = ansible.utils.role_yaml_parse(dep)
- if not get_role_metadata(dep["name"], options):
- if dep not in roles_left:
- print '- adding dependency: %s' % dep["name"]
- roles_left.append(dep)
- else:
- print '- dependency %s already pending installation.' % dep["name"]
- else:
- print '- dependency %s is already installed, skipping.' % dep["name"]
- if not tmp_file or not installed:
- print "- %s was NOT installed successfully." % role.get("name")
- exit_without_ignore(options)
- sys.exit(0)
-
-def execute_remove(args, options, parser):
- """
- Executes the remove action. The args list contains the list
- of roles to be removed. This list can contain more than one role.
- """
-
- if len(args) == 0:
- parser.print_help()
- print '- you must specify at least one role to remove.'
- sys.exit()
-
- for role in args:
- if get_role_metadata(role, options):
- if remove_role(role, options):
- print '- successfully removed %s' % role
- else:
- print "- failed to remove role: %s" % role
- else:
- print '- %s is not installed, skipping.' % role
- sys.exit(0)
-
-def execute_list(args, options, parser):
- """
- Executes the list action. The args list can contain zero
- or one role. If one is specified, only that role will be
- shown, otherwise all roles in the specified directory will
- be shown.
- """
-
- if len(args) > 1:
- print "- please specify only one role to list, or specify no roles to see a full list"
- sys.exit(1)
-
- if len(args) == 1:
- # show only the request role, if it exists
- role_name = args[0]
- metadata = get_role_metadata(role_name, options)
- if metadata:
- install_info = get_galaxy_install_info(role_name, options)
- version = None
- if install_info:
- version = install_info.get("version", None)
- if not version:
- version = "(unknown version)"
- # show some more info about single roles here
- print "- %s, %s" % (role_name, version)
- else:
- print "- the role %s was not found" % role_name
- else:
- # show all valid roles in the roles_path directory
- roles_path = get_opt(options, 'roles_path')
- roles_path = os.path.expanduser(roles_path)
- if not os.path.exists(roles_path):
- parser.print_help()
- print "- the path %s does not exist. Please specify a valid path with --roles-path" % roles_path
- sys.exit(1)
- elif not os.path.isdir(roles_path):
- print "- %s exists, but it is not a directory. Please specify a valid path with --roles-path" % roles_path
- parser.print_help()
- sys.exit(1)
- path_files = os.listdir(roles_path)
- for path_file in path_files:
- if get_role_metadata(path_file, options):
- install_info = get_galaxy_install_info(path_file, options)
- version = None
- if install_info:
- version = install_info.get("version", None)
- if not version:
- version = "(unknown version)"
- print "- %s, %s" % (path_file, version)
- sys.exit(0)
-
-#-------------------------------------------------------------------------------------
-# The main entry point
-#-------------------------------------------------------------------------------------
-
-def main():
- # parse the CLI options
- action = get_action(sys.argv)
- parser = build_option_parser(action)
- (options, args) = parser.parse_args()
-
- # execute the desired action
- if 1: #try:
- fn = globals()["execute_%s" % action]
- fn(args, options, parser)
- #except KeyError, e:
- # print "- error: %s is not a valid action. Valid actions are: %s" % (action, ", ".join(VALID_ACTIONS))
- # sys.exit(1)
-
-if __name__ == "__main__":
- main()
diff --git a/bin/ansible-galaxy b/bin/ansible-galaxy
new file mode 120000
index 0000000000..cabb1f519a
--- /dev/null
+++ b/bin/ansible-galaxy
@@ -0,0 +1 @@
+ansible
\ No newline at end of file
diff --git a/bin/ansible-playbook b/bin/ansible-playbook
deleted file mode 100755
index 3d6e1f9f40..0000000000
--- a/bin/ansible-playbook
+++ /dev/null
@@ -1,330 +0,0 @@
-#!/usr/bin/env python
-# (C) 2012, Michael DeHaan,
-
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-#######################################################
-
-__requires__ = ['ansible']
-try:
- import pkg_resources
-except Exception:
- # Use pkg_resources to find the correct versions of libraries and set
- # sys.path appropriately when there are multiversion installs. But we
- # have code that better expresses the errors in the places where the code
- # is actually used (the deps are optional for many code paths) so we don't
- # want to fail here.
- pass
-
-import sys
-import os
-import stat
-
-# Augment PYTHONPATH to find Python modules relative to this file path
-# This is so that we can find the modules when running from a local checkout
-# installed as editable with `pip install -e ...` or `python setup.py develop`
-local_module_path = os.path.abspath(
- os.path.join(os.path.dirname(__file__), '..', 'lib')
-)
-sys.path.append(local_module_path)
-
-import ansible.playbook
-import ansible.constants as C
-import ansible.utils.template
-from ansible import errors
-from ansible import callbacks
-from ansible import utils
-from ansible.color import ANSIBLE_COLOR, stringc
-from ansible.callbacks import display
-
-def colorize(lead, num, color):
- """ Print 'lead' = 'num' in 'color' """
- if num != 0 and ANSIBLE_COLOR and color is not None:
- return "%s%s%-15s" % (stringc(lead, color), stringc("=", color), stringc(str(num), color))
- else:
- return "%s=%-4s" % (lead, str(num))
-
-def hostcolor(host, stats, color=True):
- if ANSIBLE_COLOR and color:
- if stats['failures'] != 0 or stats['unreachable'] != 0:
- return "%-37s" % stringc(host, 'red')
- elif stats['changed'] != 0:
- return "%-37s" % stringc(host, 'yellow')
- else:
- return "%-37s" % stringc(host, 'green')
- return "%-26s" % host
-
-
-def main(args):
- ''' run ansible-playbook operations '''
-
- # create parser for CLI options
- parser = utils.base_parser(
- constants=C,
- usage = "%prog playbook.yml",
- connect_opts=True,
- runas_opts=True,
- subset_opts=True,
- check_opts=True,
- diff_opts=True
- )
- #parser.add_option('--vault-password', dest="vault_password",
- # help="password for vault encrypted files")
- parser.add_option('-t', '--tags', dest='tags', default='all',
- help="only run plays and tasks tagged with these values")
- parser.add_option('--skip-tags', dest='skip_tags',
- help="only run plays and tasks whose tags do not match these values")
- parser.add_option('--syntax-check', dest='syntax', action='store_true',
- help="perform a syntax check on the playbook, but do not execute it")
- parser.add_option('--list-tasks', dest='listtasks', action='store_true',
- help="list all tasks that would be executed")
- parser.add_option('--list-tags', dest='listtags', action='store_true',
- help="list all available tags")
- parser.add_option('--step', dest='step', action='store_true',
- help="one-step-at-a-time: confirm each task before running")
- parser.add_option('--start-at-task', dest='start_at',
- help="start the playbook at the task matching this name")
- parser.add_option('--force-handlers', dest='force_handlers',
- default=C.DEFAULT_FORCE_HANDLERS, action='store_true',
- help="run handlers even if a task fails")
- parser.add_option('--flush-cache', dest='flush_cache', action='store_true',
- help="clear the fact cache")
-
- options, args = parser.parse_args(args)
-
- if len(args) == 0:
- parser.print_help(file=sys.stderr)
- return 1
-
- # privlege escalation command line arguments need to be mutually exclusive
- utils.check_mutually_exclusive_privilege(options, parser)
-
- if (options.ask_vault_pass and options.vault_password_file):
- parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")
-
- sshpass = None
- becomepass = None
- vault_pass = None
-
- options.ask_vault_pass = options.ask_vault_pass or C.DEFAULT_ASK_VAULT_PASS
-
- if options.listhosts or options.syntax or options.listtasks or options.listtags:
- (_, _, vault_pass) = utils.ask_passwords(ask_vault_pass=options.ask_vault_pass)
- else:
- options.ask_pass = options.ask_pass or C.DEFAULT_ASK_PASS
- # Never ask for an SSH password when we run with local connection
- if options.connection == "local":
- options.ask_pass = False
-
- # set pe options
- utils.normalize_become_options(options)
- prompt_method = utils.choose_pass_prompt(options)
- (sshpass, becomepass, vault_pass) = utils.ask_passwords(ask_pass=options.ask_pass,
- become_ask_pass=options.become_ask_pass,
- ask_vault_pass=options.ask_vault_pass,
- become_method=prompt_method)
-
- # read vault_pass from a file
- if not options.ask_vault_pass and options.vault_password_file:
- vault_pass = utils.read_vault_file(options.vault_password_file)
-
- extra_vars = utils.parse_extra_vars(options.extra_vars, vault_pass)
-
- only_tags = options.tags.split(",")
- skip_tags = options.skip_tags
- if options.skip_tags is not None:
- skip_tags = options.skip_tags.split(",")
-
- for playbook in args:
- if not os.path.exists(playbook):
- raise errors.AnsibleError("the playbook: %s could not be found" % playbook)
- if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
- raise errors.AnsibleError("the playbook: %s does not appear to be a file" % playbook)
-
- inventory = ansible.inventory.Inventory(options.inventory, vault_password=vault_pass)
-
- # Note: slightly wrong, this is written so that implicit localhost
- # (which is not returned in list_hosts()) is taken into account for
- # warning if inventory is empty. But it can't be taken into account for
- # checking if limit doesn't match any hosts. Instead we don't worry about
- # limit if only implicit localhost was in inventory to start with.
- #
- # Fix this in v2
- no_hosts = False
- if len(inventory.list_hosts()) == 0:
- # Empty inventory
- utils.warning("provided hosts list is empty, only localhost is available")
- no_hosts = True
- inventory.subset(options.subset)
- if len(inventory.list_hosts()) == 0 and no_hosts is False:
- # Invalid limit
- raise errors.AnsibleError("Specified --limit does not match any hosts")
-
- # run all playbooks specified on the command line
- for playbook in args:
-
- stats = callbacks.AggregateStats()
- playbook_cb = callbacks.PlaybookCallbacks(verbose=utils.VERBOSITY)
- if options.step:
- playbook_cb.step = options.step
- if options.start_at:
- playbook_cb.start_at = options.start_at
- runner_cb = callbacks.PlaybookRunnerCallbacks(stats, verbose=utils.VERBOSITY)
-
- pb = ansible.playbook.PlayBook(
- playbook=playbook,
- module_path=options.module_path,
- inventory=inventory,
- forks=options.forks,
- remote_user=options.remote_user,
- remote_pass=sshpass,
- callbacks=playbook_cb,
- runner_callbacks=runner_cb,
- stats=stats,
- timeout=options.timeout,
- transport=options.connection,
- become=options.become,
- become_method=options.become_method,
- become_user=options.become_user,
- become_pass=becomepass,
- extra_vars=extra_vars,
- private_key_file=options.private_key_file,
- only_tags=only_tags,
- skip_tags=skip_tags,
- check=options.check,
- diff=options.diff,
- vault_password=vault_pass,
- force_handlers=options.force_handlers,
- )
-
- if options.flush_cache:
- display(callbacks.banner("FLUSHING FACT CACHE"))
- pb.SETUP_CACHE.flush()
-
- if options.listhosts or options.listtasks or options.syntax or options.listtags:
- print ''
- print 'playbook: %s' % playbook
- print ''
- playnum = 0
- for (play_ds, play_basedir) in zip(pb.playbook, pb.play_basedirs):
- playnum += 1
- play = ansible.playbook.Play(pb, play_ds, play_basedir,
- vault_password=pb.vault_password)
- label = play.name
- hosts = pb.inventory.list_hosts(play.hosts)
-
- if options.listhosts:
- print ' play #%d (%s): host count=%d' % (playnum, label, len(hosts))
- for host in hosts:
- print ' %s' % host
-
- if options.listtags or options.listtasks:
- print ' play #%d (%s):\tTAGS: [%s]' % (playnum, label,','.join(sorted(set(play.tags))))
-
- if options.listtags:
- tags = []
- for task in pb.tasks_to_run_in_play(play):
- tags.extend(task.tags)
- print ' TASK TAGS: [%s]' % (', '.join(sorted(set(tags).difference(['untagged']))))
-
- if options.listtasks:
-
- for task in pb.tasks_to_run_in_play(play):
- if getattr(task, 'name', None) is not None:
- # meta tasks have no names
- print ' %s\tTAGS: [%s]' % (task.name, ', '.join(sorted(set(task.tags).difference(['untagged']))))
-
- if options.listhosts or options.listtasks or options.listtags:
- print ''
- continue
-
- if options.syntax:
- # if we've not exited by now then we are fine.
- print 'Playbook Syntax is fine'
- return 0
-
- failed_hosts = []
- unreachable_hosts = []
-
- try:
-
- pb.run()
-
- hosts = sorted(pb.stats.processed.keys())
- display(callbacks.banner("PLAY RECAP"))
- playbook_cb.on_stats(pb.stats)
-
- for h in hosts:
- t = pb.stats.summarize(h)
- if t['failures'] > 0:
- failed_hosts.append(h)
- if t['unreachable'] > 0:
- unreachable_hosts.append(h)
-
- retries = failed_hosts + unreachable_hosts
-
- if C.RETRY_FILES_ENABLED and len(retries) > 0:
- filename = pb.generate_retry_inventory(retries)
- if filename:
- display(" to retry, use: --limit @%s\n" % filename)
-
- for h in hosts:
- t = pb.stats.summarize(h)
-
- display("%s : %s %s %s %s" % (
- hostcolor(h, t),
- colorize('ok', t['ok'], 'green'),
- colorize('changed', t['changed'], 'yellow'),
- colorize('unreachable', t['unreachable'], 'red'),
- colorize('failed', t['failures'], 'red')),
- screen_only=True
- )
-
- display("%s : %s %s %s %s" % (
- hostcolor(h, t, False),
- colorize('ok', t['ok'], None),
- colorize('changed', t['changed'], None),
- colorize('unreachable', t['unreachable'], None),
- colorize('failed', t['failures'], None)),
- log_only=True
- )
-
-
- print ""
- if len(failed_hosts) > 0:
- return 2
- if len(unreachable_hosts) > 0:
- return 3
-
- except errors.AnsibleError, e:
- display("ERROR: %s" % e, color='red')
- return 1
-
- return 0
-
-
-if __name__ == "__main__":
- display(" ", log_only=True)
- display(" ".join(sys.argv), log_only=True)
- display(" ", log_only=True)
- try:
- sys.exit(main(sys.argv[1:]))
- except errors.AnsibleError, e:
- display("ERROR: %s" % e, color='red', stderr=True)
- sys.exit(1)
- except KeyboardInterrupt, ke:
- display("ERROR: interrupted", color='red', stderr=True)
- sys.exit(1)
diff --git a/bin/ansible-playbook b/bin/ansible-playbook
new file mode 120000
index 0000000000..cabb1f519a
--- /dev/null
+++ b/bin/ansible-playbook
@@ -0,0 +1 @@
+ansible
\ No newline at end of file
diff --git a/bin/ansible-pull b/bin/ansible-pull
deleted file mode 100755
index d4887631e0..0000000000
--- a/bin/ansible-pull
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/usr/bin/env python
-
-# (c) 2012, Stephen Fromm
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-# ansible-pull is a script that runs ansible in local mode
-# after checking out a playbooks directory from source repo. There is an
-# example playbook to bootstrap this script in the examples/ dir which
-# installs ansible and sets it up to run on cron.
-
-# usage:
-# ansible-pull -d /var/lib/ansible \
-# -U http://example.net/content.git [-C production] \
-# [path/playbook.yml]
-#
-# the -d and -U arguments are required; the -C argument is optional.
-#
-# ansible-pull accepts an optional argument to specify a playbook
-# location underneath the workdir and then searches the source repo
-# for playbooks in the following order, stopping at the first match:
-#
-# 1. $workdir/path/playbook.yml, if specified
-# 2. $workdir/$fqdn.yml
-# 3. $workdir/$hostname.yml
-# 4. $workdir/local.yml
-#
-# the source repo must contain at least one of these playbooks.
-
-import os
-import shutil
-import sys
-import datetime
-import socket
-import random
-import time
-from ansible import utils
-from ansible.utils import cmd_functions
-from ansible import errors
-from ansible import inventory
-
-DEFAULT_REPO_TYPE = 'git'
-DEFAULT_PLAYBOOK = 'local.yml'
-PLAYBOOK_ERRORS = {1: 'File does not exist',
- 2: 'File is not readable'}
-
-VERBOSITY=0
-
-def increment_debug(option, opt, value, parser):
- global VERBOSITY
- VERBOSITY += 1
-
-def try_playbook(path):
- if not os.path.exists(path):
- return 1
- if not os.access(path, os.R_OK):
- return 2
- return 0
-
-
-def select_playbook(path, args):
- playbook = None
- if len(args) > 0 and args[0] is not None:
- playbook = "%s/%s" % (path, args[0])
- rc = try_playbook(playbook)
- if rc != 0:
- print >>sys.stderr, "%s: %s" % (playbook, PLAYBOOK_ERRORS[rc])
- return None
- return playbook
- else:
- fqdn = socket.getfqdn()
- hostpb = "%s/%s.yml" % (path, fqdn)
- shorthostpb = "%s/%s.yml" % (path, fqdn.split('.')[0])
- localpb = "%s/%s" % (path, DEFAULT_PLAYBOOK)
- errors = []
- for pb in [hostpb, shorthostpb, localpb]:
- rc = try_playbook(pb)
- if rc == 0:
- playbook = pb
- break
- else:
- errors.append("%s: %s" % (pb, PLAYBOOK_ERRORS[rc]))
- if playbook is None:
- print >>sys.stderr, "\n".join(errors)
- return playbook
-
-
-def main(args):
- """ Set up and run a local playbook """
- usage = "%prog [options] [playbook.yml]"
- parser = utils.SortedOptParser(usage=usage)
- parser.add_option('--purge', default=False, action='store_true',
- help='purge checkout after playbook run')
- parser.add_option('-o', '--only-if-changed', dest='ifchanged', default=False, action='store_true',
- help='only run the playbook if the repository has been updated')
- parser.add_option('-s', '--sleep', dest='sleep', default=None,
- help='sleep for random interval (between 0 and n number of seconds) before starting. this is a useful way to disperse git requests')
- parser.add_option('-f', '--force', dest='force', default=False,
- action='store_true',
- help='run the playbook even if the repository could '
- 'not be updated')
- parser.add_option('-d', '--directory', dest='dest', default=None,
- help='directory to checkout repository to')
- #parser.add_option('-l', '--live', default=True, action='store_live',
- # help='Print the ansible-playbook output while running')
- parser.add_option('-U', '--url', dest='url', default=None,
- help='URL of the playbook repository')
- parser.add_option('-C', '--checkout', dest='checkout',
- help='branch/tag/commit to checkout. '
- 'Defaults to behavior of repository module.')
- parser.add_option('-i', '--inventory-file', dest='inventory',
- help="location of the inventory host file")
- parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
- help="set additional variables as key=value or YAML/JSON", default=[])
- parser.add_option('-v', '--verbose', default=False, action="callback",
- callback=increment_debug,
- help='Pass -vvvv to ansible-playbook')
- parser.add_option('-m', '--module-name', dest='module_name',
- default=DEFAULT_REPO_TYPE,
- help='Module name used to check out repository. '
- 'Default is %s.' % DEFAULT_REPO_TYPE)
- parser.add_option('--vault-password-file', dest='vault_password_file',
- help="vault password file")
- parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
- help='ask for sudo password')
- parser.add_option('-t', '--tags', dest='tags', default=False,
- help='only run plays and tasks tagged with these values')
- parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true',
- help='adds the hostkey for the repo url if not already added')
- parser.add_option('--key-file', dest='key_file',
- help="Pass '-i ' to the SSH arguments used by git.")
- options, args = parser.parse_args(args)
-
- hostname = socket.getfqdn()
- if not options.dest:
- # use a hostname dependent directory, in case of $HOME on nfs
- options.dest = utils.prepare_writeable_dir('~/.ansible/pull/%s' % hostname)
-
- options.dest = os.path.abspath(options.dest)
-
- if not options.url:
- parser.error("URL for repository not specified, use -h for help")
- return 1
-
- now = datetime.datetime.now()
- print now.strftime("Starting ansible-pull at %F %T")
-
- # Attempt to use the inventory passed in as an argument
- # It might not yet have been downloaded so use localhost if note
- if not options.inventory or not os.path.exists(options.inventory):
- inv_opts = 'localhost,'
- else:
- inv_opts = options.inventory
- limit_opts = 'localhost:%s:127.0.0.1' % hostname
- repo_opts = "name=%s dest=%s" % (options.url, options.dest)
-
- if VERBOSITY == 0:
- base_opts = '-c local --limit "%s"' % limit_opts
- elif VERBOSITY > 0:
- debug_level = ''.join([ "v" for x in range(0, VERBOSITY) ])
- base_opts = '-%s -c local --limit "%s"' % (debug_level, limit_opts)
-
- if options.checkout:
- repo_opts += ' version=%s' % options.checkout
-
- # Only git module is supported
- if options.module_name == DEFAULT_REPO_TYPE:
- if options.accept_host_key:
- repo_opts += ' accept_hostkey=yes'
-
- if options.key_file:
- repo_opts += ' key_file=%s' % options.key_file
-
- path = utils.plugins.module_finder.find_plugin(options.module_name)
- if path is None:
- sys.stderr.write("module '%s' not found.\n" % options.module_name)
- return 1
-
- bin_path = os.path.dirname(os.path.abspath(__file__))
- cmd = '%s/ansible localhost -i "%s" %s -m %s -a "%s"' % (
- bin_path, inv_opts, base_opts, options.module_name, repo_opts
- )
-
- for ev in options.extra_vars:
- cmd += ' -e "%s"' % ev
-
- if options.sleep:
- try:
- secs = random.randint(0,int(options.sleep));
- except ValueError:
- parser.error("%s is not a number." % options.sleep)
- return 1
-
- print >>sys.stderr, "Sleeping for %d seconds..." % secs
- time.sleep(secs);
-
-
- # RUN THe CHECKOUT COMMAND
- rc, out, err = cmd_functions.run_cmd(cmd, live=True)
-
- if rc != 0:
- if options.force:
- print >>sys.stderr, "Unable to update repository. Continuing with (forced) run of playbook."
- else:
- return rc
- elif options.ifchanged and '"changed": true' not in out:
- print "Repository has not changed, quitting."
- return 0
-
- playbook = select_playbook(options.dest, args)
-
- if playbook is None:
- print >>sys.stderr, "Could not find a playbook to run."
- return 1
-
- cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook)
- if options.vault_password_file:
- cmd += " --vault-password-file=%s" % options.vault_password_file
- if options.inventory:
- cmd += ' -i "%s"' % options.inventory
- for ev in options.extra_vars:
- cmd += ' -e "%s"' % ev
- if options.ask_sudo_pass:
- cmd += ' -K'
- if options.tags:
- cmd += ' -t "%s"' % options.tags
- os.chdir(options.dest)
-
- # RUN THE PLAYBOOK COMMAND
- rc, out, err = cmd_functions.run_cmd(cmd, live=True)
-
- if options.purge:
- os.chdir('/')
- try:
- shutil.rmtree(options.dest)
- except Exception, e:
- print >>sys.stderr, "Failed to remove %s: %s" % (options.dest, str(e))
-
- return rc
-
-if __name__ == '__main__':
- try:
- sys.exit(main(sys.argv[1:]))
- except KeyboardInterrupt, e:
- print >>sys.stderr, "Exit on user request.\n"
- sys.exit(1)
diff --git a/bin/ansible-pull b/bin/ansible-pull
new file mode 120000
index 0000000000..cabb1f519a
--- /dev/null
+++ b/bin/ansible-pull
@@ -0,0 +1 @@
+ansible
\ No newline at end of file
diff --git a/bin/ansible-vault b/bin/ansible-vault
deleted file mode 100755
index 22cfc0e148..0000000000
--- a/bin/ansible-vault
+++ /dev/null
@@ -1,241 +0,0 @@
-#!/usr/bin/env python
-
-# (c) 2014, James Tanner
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-# ansible-vault is a script that encrypts/decrypts YAML files. See
-# http://docs.ansible.com/playbooks_vault.html for more details.
-
-__requires__ = ['ansible']
-try:
- import pkg_resources
-except Exception:
- # Use pkg_resources to find the correct versions of libraries and set
- # sys.path appropriately when there are multiversion installs. But we
- # have code that better expresses the errors in the places where the code
- # is actually used (the deps are optional for many code paths) so we don't
- # want to fail here.
- pass
-
-import os
-import sys
-import traceback
-
-import ansible.constants as C
-
-from ansible import utils
-from ansible import errors
-from ansible.utils.vault import VaultEditor
-
-from optparse import OptionParser
-
-#-------------------------------------------------------------------------------------
-# Utility functions for parsing actions/options
-#-------------------------------------------------------------------------------------
-
-VALID_ACTIONS = ("create", "decrypt", "edit", "encrypt", "rekey", "view")
-
-def build_option_parser(action):
- """
- Builds an option parser object based on the action
- the user wants to execute.
- """
-
- usage = "usage: %%prog [%s] [--help] [options] file_name" % "|".join(VALID_ACTIONS)
- epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
- OptionParser.format_epilog = lambda self, formatter: self.epilog
- parser = OptionParser(usage=usage, epilog=epilog)
-
- if not action:
- parser.print_help()
- sys.exit()
-
- # options for all actions
- #parser.add_option('-c', '--cipher', dest='cipher', default="AES256", help="cipher to use")
- parser.add_option('--debug', dest='debug', action="store_true", help="debug")
- parser.add_option('--vault-password-file', dest='password_file',
- help="vault password file", default=C.DEFAULT_VAULT_PASSWORD_FILE)
-
- # options specific to actions
- if action == "create":
- parser.set_usage("usage: %prog create [options] file_name")
- elif action == "decrypt":
- parser.set_usage("usage: %prog decrypt [options] file_name")
- elif action == "edit":
- parser.set_usage("usage: %prog edit [options] file_name")
- elif action == "view":
- parser.set_usage("usage: %prog view [options] file_name")
- elif action == "encrypt":
- parser.set_usage("usage: %prog encrypt [options] file_name")
- elif action == "rekey":
- parser.set_usage("usage: %prog rekey [options] file_name")
-
- # done, return the parser
- return parser
-
-def get_action(args):
- """
- Get the action the user wants to execute from the
- sys argv list.
- """
- for i in range(0,len(args)):
- arg = args[i]
- if arg in VALID_ACTIONS:
- del args[i]
- return arg
- return None
-
-def get_opt(options, k, defval=""):
- """
- Returns an option from an Optparse values instance.
- """
- try:
- data = getattr(options, k)
- except:
- return defval
- if k == "roles_path":
- if os.pathsep in data:
- data = data.split(os.pathsep)[0]
- return data
-
-#-------------------------------------------------------------------------------------
-# Command functions
-#-------------------------------------------------------------------------------------
-
-def execute_create(args, options, parser):
- if len(args) > 1:
- raise errors.AnsibleError("'create' does not accept more than one filename")
-
- if not options.password_file:
- password, new_password = utils.ask_vault_passwords(ask_vault_pass=True, confirm_vault=True)
- else:
- password = utils.read_vault_file(options.password_file)
-
- cipher = 'AES256'
- if hasattr(options, 'cipher'):
- cipher = options.cipher
-
- this_editor = VaultEditor(cipher, password, args[0])
- this_editor.create_file()
-
-def execute_decrypt(args, options, parser):
-
- if not options.password_file:
- password, new_password = utils.ask_vault_passwords(ask_vault_pass=True)
- else:
- password = utils.read_vault_file(options.password_file)
-
- cipher = 'AES256'
- if hasattr(options, 'cipher'):
- cipher = options.cipher
-
- for f in args:
- this_editor = VaultEditor(cipher, password, f)
- this_editor.decrypt_file()
-
- print "Decryption successful"
-
-def execute_edit(args, options, parser):
-
- if len(args) > 1:
- raise errors.AnsibleError("edit does not accept more than one filename")
-
- if not options.password_file:
- password, new_password = utils.ask_vault_passwords(ask_vault_pass=True)
- else:
- password = utils.read_vault_file(options.password_file)
-
- cipher = None
-
- for f in args:
- this_editor = VaultEditor(cipher, password, f)
- this_editor.edit_file()
-
-def execute_view(args, options, parser):
-
- if len(args) > 1:
- raise errors.AnsibleError("view does not accept more than one filename")
-
- if not options.password_file:
- password, new_password = utils.ask_vault_passwords(ask_vault_pass=True)
- else:
- password = utils.read_vault_file(options.password_file)
-
- cipher = None
-
- for f in args:
- this_editor = VaultEditor(cipher, password, f)
- this_editor.view_file()
-
-def execute_encrypt(args, options, parser):
-
- if not options.password_file:
- password, new_password = utils.ask_vault_passwords(ask_vault_pass=True, confirm_vault=True)
- else:
- password = utils.read_vault_file(options.password_file)
-
- cipher = 'AES256'
- if hasattr(options, 'cipher'):
- cipher = options.cipher
-
- for f in args:
- this_editor = VaultEditor(cipher, password, f)
- this_editor.encrypt_file()
-
- print "Encryption successful"
-
-def execute_rekey(args, options, parser):
-
- if not options.password_file:
- password, __ = utils.ask_vault_passwords(ask_vault_pass=True)
- else:
- password = utils.read_vault_file(options.password_file)
-
- __, new_password = utils.ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=True, confirm_new=True)
-
- cipher = None
- for f in args:
- this_editor = VaultEditor(cipher, password, f)
- this_editor.rekey_file(new_password)
-
- print "Rekey successful"
-
-#-------------------------------------------------------------------------------------
-# MAIN
-#-------------------------------------------------------------------------------------
-
-def main():
-
- action = get_action(sys.argv)
- parser = build_option_parser(action)
- (options, args) = parser.parse_args()
-
- if not len(args):
- raise errors.AnsibleError(
- "The '%s' command requires a filename as the first argument" % action
- )
-
- # execute the desired action
- try:
- fn = globals()["execute_%s" % action]
- fn(args, options, parser)
- except Exception, err:
- if options.debug:
- print traceback.format_exc()
- print "ERROR:",err
- sys.exit(1)
-
-if __name__ == "__main__":
- main()
diff --git a/bin/ansible-vault b/bin/ansible-vault
new file mode 120000
index 0000000000..cabb1f519a
--- /dev/null
+++ b/bin/ansible-vault
@@ -0,0 +1 @@
+ansible
\ No newline at end of file
diff --git a/lib/ansible/__init__.py b/lib/ansible/__init__.py
index ba5ca83b72..8637adb54d 100644
--- a/lib/ansible/__init__.py
+++ b/lib/ansible/__init__.py
@@ -14,5 +14,9 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-__version__ = '2.0.0'
-__author__ = 'Michael DeHaan'
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+__version__ = '2.0'
diff --git a/v2/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
similarity index 100%
rename from v2/ansible/cli/__init__.py
rename to lib/ansible/cli/__init__.py
diff --git a/v2/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
similarity index 100%
rename from v2/ansible/cli/adhoc.py
rename to lib/ansible/cli/adhoc.py
diff --git a/v2/ansible/cli/doc.py b/lib/ansible/cli/doc.py
similarity index 100%
rename from v2/ansible/cli/doc.py
rename to lib/ansible/cli/doc.py
diff --git a/v2/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
similarity index 100%
rename from v2/ansible/cli/galaxy.py
rename to lib/ansible/cli/galaxy.py
diff --git a/v2/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
similarity index 100%
rename from v2/ansible/cli/playbook.py
rename to lib/ansible/cli/playbook.py
diff --git a/v2/ansible/cli/pull.py b/lib/ansible/cli/pull.py
similarity index 100%
rename from v2/ansible/cli/pull.py
rename to lib/ansible/cli/pull.py
diff --git a/v2/ansible/cli/vault.py b/lib/ansible/cli/vault.py
similarity index 100%
rename from v2/ansible/cli/vault.py
rename to lib/ansible/cli/vault.py
diff --git a/v2/ansible/compat/__init__.py b/lib/ansible/compat/__init__.py
similarity index 100%
rename from v2/ansible/compat/__init__.py
rename to lib/ansible/compat/__init__.py
diff --git a/v2/ansible/compat/tests/__init__.py b/lib/ansible/compat/tests/__init__.py
similarity index 100%
rename from v2/ansible/compat/tests/__init__.py
rename to lib/ansible/compat/tests/__init__.py
diff --git a/v2/ansible/compat/tests/mock.py b/lib/ansible/compat/tests/mock.py
similarity index 100%
rename from v2/ansible/compat/tests/mock.py
rename to lib/ansible/compat/tests/mock.py
diff --git a/v2/ansible/compat/tests/unittest.py b/lib/ansible/compat/tests/unittest.py
similarity index 100%
rename from v2/ansible/compat/tests/unittest.py
rename to lib/ansible/compat/tests/unittest.py
diff --git a/v2/ansible/config/__init__.py b/lib/ansible/config/__init__.py
similarity index 100%
rename from v2/ansible/config/__init__.py
rename to lib/ansible/config/__init__.py
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 089de5b7c5..456beb8bbc 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -15,10 +15,15 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
import os
import pwd
import sys
-import ConfigParser
+
+from six.moves import configparser
from string import ascii_letters, digits
# copied from utils, avoid circular reference fun :)
@@ -35,13 +40,15 @@ def get_config(p, section, key, env_var, default, boolean=False, integer=False,
''' return a configuration variable with casting '''
value = _get_config(p, section, key, env_var, default)
if boolean:
- return mk_boolean(value)
- if value and integer:
- return int(value)
- if value and floating:
- return float(value)
- if value and islist:
- return [x.strip() for x in value.split(',')]
+ value = mk_boolean(value)
+ if value:
+ if integer:
+ value = int(value)
+ elif floating:
+ value = float(value)
+ elif islist:
+ if isinstance(value, basestring):
+ value = [x.strip() for x in value.split(',')]
return value
def _get_config(p, section, key, env_var, default):
@@ -60,7 +67,7 @@ def _get_config(p, section, key, env_var, default):
def load_config_file():
''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
- p = ConfigParser.ConfigParser()
+ p = configparser.ConfigParser()
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
@@ -73,8 +80,8 @@ def load_config_file():
if path is not None and os.path.exists(path):
try:
p.read(path)
- except ConfigParser.Error as e:
- print "Error reading config file: \n%s" % e
+ except configparser.Error as e:
+ print("Error reading config file: \n{0}".format(e))
sys.exit(1)
return p
return None
@@ -98,7 +105,8 @@ YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml", ".json" ]
DEFAULTS='defaults'
# configurable things
-DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'inventory', 'ANSIBLE_INVENTORY', get_config(p, DEFAULTS,'hostfile','ANSIBLE_HOSTS', '/etc/ansible/hosts')))
+DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, boolean=True)
+DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', get_config(p, DEFAULTS,'inventory','ANSIBLE_INVENTORY', '/etc/ansible/hosts')))
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None)
DEFAULT_ROLES_PATH = shell_expand_path(get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles'))
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
@@ -112,6 +120,7 @@ DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', active_user)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, boolean=True)
DEFAULT_PRIVATE_KEY_FILE = shell_expand_path(get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None))
+DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False, boolean=True)
DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, integer=True)
DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, boolean=True)
@@ -122,7 +131,6 @@ DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None,
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, boolean=True)
DEFAULT_SUDO = get_config(p, DEFAULTS, 'sudo', 'ANSIBLE_SUDO', False, boolean=True)
-DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_SUDO_EXE = get_config(p, DEFAULTS, 'sudo_exe', 'ANSIBLE_SUDO_EXE', 'sudo')
DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_FLAGS', '-H')
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
@@ -141,7 +149,7 @@ BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
BECOME_ERROR_STRINGS = {'sudo': 'Sorry, try again.', 'su': 'Authentication failure', 'pbrun': '', 'pfexec': '', 'runas': ''}
DEFAULT_BECOME = get_config(p, 'privilege_escalation', 'become', 'ANSIBLE_BECOME',False, boolean=True)
DEFAULT_BECOME_METHOD = get_config(p, 'privilege_escalation', 'become_method', 'ANSIBLE_BECOME_METHOD','sudo' if DEFAULT_SUDO else 'su' if DEFAULT_SU else 'sudo' ).lower()
-DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER',default=None)
+DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER', 'root')
DEFAULT_BECOME_ASK_PASS = get_config(p, 'privilege_escalation', 'become_ask_pass', 'ANSIBLE_BECOME_ASK_PASS', False, boolean=True)
# need to rethink impementing these 2
DEFAULT_BECOME_EXE = None
@@ -156,6 +164,7 @@ DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', '
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '~/.ansible/plugins/lookup_plugins:/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '~/.ansible/plugins/vars_plugins:/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '~/.ansible/plugins/filter_plugins:/usr/share/ansible_plugins/filter_plugins')
+DEFAULT_STDOUT_CALLBACK = get_config(p, DEFAULTS, 'stdout_callback', 'ANSIBLE_STDOUT_CALLBACK', 'default')
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'fact_caching_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
@@ -173,8 +182,8 @@ DEPRECATION_WARNINGS = get_config(p, DEFAULTS, 'deprecation_warnings',
DEFAULT_CALLABLE_WHITELIST = get_config(p, DEFAULTS, 'callable_whitelist', 'ANSIBLE_CALLABLE_WHITELIST', [], islist=True)
COMMAND_WARNINGS = get_config(p, DEFAULTS, 'command_warnings', 'ANSIBLE_COMMAND_WARNINGS', False, boolean=True)
DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks', 'ANSIBLE_LOAD_CALLBACK_PLUGINS', False, boolean=True)
-DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
-
+RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
+RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
@@ -196,10 +205,16 @@ ACCELERATE_KEYS_FILE_PERMS = get_config(p, 'accelerate', 'accelerate_keys_fi
ACCELERATE_MULTI_KEY = get_config(p, 'accelerate', 'accelerate_multi_key', 'ACCELERATE_MULTI_KEY', False, boolean=True)
PARAMIKO_PTY = get_config(p, 'paramiko_connection', 'pty', 'ANSIBLE_PARAMIKO_PTY', True, boolean=True)
+# galaxy related
+DEFAULT_GALAXY_URI = get_config(p, 'galaxy', 'server_uri', 'ANSIBLE_GALAXY_SERVER_URI', 'https://galaxy.ansible.com')
+# this can be configured to blacklist SCMS but cannot add new ones unless the code is also updated
+GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY_SCMS', ['git','hg'], islist=True)
+
# characters included in auto-generated passwords
DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_"
# non-configurable things
+MODULE_REQUIRE_ARGS = ['command', 'shell', 'raw', 'script']
DEFAULT_BECOME_PASS = None
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
diff --git a/v2/ansible/errors/__init__.py b/lib/ansible/errors/__init__.py
similarity index 100%
rename from v2/ansible/errors/__init__.py
rename to lib/ansible/errors/__init__.py
diff --git a/v2/ansible/errors/yaml_strings.py b/lib/ansible/errors/yaml_strings.py
similarity index 100%
rename from v2/ansible/errors/yaml_strings.py
rename to lib/ansible/errors/yaml_strings.py
diff --git a/v2/ansible/executor/__init__.py b/lib/ansible/executor/__init__.py
similarity index 100%
rename from v2/ansible/executor/__init__.py
rename to lib/ansible/executor/__init__.py
diff --git a/v2/ansible/executor/connection_info.py b/lib/ansible/executor/connection_info.py
similarity index 100%
rename from v2/ansible/executor/connection_info.py
rename to lib/ansible/executor/connection_info.py
diff --git a/v2/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
similarity index 100%
rename from v2/ansible/executor/module_common.py
rename to lib/ansible/executor/module_common.py
diff --git a/v2/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py
similarity index 100%
rename from v2/ansible/executor/play_iterator.py
rename to lib/ansible/executor/play_iterator.py
diff --git a/v2/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py
similarity index 100%
rename from v2/ansible/executor/playbook_executor.py
rename to lib/ansible/executor/playbook_executor.py
diff --git a/v2/ansible/executor/process/__init__.py b/lib/ansible/executor/process/__init__.py
similarity index 100%
rename from v2/ansible/executor/process/__init__.py
rename to lib/ansible/executor/process/__init__.py
diff --git a/v2/ansible/executor/process/result.py b/lib/ansible/executor/process/result.py
similarity index 100%
rename from v2/ansible/executor/process/result.py
rename to lib/ansible/executor/process/result.py
diff --git a/v2/ansible/executor/process/worker.py b/lib/ansible/executor/process/worker.py
similarity index 100%
rename from v2/ansible/executor/process/worker.py
rename to lib/ansible/executor/process/worker.py
diff --git a/v2/ansible/executor/stats.py b/lib/ansible/executor/stats.py
similarity index 100%
rename from v2/ansible/executor/stats.py
rename to lib/ansible/executor/stats.py
diff --git a/v2/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
similarity index 100%
rename from v2/ansible/executor/task_executor.py
rename to lib/ansible/executor/task_executor.py
diff --git a/v2/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py
similarity index 100%
rename from v2/ansible/executor/task_queue_manager.py
rename to lib/ansible/executor/task_queue_manager.py
diff --git a/v2/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py
similarity index 100%
rename from v2/ansible/executor/task_queue_manager.py
rename to lib/ansible/executor/task_queue_manager.py
diff --git a/v2/ansible/executor/task_result.py b/lib/ansible/executor/task_result.py
similarity index 100%
rename from v2/ansible/executor/task_result.py
rename to lib/ansible/executor/task_result.py
diff --git a/v2/ansible/galaxy/__init__.py b/lib/ansible/galaxy/__init__.py
similarity index 100%
rename from v2/ansible/galaxy/__init__.py
rename to lib/ansible/galaxy/__init__.py
diff --git a/v2/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py
similarity index 100%
rename from v2/ansible/galaxy/api.py
rename to lib/ansible/galaxy/api.py
diff --git a/v2/ansible/galaxy/data/metadata_template.j2 b/lib/ansible/galaxy/data/metadata_template.j2
similarity index 100%
rename from v2/ansible/galaxy/data/metadata_template.j2
rename to lib/ansible/galaxy/data/metadata_template.j2
diff --git a/v2/ansible/galaxy/data/readme b/lib/ansible/galaxy/data/readme
similarity index 100%
rename from v2/ansible/galaxy/data/readme
rename to lib/ansible/galaxy/data/readme
diff --git a/v2/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py
similarity index 100%
rename from v2/ansible/galaxy/role.py
rename to lib/ansible/galaxy/role.py
diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py
index 2048046d3c..063398f17f 100644
--- a/lib/ansible/inventory/__init__.py
+++ b/lib/ansible/inventory/__init__.py
@@ -16,36 +16,44 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
import fnmatch
import os
import sys
import re
+import stat
import subprocess
-import ansible.constants as C
+from ansible import constants as C
+from ansible.errors import *
+
from ansible.inventory.ini import InventoryParser
from ansible.inventory.script import InventoryScript
from ansible.inventory.dir import InventoryDirectory
from ansible.inventory.group import Group
from ansible.inventory.host import Host
-from ansible import errors
-from ansible import utils
+from ansible.plugins import vars_loader
+from ansible.utils.path import is_executable
+from ansible.utils.vars import combine_vars
class Inventory(object):
"""
Host inventory for ansible.
"""
- __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
- 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
- '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
+ #__slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
+ # 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
+ # '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
- def __init__(self, host_list=C.DEFAULT_HOST_LIST, vault_password=None):
+ def __init__(self, loader, variable_manager, host_list=C.DEFAULT_HOST_LIST):
# the host file file, or script path, or list of hosts
# if a list, inventory data will NOT be loaded
self.host_list = host_list
- self._vault_password=vault_password
+ self._loader = loader
+ self._variable_manager = variable_manager
# caching to avoid repeated calculations, particularly with
# external inventory scripts.
@@ -97,7 +105,7 @@ class Inventory(object):
if os.path.isdir(host_list):
# Ensure basedir is inside the directory
self.host_list = os.path.join(self.host_list, "")
- self.parser = InventoryDirectory(filename=host_list)
+ self.parser = InventoryDirectory(loader=self._loader, filename=host_list)
self.groups = self.parser.groups.values()
else:
# check to see if the specified file starts with a
@@ -113,9 +121,9 @@ class Inventory(object):
except:
pass
- if utils.is_executable(host_list):
+ if is_executable(host_list):
try:
- self.parser = InventoryScript(filename=host_list)
+ self.parser = InventoryScript(loader=self._loader, filename=host_list)
self.groups = self.parser.groups.values()
except:
if not shebang_present:
@@ -134,19 +142,23 @@ class Inventory(object):
else:
raise
- utils.plugins.vars_loader.add_directory(self.basedir(), with_subdir=True)
+ vars_loader.add_directory(self.basedir(), with_subdir=True)
else:
raise errors.AnsibleError("Unable to find an inventory file, specify one with -i ?")
- self._vars_plugins = [ x for x in utils.plugins.vars_loader.all(self) ]
+ self._vars_plugins = [ x for x in vars_loader.all(self) ]
+ # FIXME: shouldn't be required, since the group/host vars file
+ # management will be done in VariableManager
# get group vars from group_vars/ files and vars plugins
for group in self.groups:
- group.vars = utils.combine_vars(group.vars, self.get_group_variables(group.name, vault_password=self._vault_password))
+ # FIXME: combine_vars
+ group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
# get host vars from host_vars/ files and vars plugins
for host in self.get_hosts():
- host.vars = utils.combine_vars(host.vars, self.get_host_variables(host.name, vault_password=self._vault_password))
+ # FIXME: combine_vars
+ host.vars = combine_vars(host.vars, self.get_host_variables(host.name))
def _match(self, str, pattern_str):
@@ -192,9 +204,9 @@ class Inventory(object):
# exclude hosts mentioned in any restriction (ex: failed hosts)
if self._restriction is not None:
- hosts = [ h for h in hosts if h.name in self._restriction ]
+ hosts = [ h for h in hosts if h in self._restriction ]
if self._also_restriction is not None:
- hosts = [ h for h in hosts if h.name in self._also_restriction ]
+ hosts = [ h for h in hosts if h in self._also_restriction ]
return hosts
@@ -320,6 +332,8 @@ class Inventory(object):
new_host = Host(pattern)
new_host.set_variable("ansible_python_interpreter", sys.executable)
new_host.set_variable("ansible_connection", "local")
+ new_host.ipv4_address = '127.0.0.1'
+
ungrouped = self.get_group("ungrouped")
if ungrouped is None:
self.add_group(Group('ungrouped'))
@@ -420,7 +434,7 @@ class Inventory(object):
group = self.get_group(groupname)
if group is None:
- raise errors.AnsibleError("group not found: %s" % groupname)
+ raise Exception("group not found: %s" % groupname)
vars = {}
@@ -428,19 +442,21 @@ class Inventory(object):
vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')]
for updated in vars_results:
if updated is not None:
- vars = utils.combine_vars(vars, updated)
+ # FIXME: combine_vars
+ vars = combine_vars(vars, updated)
# Read group_vars/ files
- vars = utils.combine_vars(vars, self.get_group_vars(group))
+ # FIXME: combine_vars
+ vars = combine_vars(vars, self.get_group_vars(group))
return vars
- def get_variables(self, hostname, update_cached=False, vault_password=None):
+ def get_vars(self, hostname, update_cached=False, vault_password=None):
host = self.get_host(hostname)
if not host:
- raise errors.AnsibleError("host not found: %s" % hostname)
- return host.get_variables()
+ raise Exception("host not found: %s" % hostname)
+ return host.get_vars()
def get_host_variables(self, hostname, update_cached=False, vault_password=None):
@@ -460,22 +476,26 @@ class Inventory(object):
vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')]
for updated in vars_results:
if updated is not None:
- vars = utils.combine_vars(vars, updated)
+ # FIXME: combine_vars
+ vars = combine_vars(vars, updated)
# plugin.get_host_vars retrieves just vars for specific host
vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')]
for updated in vars_results:
if updated is not None:
- vars = utils.combine_vars(vars, updated)
+ # FIXME: combine_vars
+ vars = combine_vars(vars, updated)
# still need to check InventoryParser per host vars
# which actually means InventoryScript per host,
# which is not performant
if self.parser is not None:
- vars = utils.combine_vars(vars, self.parser.get_host_variables(host))
+ # FIXME: combine_vars
+ vars = combine_vars(vars, self.parser.get_host_variables(host))
# Read host_vars/ files
- vars = utils.combine_vars(vars, self.get_host_vars(host))
+ # FIXME: combine_vars
+ vars = combine_vars(vars, self.get_host_vars(host))
return vars
@@ -490,7 +510,7 @@ class Inventory(object):
""" return a list of hostnames for a pattern """
- result = [ h.name for h in self.get_hosts(pattern) ]
+ result = [ h for h in self.get_hosts(pattern) ]
if len(result) == 0 and pattern in ["localhost", "127.0.0.1"]:
result = [pattern]
return result
@@ -498,11 +518,7 @@ class Inventory(object):
def list_groups(self):
return sorted([ g.name for g in self.groups ], key=lambda x: x)
- # TODO: remove this function
- def get_restriction(self):
- return self._restriction
-
- def restrict_to(self, restriction):
+ def restrict_to_hosts(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
to exclude failed hosts in main playbook code, don't use this for other
@@ -544,7 +560,7 @@ class Inventory(object):
results.append(x)
self._subset = results
- def lift_restriction(self):
+ def remove_restriction(self):
""" Do not restrict list operations """
self._restriction = None
@@ -588,10 +604,12 @@ class Inventory(object):
self._playbook_basedir = dir
# get group vars from group_vars/ files
for group in self.groups:
- group.vars = utils.combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
+ # FIXME: combine_vars
+ group.vars = combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
# get host vars from host_vars/ files
for host in self.get_hosts():
- host.vars = utils.combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True))
+ # FIXME: combine_vars
+ host.vars = combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True))
# invalidate cache
self._vars_per_host = {}
self._vars_per_group = {}
@@ -639,15 +657,15 @@ class Inventory(object):
if _basedir == self._playbook_basedir and scan_pass != 1:
continue
+ # FIXME: these should go to VariableManager
if group and host is None:
# load vars in dir/group_vars/name_of_group
base_path = os.path.join(basedir, "group_vars/%s" % group.name)
- results = utils.load_vars(base_path, results, vault_password=self._vault_password)
-
+ self._variable_manager.add_group_vars_file(base_path, self._loader)
elif host and group is None:
# same for hostvars in dir/host_vars/name_of_host
base_path = os.path.join(basedir, "host_vars/%s" % host.name)
- results = utils.load_vars(base_path, results, vault_password=self._vault_password)
+ self._variable_manager.add_host_vars_file(base_path, self._loader)
# all done, results is a dictionary of variables for this particular host.
return results
diff --git a/lib/ansible/inventory/dir.py b/lib/ansible/inventory/dir.py
index 9ac23fff89..735f32d62c 100644
--- a/lib/ansible/inventory/dir.py
+++ b/lib/ansible/inventory/dir.py
@@ -17,20 +17,25 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
import os
-import ansible.constants as C
+
+from ansible import constants as C
+from ansible.errors import AnsibleError
+
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.ini import InventoryParser
from ansible.inventory.script import InventoryScript
-from ansible import utils
-from ansible import errors
+from ansible.utils.path import is_executable
+from ansible.utils.vars import combine_vars
class InventoryDirectory(object):
''' Host inventory parser for ansible using a directory of inventories. '''
- def __init__(self, filename=C.DEFAULT_HOST_LIST):
+ def __init__(self, loader, filename=C.DEFAULT_HOST_LIST):
self.names = os.listdir(filename)
self.names.sort()
self.directory = filename
@@ -38,10 +43,12 @@ class InventoryDirectory(object):
self.hosts = {}
self.groups = {}
+ self._loader = loader
+
for i in self.names:
# Skip files that end with certain extensions or characters
- if any(i.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".retry", ".pyc", ".pyo")):
+ if any(i.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo")):
continue
# Skip hidden files
if i.startswith('.') and not i.startswith('./'):
@@ -51,9 +58,9 @@ class InventoryDirectory(object):
continue
fullpath = os.path.join(self.directory, i)
if os.path.isdir(fullpath):
- parser = InventoryDirectory(filename=fullpath)
- elif utils.is_executable(fullpath):
- parser = InventoryScript(filename=fullpath)
+ parser = InventoryDirectory(loader=loader, filename=fullpath)
+ elif is_executable(fullpath):
+ parser = InventoryScript(loader=loader, filename=fullpath)
else:
parser = InventoryParser(filename=fullpath)
self.parsers.append(parser)
@@ -153,7 +160,7 @@ class InventoryDirectory(object):
# name
if group.name != newgroup.name:
- raise errors.AnsibleError("Cannot merge group %s with %s" % (group.name, newgroup.name))
+ raise AnsibleError("Cannot merge group %s with %s" % (group.name, newgroup.name))
# depth
group.depth = max([group.depth, newgroup.depth])
@@ -196,14 +203,14 @@ class InventoryDirectory(object):
self.groups[newparent.name].add_child_group(group)
# variables
- group.vars = utils.combine_vars(group.vars, newgroup.vars)
+ group.vars = combine_vars(group.vars, newgroup.vars)
def _merge_hosts(self,host, newhost):
""" Merge all of instance newhost into host """
# name
if host.name != newhost.name:
- raise errors.AnsibleError("Cannot merge host %s with %s" % (host.name, newhost.name))
+ raise AnsibleError("Cannot merge host %s with %s" % (host.name, newhost.name))
# group membership relation
for newgroup in newhost.groups:
@@ -218,7 +225,7 @@ class InventoryDirectory(object):
self.groups[newgroup.name].add_host(host)
# variables
- host.vars = utils.combine_vars(host.vars, newhost.vars)
+ host.vars = combine_vars(host.vars, newhost.vars)
def get_host_variables(self, host):
""" Gets additional host variables from all inventories """
diff --git a/lib/ansible/inventory/expand_hosts.py b/lib/ansible/inventory/expand_hosts.py
index f129740935..b5a957c53f 100644
--- a/lib/ansible/inventory/expand_hosts.py
+++ b/lib/ansible/inventory/expand_hosts.py
@@ -30,6 +30,9 @@ expanded into 001, 002 ...009, 010.
Note that when beg is specified with left zero padding, then the length of
end must be the same as that of beg, else an exception is raised.
'''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
import string
from ansible import errors
diff --git a/lib/ansible/inventory/group.py b/lib/ansible/inventory/group.py
index 262558e69c..6525e69b46 100644
--- a/lib/ansible/inventory/group.py
+++ b/lib/ansible/inventory/group.py
@@ -14,11 +14,15 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
-class Group(object):
+from ansible.utils.debug import debug
+
+class Group:
''' a group of ansible hosts '''
- __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
+ #__slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
def __init__(self, name=None):
@@ -29,9 +33,49 @@ class Group(object):
self.child_groups = []
self.parent_groups = []
self._hosts_cache = None
+
#self.clear_hosts_cache()
- if self.name is None:
- raise Exception("group name is required")
+ #if self.name is None:
+ # raise Exception("group name is required")
+
+ def __repr__(self):
+ return self.get_name()
+
+ def __getstate__(self):
+ return self.serialize()
+
+ def __setstate__(self, data):
+ return self.deserialize(data)
+
+ def serialize(self):
+ parent_groups = []
+ for parent in self.parent_groups:
+ parent_groups.append(parent.serialize())
+
+ result = dict(
+ name=self.name,
+ vars=self.vars.copy(),
+ parent_groups=parent_groups,
+ depth=self.depth,
+ )
+
+ debug("serializing group, result is: %s" % result)
+ return result
+
+ def deserialize(self, data):
+ debug("deserializing group, data is: %s" % data)
+ self.__init__()
+ self.name = data.get('name')
+ self.vars = data.get('vars', dict())
+
+ parent_groups = data.get('parent_groups', [])
+ for parent_data in parent_groups:
+ g = Group()
+ g.deserialize(parent_data)
+ self.parent_groups.append(g)
+
+ def get_name(self):
+ return self.name
def add_child_group(self, group):
@@ -100,7 +144,7 @@ class Group(object):
hosts.append(mine)
return hosts
- def get_variables(self):
+ def get_vars(self):
return self.vars.copy()
def _get_ancestors(self):
diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py
index d4dc20fa46..29d6afd991 100644
--- a/lib/ansible/inventory/host.py
+++ b/lib/ansible/inventory/host.py
@@ -15,24 +15,88 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-import ansible.constants as C
-from ansible import utils
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
-class Host(object):
+from ansible import constants as C
+from ansible.inventory.group import Group
+from ansible.utils.vars import combine_vars
+
+__all__ = ['Host']
+
+class Host:
''' a single ansible host '''
- __slots__ = [ 'name', 'vars', 'groups' ]
+ #__slots__ = [ 'name', 'vars', 'groups' ]
+
+ def __getstate__(self):
+ return self.serialize()
+
+ def __setstate__(self, data):
+ return self.deserialize(data)
+
+ def __eq__(self, other):
+ return self.name == other.name
+
+ def serialize(self):
+ groups = []
+ for group in self.groups:
+ groups.append(group.serialize())
+
+ return dict(
+ name=self.name,
+ vars=self.vars.copy(),
+ ipv4_address=self.ipv4_address,
+ ipv6_address=self.ipv6_address,
+ port=self.port,
+ gathered_facts=self._gathered_facts,
+ groups=groups,
+ )
+
+ def deserialize(self, data):
+ self.__init__()
+
+ self.name = data.get('name')
+ self.vars = data.get('vars', dict())
+ self.ipv4_address = data.get('ipv4_address', '')
+ self.ipv6_address = data.get('ipv6_address', '')
+ self.port = data.get('port')
+
+ groups = data.get('groups', [])
+ for group_data in groups:
+ g = Group()
+ g.deserialize(group_data)
+ self.groups.append(g)
def __init__(self, name=None, port=None):
self.name = name
self.vars = {}
self.groups = []
- if port and port != C.DEFAULT_REMOTE_PORT:
- self.set_variable('ansible_ssh_port', int(port))
- if self.name is None:
- raise Exception("host name is required")
+ self.ipv4_address = name
+ self.ipv6_address = name
+
+ if port and port != C.DEFAULT_REMOTE_PORT:
+ self.port = int(port)
+ else:
+ self.port = C.DEFAULT_REMOTE_PORT
+
+ self._gathered_facts = False
+
+ def __repr__(self):
+ return self.get_name()
+
+ def get_name(self):
+ return self.name
+
+ @property
+ def gathered_facts(self):
+ return self._gathered_facts
+
+ def set_gathered_facts(self, gathered):
+ self._gathered_facts = gathered
def add_group(self, group):
@@ -52,16 +116,15 @@ class Host(object):
groups[a.name] = a
return groups.values()
- def get_variables(self):
+ def get_vars(self):
results = {}
groups = self.get_groups()
for group in sorted(groups, key=lambda g: g.depth):
- results = utils.combine_vars(results, group.get_variables())
- results = utils.combine_vars(results, self.vars)
+ results = combine_vars(results, group.get_vars())
+ results = combine_vars(results, self.vars)
results['inventory_hostname'] = self.name
results['inventory_hostname_short'] = self.name.split('.')[0]
results['group_names'] = sorted([ g.name for g in groups if g.name != 'all'])
return results
-
diff --git a/lib/ansible/inventory/ini.py b/lib/ansible/inventory/ini.py
index bd9a98e7f8..e004ee8bb7 100644
--- a/lib/ansible/inventory/ini.py
+++ b/lib/ansible/inventory/ini.py
@@ -16,17 +16,20 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
-import ansible.constants as C
+import ast
+import shlex
+import re
+
+from ansible import constants as C
+from ansible.errors import *
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.expand_hosts import detect_range
from ansible.inventory.expand_hosts import expand_hostname_range
-from ansible import errors
-from ansible import utils
-import shlex
-import re
-import ast
+from ansible.utils.unicode import to_unicode
class InventoryParser(object):
"""
@@ -34,9 +37,8 @@ class InventoryParser(object):
"""
def __init__(self, filename=C.DEFAULT_HOST_LIST):
-
+ self.filename = filename
with open(filename) as fh:
- self.filename = filename
self.lines = fh.readlines()
self.groups = {}
self.hosts = {}
@@ -54,10 +56,7 @@ class InventoryParser(object):
def _parse_value(v):
if "#" not in v:
try:
- ret = ast.literal_eval(v)
- if not isinstance(ret, float):
- # Do not trim floats. Eg: "1.20" to 1.2
- return ret
+ v = ast.literal_eval(v)
# Using explicit exceptions.
# Likely a string that literal_eval does not like. We wil then just set it.
except ValueError:
@@ -66,7 +65,7 @@ class InventoryParser(object):
except SyntaxError:
# Is this a hash with an equals at the end?
pass
- return v
+ return to_unicode(v, nonstring='passthru', errors='strict')
# [webservers]
# alpha
@@ -91,8 +90,8 @@ class InventoryParser(object):
self.groups = dict(all=all, ungrouped=ungrouped)
active_group_name = 'ungrouped'
- for lineno in range(len(self.lines)):
- line = utils.before_comment(self.lines[lineno]).strip()
+ for line in self.lines:
+ line = self._before_comment(line).strip()
if line.startswith("[") and line.endswith("]"):
active_group_name = line.replace("[","").replace("]","")
if ":vars" in line or ":children" in line:
@@ -146,8 +145,11 @@ class InventoryParser(object):
try:
(k,v) = t.split("=", 1)
except ValueError, e:
- raise errors.AnsibleError("%s:%s: Invalid ini entry: %s - %s" % (self.filename, lineno + 1, t, str(e)))
- host.set_variable(k, self._parse_value(v))
+ raise AnsibleError("Invalid ini entry in %s: %s - %s" % (self.filename, t, str(e)))
+ if k == 'ansible_ssh_host':
+ host.ipv4_address = self._parse_value(v)
+ else:
+ host.set_variable(k, self._parse_value(v))
self.groups[active_group_name].add_host(host)
# [southeast:children]
@@ -157,8 +159,8 @@ class InventoryParser(object):
def _parse_group_children(self):
group = None
- for lineno in range(len(self.lines)):
- line = self.lines[lineno].strip()
+ for line in self.lines:
+ line = line.strip()
if line is None or line == '':
continue
if line.startswith("[") and ":children]" in line:
@@ -173,7 +175,7 @@ class InventoryParser(object):
elif group:
kid_group = self.groups.get(line, None)
if kid_group is None:
- raise errors.AnsibleError("%s:%d: child group is not defined: (%s)" % (self.filename, lineno + 1, line))
+ raise AnsibleError("child group is not defined: (%s)" % line)
else:
group.add_child_group(kid_group)
@@ -184,13 +186,13 @@ class InventoryParser(object):
def _parse_group_variables(self):
group = None
- for lineno in range(len(self.lines)):
- line = self.lines[lineno].strip()
+ for line in self.lines:
+ line = line.strip()
if line.startswith("[") and ":vars]" in line:
line = line.replace("[","").replace(":vars]","")
group = self.groups.get(line, None)
if group is None:
- raise errors.AnsibleError("%s:%d: can't add vars to undefined group: %s" % (self.filename, lineno + 1, line))
+ raise AnsibleError("can't add vars to undefined group: %s" % line)
elif line.startswith("#") or line.startswith(";"):
pass
elif line.startswith("["):
@@ -199,10 +201,18 @@ class InventoryParser(object):
pass
elif group:
if "=" not in line:
- raise errors.AnsibleError("%s:%d: variables assigned to group must be in key=value form" % (self.filename, lineno + 1))
+ raise AnsibleError("variables assigned to group must be in key=value form")
else:
(k, v) = [e.strip() for e in line.split("=", 1)]
group.set_variable(k, self._parse_value(v))
def get_host_variables(self, host):
return {}
+
+ def _before_comment(self, msg):
+ ''' what's the part of a string before a comment? '''
+ msg = msg.replace("\#","**NOT_A_COMMENT**")
+ msg = msg.split("#")[0]
+ msg = msg.replace("**NOT_A_COMMENT**","#")
+ return msg
+
diff --git a/lib/ansible/inventory/script.py b/lib/ansible/inventory/script.py
index b83cb9bcc7..9675d70f69 100644
--- a/lib/ansible/inventory/script.py
+++ b/lib/ansible/inventory/script.py
@@ -16,22 +16,26 @@
# along with Ansible. If not, see .
#############################################
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
import os
import subprocess
-import ansible.constants as C
+import sys
+
+from ansible import constants as C
+from ansible.errors import *
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.module_utils.basic import json_dict_bytes_to_unicode
-from ansible import utils
-from ansible import errors
-import sys
-class InventoryScript(object):
+class InventoryScript:
''' Host inventory parser for ansible using external inventory scripts. '''
- def __init__(self, filename=C.DEFAULT_HOST_LIST):
+ def __init__(self, loader, filename=C.DEFAULT_HOST_LIST):
+
+ self._loader = loader
# Support inventory scripts that are not prefixed with some
# path information but happen to be in the current working
@@ -41,11 +45,11 @@ class InventoryScript(object):
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError, e:
- raise errors.AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
+ raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(stdout, stderr) = sp.communicate()
if sp.returncode != 0:
- raise errors.AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))
+ raise AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))
self.data = stdout
# see comment about _meta below
@@ -58,7 +62,7 @@ class InventoryScript(object):
all_hosts = {}
# not passing from_remote because data from CMDB is trusted
- self.raw = utils.parse_json(self.data)
+ self.raw = self._loader.load(self.data)
self.raw = json_dict_bytes_to_unicode(self.raw)
all = Group('all')
@@ -68,7 +72,7 @@ class InventoryScript(object):
if 'failed' in self.raw:
sys.stderr.write(err + "\n")
- raise errors.AnsibleError("failed to parse executable inventory script results: %s" % self.raw)
+ raise AnsibleError("failed to parse executable inventory script results: %s" % self.raw)
for (group_name, data) in self.raw.items():
@@ -92,12 +96,12 @@ class InventoryScript(object):
if not isinstance(data, dict):
data = {'hosts': data}
# is not those subkeys, then simplified syntax, host with vars
- elif not any(k in data for k in ('hosts','vars','children')):
+ elif not any(k in data for k in ('hosts','vars')):
data = {'hosts': [group_name], 'vars': data}
if 'hosts' in data:
if not isinstance(data['hosts'], list):
- raise errors.AnsibleError("You defined a group \"%s\" with bad "
+ raise AnsibleError("You defined a group \"%s\" with bad "
"data for the host list:\n %s" % (group_name, data))
for hostname in data['hosts']:
@@ -108,7 +112,7 @@ class InventoryScript(object):
if 'vars' in data:
if not isinstance(data['vars'], dict):
- raise errors.AnsibleError("You defined a group \"%s\" with bad "
+ raise AnsibleError("You defined a group \"%s\" with bad "
"data for variables:\n %s" % (group_name, data))
for k, v in data['vars'].iteritems():
@@ -143,12 +147,12 @@ class InventoryScript(object):
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError, e:
- raise errors.AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
+ raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(out, err) = sp.communicate()
if out.strip() == '':
return dict()
try:
- return json_dict_bytes_to_unicode(utils.parse_json(out))
+ return json_dict_bytes_to_unicode(self._loader.load(out))
except ValueError:
- raise errors.AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
+ raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
diff --git a/lib/ansible/inventory/vars_plugins/noop.py b/lib/ansible/inventory/vars_plugins/noop.py
index 5d4b4b6658..8f0c98cad5 100644
--- a/lib/ansible/inventory/vars_plugins/noop.py
+++ b/lib/ansible/inventory/vars_plugins/noop.py
@@ -15,6 +15,8 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
class VarsModule(object):
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 54a1a9cfff..8f9b03f882 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -43,7 +43,7 @@ BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
# can be inserted in any module source automatically by including
# #<> on a blank line by itself inside
# of an ansible module. The source of this common code lives
-# in lib/ansible/module_common.py
+# in ansible/executor/module_common.py
import locale
import os
@@ -65,6 +65,7 @@ import pwd
import platform
import errno
import tempfile
+from itertools import imap, repeat
try:
import json
@@ -234,7 +235,7 @@ def load_platform_subclass(cls, *args, **kwargs):
return super(cls, subclass).__new__(subclass)
-def json_dict_unicode_to_bytes(d):
+def json_dict_unicode_to_bytes(d, encoding='utf-8'):
''' Recursively convert dict keys and values to byte str
Specialized for json return because this only handles, lists, tuples,
@@ -242,17 +243,17 @@ def json_dict_unicode_to_bytes(d):
'''
if isinstance(d, unicode):
- return d.encode('utf-8')
+ return d.encode(encoding)
elif isinstance(d, dict):
- return dict(map(json_dict_unicode_to_bytes, d.iteritems()))
+ return dict(imap(json_dict_unicode_to_bytes, d.iteritems(), repeat(encoding)))
elif isinstance(d, list):
- return list(map(json_dict_unicode_to_bytes, d))
+ return list(imap(json_dict_unicode_to_bytes, d, repeat(encoding)))
elif isinstance(d, tuple):
- return tuple(map(json_dict_unicode_to_bytes, d))
+ return tuple(imap(json_dict_unicode_to_bytes, d, repeat(encoding)))
else:
return d
-def json_dict_bytes_to_unicode(d):
+def json_dict_bytes_to_unicode(d, encoding='utf-8'):
''' Recursively convert dict keys and values to byte str
Specialized for json return because this only handles, lists, tuples,
@@ -260,13 +261,13 @@ def json_dict_bytes_to_unicode(d):
'''
if isinstance(d, str):
- return unicode(d, 'utf-8')
+ return unicode(d, encoding)
elif isinstance(d, dict):
- return dict(map(json_dict_bytes_to_unicode, d.iteritems()))
+ return dict(imap(json_dict_bytes_to_unicode, d.iteritems(), repeat(encoding)))
elif isinstance(d, list):
- return list(map(json_dict_bytes_to_unicode, d))
+ return list(imap(json_dict_bytes_to_unicode, d, repeat(encoding)))
elif isinstance(d, tuple):
- return tuple(map(json_dict_bytes_to_unicode, d))
+ return tuple(imap(json_dict_bytes_to_unicode, d, repeat(encoding)))
else:
return d
@@ -359,9 +360,9 @@ class AnsibleModule(object):
# reset to LANG=C if it's an invalid/unavailable locale
self._check_locale()
- (self.params, self.args) = self._load_params()
+ self.params = self._load_params()
- self._legal_inputs = ['CHECKMODE', 'NO_LOG']
+ self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log']
self.aliases = self._handle_aliases()
@@ -888,7 +889,7 @@ class AnsibleModule(object):
def _check_for_check_mode(self):
for (k,v) in self.params.iteritems():
- if k == 'CHECKMODE':
+ if k == '_ansible_check_mode':
if not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module does not support check mode")
if self.supports_check_mode:
@@ -896,13 +897,13 @@ class AnsibleModule(object):
def _check_for_no_log(self):
for (k,v) in self.params.iteritems():
- if k == 'NO_LOG':
+ if k == '_ansible_no_log':
self.no_log = self.boolean(v)
def _check_invalid_arguments(self):
for (k,v) in self.params.iteritems():
# these should be in legal inputs already
- #if k in ('CHECKMODE', 'NO_LOG'):
+ #if k in ('_ansible_check_mode', '_ansible_no_log'):
# continue
if k not in self._legal_inputs:
self.fail_json(msg="unsupported parameter for module: %s" % k)
@@ -1075,20 +1076,11 @@ class AnsibleModule(object):
def _load_params(self):
''' read the input and return a dictionary and the arguments string '''
- args = MODULE_ARGS
- items = shlex.split(args)
- params = {}
- for x in items:
- try:
- (k, v) = x.split("=",1)
- except Exception, e:
- self.fail_json(msg="this module requires key=value arguments (%s)" % (items))
- if k in params:
- self.fail_json(msg="duplicate parameter: %s (value=%s)" % (k, v))
- params[k] = v
- params2 = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
- params2.update(params)
- return (params2, args)
+ params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
+ if params is None:
+ params = dict()
+ return params
+
def _log_invocation(self):
''' log that ansible ran the module '''
@@ -1209,13 +1201,17 @@ class AnsibleModule(object):
self.fail_json(msg='Boolean %s not in either boolean list' % arg)
def jsonify(self, data):
- for encoding in ("utf-8", "latin-1", "unicode_escape"):
+ for encoding in ("utf-8", "latin-1"):
try:
return json.dumps(data, encoding=encoding)
- # Old systems using simplejson module does not support encoding keyword.
- except TypeError, e:
- return json.dumps(data)
- except UnicodeDecodeError, e:
+ # Old systems using an old simplejson module do not support the encoding keyword.
+ except TypeError:
+ try:
+ new_data = json_dict_bytes_to_unicode(data, encoding=encoding)
+ except UnicodeDecodeError:
+ continue
+ return json.dumps(new_data)
+ except UnicodeDecodeError:
continue
self.fail_json(msg='Invalid unicode encoding encountered')
@@ -1452,7 +1448,7 @@ class AnsibleModule(object):
msg = None
st_in = None
- # Set a temporart env path if a prefix is passed
+ # Set a temporary env path if a prefix is passed
env=os.environ
if path_prefix:
env['PATH']="%s:%s" % (path_prefix, env['PATH'])
diff --git a/lib/ansible/module_utils/powershell.ps1 b/lib/ansible/module_utils/powershell.ps1
index ee7d3ddeca..57d2c1b101 100644
--- a/lib/ansible/module_utils/powershell.ps1
+++ b/lib/ansible/module_utils/powershell.ps1
@@ -142,14 +142,14 @@ Function ConvertTo-Bool
return
}
-# Helper function to calculate a hash of a file in a way which powershell 3
+# Helper function to calculate md5 of a file in a way which powershell 3
# and above can handle:
-Function Get-FileChecksum($path)
+Function Get-FileMd5($path)
{
$hash = ""
If (Test-Path -PathType Leaf $path)
{
- $sp = new-object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider;
+ $sp = new-object -TypeName System.Security.Cryptography.MD5CryptoServiceProvider;
$fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read);
[System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
$fp.Dispose();
diff --git a/lib/ansible/modules/__init__.py b/lib/ansible/modules/__init__.py
index e69de29bb2..ae8ccff595 100644
--- a/lib/ansible/modules/__init__.py
+++ b/lib/ansible/modules/__init__.py
@@ -0,0 +1,20 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
deleted file mode 160000
index 9028e9d4be..0000000000
--- a/lib/ansible/modules/core
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 9028e9d4be8a3dbb96c81a799e18f3adf63d9fd0
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
deleted file mode 160000
index dd80fa221c..0000000000
--- a/lib/ansible/modules/extras
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit dd80fa221ce0adb3abd658fbd1aa09bf7cf8a6dc
diff --git a/v2/ansible/new_inventory/__init__.py b/lib/ansible/new_inventory/__init__.py
similarity index 100%
rename from v2/ansible/new_inventory/__init__.py
rename to lib/ansible/new_inventory/__init__.py
diff --git a/v2/ansible/new_inventory/group.py b/lib/ansible/new_inventory/group.py
similarity index 100%
rename from v2/ansible/new_inventory/group.py
rename to lib/ansible/new_inventory/group.py
diff --git a/v2/ansible/new_inventory/host.py b/lib/ansible/new_inventory/host.py
similarity index 100%
rename from v2/ansible/new_inventory/host.py
rename to lib/ansible/new_inventory/host.py
diff --git a/v2/ansible/parsing/__init__.py b/lib/ansible/parsing/__init__.py
similarity index 100%
rename from v2/ansible/parsing/__init__.py
rename to lib/ansible/parsing/__init__.py
diff --git a/v2/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
similarity index 100%
rename from v2/ansible/parsing/mod_args.py
rename to lib/ansible/parsing/mod_args.py
diff --git a/v2/ansible/parsing/splitter.py b/lib/ansible/parsing/splitter.py
similarity index 100%
rename from v2/ansible/parsing/splitter.py
rename to lib/ansible/parsing/splitter.py
diff --git a/v2/ansible/parsing/utils/__init__.py b/lib/ansible/parsing/utils/__init__.py
similarity index 100%
rename from v2/ansible/parsing/utils/__init__.py
rename to lib/ansible/parsing/utils/__init__.py
diff --git a/v2/ansible/parsing/utils/jsonify.py b/lib/ansible/parsing/utils/jsonify.py
similarity index 100%
rename from v2/ansible/parsing/utils/jsonify.py
rename to lib/ansible/parsing/utils/jsonify.py
diff --git a/v2/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py
similarity index 100%
rename from v2/ansible/parsing/vault/__init__.py
rename to lib/ansible/parsing/vault/__init__.py
diff --git a/v2/ansible/parsing/yaml/__init__.py b/lib/ansible/parsing/yaml/__init__.py
similarity index 100%
rename from v2/ansible/parsing/yaml/__init__.py
rename to lib/ansible/parsing/yaml/__init__.py
diff --git a/v2/ansible/parsing/yaml/constructor.py b/lib/ansible/parsing/yaml/constructor.py
similarity index 100%
rename from v2/ansible/parsing/yaml/constructor.py
rename to lib/ansible/parsing/yaml/constructor.py
diff --git a/v2/ansible/parsing/yaml/loader.py b/lib/ansible/parsing/yaml/loader.py
similarity index 100%
rename from v2/ansible/parsing/yaml/loader.py
rename to lib/ansible/parsing/yaml/loader.py
diff --git a/v2/ansible/parsing/yaml/objects.py b/lib/ansible/parsing/yaml/objects.py
similarity index 100%
rename from v2/ansible/parsing/yaml/objects.py
rename to lib/ansible/parsing/yaml/objects.py
diff --git a/lib/ansible/playbook/__init__.py b/lib/ansible/playbook/__init__.py
index 24ba2d3c6e..40e6638f23 100644
--- a/lib/ansible/playbook/__init__.py
+++ b/lib/ansible/playbook/__init__.py
@@ -15,860 +15,71 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-import ansible.inventory
-import ansible.constants as C
-import ansible.runner
-from ansible.utils.template import template
-from ansible import utils
-from ansible import errors
-from ansible.module_utils.splitter import split_args, unquote
-import ansible.callbacks
-import ansible.cache
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
import os
-import shlex
-import collections
-from play import Play
-import StringIO
-import pipes
-# the setup cache stores all variables about a host
-# gathered during the setup step, while the vars cache
-# holds all other variables about a host
-SETUP_CACHE = ansible.cache.FactCache()
-VARS_CACHE = collections.defaultdict(dict)
-RESERVED_TAGS = ['all','tagged','untagged','always']
+from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.parsing import DataLoader
+from ansible.playbook.attribute import Attribute, FieldAttribute
+from ansible.playbook.play import Play
+from ansible.playbook.playbook_include import PlaybookInclude
+from ansible.plugins import push_basedir
-class PlayBook(object):
- '''
- runs an ansible playbook, given as a datastructure or YAML filename.
- A playbook is a deployment, config management, or automation based
- set of commands to run in series.
+__all__ = ['Playbook']
- multiple plays/tasks do not execute simultaneously, but tasks in each
- pattern do execute in parallel (according to the number of forks
- requested) among the hosts they address
- '''
- # *****************************************************
+class Playbook:
- def __init__(self,
- playbook = None,
- host_list = C.DEFAULT_HOST_LIST,
- module_path = None,
- forks = C.DEFAULT_FORKS,
- timeout = C.DEFAULT_TIMEOUT,
- remote_user = C.DEFAULT_REMOTE_USER,
- remote_pass = C.DEFAULT_REMOTE_PASS,
- remote_port = None,
- transport = C.DEFAULT_TRANSPORT,
- private_key_file = C.DEFAULT_PRIVATE_KEY_FILE,
- callbacks = None,
- runner_callbacks = None,
- stats = None,
- extra_vars = None,
- only_tags = None,
- skip_tags = None,
- subset = C.DEFAULT_SUBSET,
- inventory = None,
- check = False,
- diff = False,
- any_errors_fatal = False,
- vault_password = False,
- force_handlers = False,
- # privilege escalation
- become = C.DEFAULT_BECOME,
- become_method = C.DEFAULT_BECOME_METHOD,
- become_user = C.DEFAULT_BECOME_USER,
- become_pass = None,
- ):
+ def __init__(self, loader):
+ # Entries in the datastructure of a playbook may
+ # be either a play or an include statement
+ self._entries = []
+ self._basedir = os.getcwd()
+ self._loader = loader
- """
- playbook: path to a playbook file
- host_list: path to a file like /etc/ansible/hosts
- module_path: path to ansible modules, like /usr/share/ansible/
- forks: desired level of parallelism
- timeout: connection timeout
- remote_user: run as this user if not specified in a particular play
- remote_pass: use this remote password (for all plays) vs using SSH keys
- remote_port: default remote port to use if not specified with the host or play
- transport: how to connect to hosts that don't specify a transport (local, paramiko, etc)
- callbacks output callbacks for the playbook
- runner_callbacks: more callbacks, this time for the runner API
- stats: holds aggregrate data about events occurring to each host
- inventory: can be specified instead of host_list to use a pre-existing inventory object
- check: don't change anything, just try to detect some potential changes
- any_errors_fatal: terminate the entire execution immediately when one of the hosts has failed
- force_handlers: continue to notify and run handlers even if a task fails
- """
+ @staticmethod
+ def load(file_name, variable_manager=None, loader=None):
+ pb = Playbook(loader=loader)
+ pb._load_playbook_data(file_name=file_name, variable_manager=variable_manager)
+ return pb
- self.SETUP_CACHE = SETUP_CACHE
- self.VARS_CACHE = VARS_CACHE
+ def _load_playbook_data(self, file_name, variable_manager):
- arguments = []
- if playbook is None:
- arguments.append('playbook')
- if callbacks is None:
- arguments.append('callbacks')
- if runner_callbacks is None:
- arguments.append('runner_callbacks')
- if stats is None:
- arguments.append('stats')
- if arguments:
- raise Exception('PlayBook missing required arguments: %s' % ', '.join(arguments))
-
- if extra_vars is None:
- extra_vars = {}
- if only_tags is None:
- only_tags = [ 'all' ]
- if skip_tags is None:
- skip_tags = []
-
- self.check = check
- self.diff = diff
- self.module_path = module_path
- self.forks = forks
- self.timeout = timeout
- self.remote_user = remote_user
- self.remote_pass = remote_pass
- self.remote_port = remote_port
- self.transport = transport
- self.callbacks = callbacks
- self.runner_callbacks = runner_callbacks
- self.stats = stats
- self.extra_vars = extra_vars
- self.global_vars = {}
- self.private_key_file = private_key_file
- self.only_tags = only_tags
- self.skip_tags = skip_tags
- self.any_errors_fatal = any_errors_fatal
- self.vault_password = vault_password
- self.force_handlers = force_handlers
-
- self.become = become
- self.become_method = become_method
- self.become_user = become_user
- self.become_pass = become_pass
-
- self.callbacks.playbook = self
- self.runner_callbacks.playbook = self
-
- if inventory is None:
- self.inventory = ansible.inventory.Inventory(host_list)
- self.inventory.subset(subset)
+ if os.path.isabs(file_name):
+ self._basedir = os.path.dirname(file_name)
else:
- self.inventory = inventory
+ self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))
- if self.module_path is not None:
- utils.plugins.module_finder.add_directory(self.module_path)
+ # set the loaders basedir
+ self._loader.set_basedir(self._basedir)
- self.basedir = os.path.dirname(playbook) or '.'
- utils.plugins.push_basedir(self.basedir)
+ # also add the basedir to the list of module directories
+ push_basedir(self._basedir)
- # let inventory know the playbook basedir so it can load more vars
- self.inventory.set_playbook_basedir(self.basedir)
+ ds = self._loader.load_from_file(os.path.basename(file_name))
+ if not isinstance(ds, list):
+ raise AnsibleParserError("playbooks must be a list of plays", obj=ds)
- vars = extra_vars.copy()
- vars['playbook_dir'] = os.path.abspath(self.basedir)
- if self.inventory.basedir() is not None:
- vars['inventory_dir'] = self.inventory.basedir()
-
- if self.inventory.src() is not None:
- vars['inventory_file'] = self.inventory.src()
-
- self.filename = playbook
- (self.playbook, self.play_basedirs) = self._load_playbook_from_file(playbook, vars)
- ansible.callbacks.load_callback_plugins()
- ansible.callbacks.set_playbook(self.callbacks, self)
-
- self._ansible_version = utils.version_info(gitinfo=True)
-
- # *****************************************************
-
- def _get_playbook_vars(self, play_ds, existing_vars):
- '''
- Gets the vars specified with the play and blends them
- with any existing vars that have already been read in
- '''
- new_vars = existing_vars.copy()
- if 'vars' in play_ds:
- if isinstance(play_ds['vars'], dict):
- new_vars.update(play_ds['vars'])
- elif isinstance(play_ds['vars'], list):
- for v in play_ds['vars']:
- new_vars.update(v)
- return new_vars
-
- # *****************************************************
-
- def _get_include_info(self, play_ds, basedir, existing_vars={}):
- '''
- Gets any key=value pairs specified with the included file
- name and returns the merged vars along with the path
- '''
- new_vars = existing_vars.copy()
- tokens = split_args(play_ds.get('include', ''))
- for t in tokens[1:]:
- try:
- (k,v) = unquote(t).split("=", 1)
- new_vars[k] = template(basedir, v, new_vars)
- except ValueError, e:
- raise errors.AnsibleError('included playbook variables must be in the form k=v, got: %s' % t)
-
- return (new_vars, unquote(tokens[0]))
-
- # *****************************************************
-
- def _get_playbook_vars_files(self, play_ds, existing_vars_files):
- new_vars_files = list(existing_vars_files)
- if 'vars_files' in play_ds:
- new_vars_files = utils.list_union(new_vars_files, play_ds['vars_files'])
- return new_vars_files
-
- # *****************************************************
-
- def _extend_play_vars(self, play, vars={}):
- '''
- Extends the given play's variables with the additional specified vars.
- '''
-
- if 'vars' not in play or not play['vars']:
- # someone left out or put an empty "vars:" entry in their playbook
- return vars.copy()
-
- play_vars = None
- if isinstance(play['vars'], dict):
- play_vars = play['vars'].copy()
- play_vars.update(vars)
- elif isinstance(play['vars'], list):
- # nobody should really do this, but handle vars: a=1 b=2
- play_vars = play['vars'][:]
- play_vars.extend([{k:v} for k,v in vars.iteritems()])
-
- return play_vars
-
- # *****************************************************
-
- def _load_playbook_from_file(self, path, vars={}, vars_files=[]):
- '''
- run top level error checking on playbooks and allow them to include other playbooks.
- '''
-
- playbook_data = utils.parse_yaml_from_file(path, vault_password=self.vault_password)
- accumulated_plays = []
- play_basedirs = []
-
- if type(playbook_data) != list:
- raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list, got %s" % type(playbook_data))
-
- basedir = os.path.dirname(path) or '.'
- utils.plugins.push_basedir(basedir)
- for play in playbook_data:
- if type(play) != dict:
- raise errors.AnsibleError("parse error: each play in a playbook must be a YAML dictionary (hash), received: %s" % play)
-
- if 'include' in play:
- # a playbook (list of plays) decided to include some other list of plays
- # from another file. The result is a flat list of plays in the end.
-
- play_vars = self._get_playbook_vars(play, vars)
- play_vars_files = self._get_playbook_vars_files(play, vars_files)
- inc_vars, inc_path = self._get_include_info(play, basedir, play_vars)
- play_vars.update(inc_vars)
-
- included_path = utils.path_dwim(basedir, template(basedir, inc_path, play_vars))
- (plays, basedirs) = self._load_playbook_from_file(included_path, vars=play_vars, vars_files=play_vars_files)
- for p in plays:
- # support for parameterized play includes works by passing
- # those variables along to the subservient play
- p['vars'] = self._extend_play_vars(p, play_vars)
- # now add in the vars_files
- p['vars_files'] = utils.list_union(p.get('vars_files', []), play_vars_files)
-
- accumulated_plays.extend(plays)
- play_basedirs.extend(basedirs)
+ # Parse the playbook entries. For plays, we simply parse them
+ # using the Play() object, and includes are parsed using the
+ # PlaybookInclude() object
+ for entry in ds:
+ if not isinstance(entry, dict):
+ raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)
+ if 'include' in entry:
+ pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
+ self._entries.extend(pb._entries)
else:
+ entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
+ self._entries.append(entry_obj)
- # this is a normal (non-included play)
- accumulated_plays.append(play)
- play_basedirs.append(basedir)
+ def get_loader(self):
+ return self._loader
- return (accumulated_plays, play_basedirs)
-
- # *****************************************************
-
- def run(self):
- ''' run all patterns in the playbook '''
- plays = []
- matched_tags_all = set()
- unmatched_tags_all = set()
-
- # loop through all patterns and run them
- self.callbacks.on_start()
- for (play_ds, play_basedir) in zip(self.playbook, self.play_basedirs):
- play = Play(self, play_ds, play_basedir, vault_password=self.vault_password)
- assert play is not None
-
- matched_tags, unmatched_tags = play.compare_tags(self.only_tags)
-
- matched_tags_all = matched_tags_all | matched_tags
- unmatched_tags_all = unmatched_tags_all | unmatched_tags
-
- # Remove tasks we wish to skip
- matched_tags = matched_tags - set(self.skip_tags)
-
- # if we have matched_tags, the play must be run.
- # if the play contains no tasks, assume we just want to gather facts
- # in this case there are actually 3 meta tasks (handler flushes) not 0
- # tasks, so that's why there's a check against 3
- if (len(matched_tags) > 0 or len(play.tasks()) == 3):
- plays.append(play)
-
- # if the playbook is invoked with --tags or --skip-tags that don't
- # exist at all in the playbooks then we need to raise an error so that
- # the user can correct the arguments.
- unknown_tags = ((set(self.only_tags) | set(self.skip_tags)) -
- (matched_tags_all | unmatched_tags_all))
-
- for t in RESERVED_TAGS:
- unknown_tags.discard(t)
-
- if len(unknown_tags) > 0:
- for t in RESERVED_TAGS:
- unmatched_tags_all.discard(t)
- msg = 'tag(s) not found in playbook: %s. possible values: %s'
- unknown = ','.join(sorted(unknown_tags))
- unmatched = ','.join(sorted(unmatched_tags_all))
- raise errors.AnsibleError(msg % (unknown, unmatched))
-
- for play in plays:
- ansible.callbacks.set_play(self.callbacks, play)
- ansible.callbacks.set_play(self.runner_callbacks, play)
- if not self._run_play(play):
- break
-
- ansible.callbacks.set_play(self.callbacks, None)
- ansible.callbacks.set_play(self.runner_callbacks, None)
-
- # summarize the results
- results = {}
- for host in self.stats.processed.keys():
- results[host] = self.stats.summarize(host)
- return results
-
- # *****************************************************
-
- def _async_poll(self, poller, async_seconds, async_poll_interval):
- ''' launch an async job, if poll_interval is set, wait for completion '''
-
- results = poller.wait(async_seconds, async_poll_interval)
-
- # mark any hosts that are still listed as started as failed
- # since these likely got killed by async_wrapper
- for host in poller.hosts_to_poll:
- reason = { 'failed' : 1, 'rc' : None, 'msg' : 'timed out' }
- self.runner_callbacks.on_async_failed(host, reason, poller.runner.vars_cache[host]['ansible_job_id'])
- results['contacted'][host] = reason
-
- return results
-
- # *****************************************************
-
- def _trim_unavailable_hosts(self, hostlist=[], keep_failed=False):
- ''' returns a list of hosts that haven't failed and aren't dark '''
-
- return [ h for h in hostlist if (keep_failed or h not in self.stats.failures) and (h not in self.stats.dark)]
-
- # *****************************************************
-
- def _run_task_internal(self, task, include_failed=False):
- ''' run a particular module step in a playbook '''
-
- hosts = self._trim_unavailable_hosts(self.inventory.list_hosts(task.play._play_hosts), keep_failed=include_failed)
- self.inventory.restrict_to(hosts)
-
- runner = ansible.runner.Runner(
- pattern=task.play.hosts,
- inventory=self.inventory,
- module_name=task.module_name,
- module_args=task.module_args,
- forks=self.forks,
- remote_pass=self.remote_pass,
- module_path=self.module_path,
- timeout=self.timeout,
- remote_user=task.remote_user,
- remote_port=task.play.remote_port,
- module_vars=task.module_vars,
- play_vars=task.play_vars,
- play_file_vars=task.play_file_vars,
- role_vars=task.role_vars,
- role_params=task.role_params,
- default_vars=task.default_vars,
- extra_vars=self.extra_vars,
- private_key_file=self.private_key_file,
- setup_cache=self.SETUP_CACHE,
- vars_cache=self.VARS_CACHE,
- basedir=task.play.basedir,
- conditional=task.when,
- callbacks=self.runner_callbacks,
- transport=task.transport,
- is_playbook=True,
- check=self.check,
- diff=self.diff,
- environment=task.environment,
- complex_args=task.args,
- accelerate=task.play.accelerate,
- accelerate_port=task.play.accelerate_port,
- accelerate_ipv6=task.play.accelerate_ipv6,
- error_on_undefined_vars=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR,
- vault_pass = self.vault_password,
- run_hosts=hosts,
- no_log=task.no_log,
- run_once=task.run_once,
- become=task.become,
- become_method=task.become_method,
- become_user=task.become_user,
- become_pass=task.become_pass,
- )
-
- runner.module_vars.update({'play_hosts': hosts})
- runner.module_vars.update({'ansible_version': self._ansible_version})
-
- if task.async_seconds == 0:
- results = runner.run()
- else:
- results, poller = runner.run_async(task.async_seconds)
- self.stats.compute(results)
- if task.async_poll_interval > 0:
- # if not polling, playbook requested fire and forget, so don't poll
- results = self._async_poll(poller, task.async_seconds, task.async_poll_interval)
- else:
- for (host, res) in results.get('contacted', {}).iteritems():
- self.runner_callbacks.on_async_ok(host, res, poller.runner.vars_cache[host]['ansible_job_id'])
-
- contacted = results.get('contacted',{})
- dark = results.get('dark', {})
-
- self.inventory.lift_restriction()
-
- if len(contacted.keys()) == 0 and len(dark.keys()) == 0:
- return None
-
- return results
-
- # *****************************************************
-
- def _run_task(self, play, task, is_handler):
- ''' run a single task in the playbook and recursively run any subtasks. '''
-
- ansible.callbacks.set_task(self.callbacks, task)
- ansible.callbacks.set_task(self.runner_callbacks, task)
-
- if task.role_name:
- name = '%s | %s' % (task.role_name, task.name)
- else:
- name = task.name
-
- try:
- # v1 HACK: we don't have enough information to template many names
- # at this point. Rather than making this work for all cases in
- # v1, just make this degrade gracefully. Will fix in v2
- name = template(play.basedir, name, task.module_vars, lookup_fatal=False, filter_fatal=False)
- except:
- pass
-
- self.callbacks.on_task_start(name, is_handler)
- if hasattr(self.callbacks, 'skip_task') and self.callbacks.skip_task:
- ansible.callbacks.set_task(self.callbacks, None)
- ansible.callbacks.set_task(self.runner_callbacks, None)
- return True
-
- # template ignore_errors
- # TODO: Is this needed here? cond is templated again in
- # check_conditional after some more manipulations.
- # TODO: we don't have enough information here to template cond either
- # (see note on templating name above)
- cond = template(play.basedir, task.ignore_errors, task.module_vars, expand_lists=False)
- task.ignore_errors = utils.check_conditional(cond, play.basedir, task.module_vars, fail_on_undefined=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR)
-
- # load up an appropriate ansible runner to run the task in parallel
- include_failed = is_handler and play.force_handlers
- results = self._run_task_internal(task, include_failed=include_failed)
-
- # if no hosts are matched, carry on
- hosts_remaining = True
- if results is None:
- hosts_remaining = False
- results = {}
-
- contacted = results.get('contacted', {})
- self.stats.compute(results, ignore_errors=task.ignore_errors)
-
- def _register_play_vars(host, result):
- # when 'register' is used, persist the result in the vars cache
- # rather than the setup cache - vars should be transient between
- # playbook executions
- if 'stdout' in result and 'stdout_lines' not in result:
- result['stdout_lines'] = result['stdout'].splitlines()
- utils.update_hash(self.VARS_CACHE, host, {task.register: result})
-
- def _save_play_facts(host, facts):
- # saves play facts in SETUP_CACHE, unless the module executed was
- # set_fact, in which case we add them to the VARS_CACHE
- if task.module_name in ('set_fact', 'include_vars'):
- utils.update_hash(self.VARS_CACHE, host, facts)
- else:
- utils.update_hash(self.SETUP_CACHE, host, facts)
-
- # add facts to the global setup cache
- for host, result in contacted.iteritems():
- if 'results' in result:
- # task ran with_ lookup plugin, so facts are encapsulated in
- # multiple list items in the results key
- for res in result['results']:
- if type(res) == dict:
- facts = res.get('ansible_facts', {})
- _save_play_facts(host, facts)
- else:
- # when facts are returned, persist them in the setup cache
- facts = result.get('ansible_facts', {})
- _save_play_facts(host, facts)
-
- # if requested, save the result into the registered variable name
- if task.register:
- _register_play_vars(host, result)
-
- # also have to register some failed, but ignored, tasks
- if task.ignore_errors and task.register:
- failed = results.get('failed', {})
- for host, result in failed.iteritems():
- _register_play_vars(host, result)
-
- # flag which notify handlers need to be run
- if len(task.notify) > 0:
- for host, results in results.get('contacted',{}).iteritems():
- if results.get('changed', False):
- for handler_name in task.notify:
- self._flag_handler(play, template(play.basedir, handler_name, task.module_vars), host)
-
- ansible.callbacks.set_task(self.callbacks, None)
- ansible.callbacks.set_task(self.runner_callbacks, None)
- return hosts_remaining
-
- # *****************************************************
-
- def _flag_handler(self, play, handler_name, host):
- '''
- if a task has any notify elements, flag handlers for run
- at end of execution cycle for hosts that have indicated
- changes have been made
- '''
-
- found = False
- for x in play.handlers():
- if handler_name == template(play.basedir, x.name, x.module_vars):
- found = True
- self.callbacks.on_notify(host, x.name)
- x.notified_by.append(host)
- if not found:
- raise errors.AnsibleError("change handler (%s) is not defined" % handler_name)
-
- # *****************************************************
-
- def _do_setup_step(self, play):
- ''' get facts from the remote system '''
-
- host_list = self._trim_unavailable_hosts(play._play_hosts)
-
- if play.gather_facts is None and C.DEFAULT_GATHERING == 'smart':
- host_list = [h for h in host_list if h not in self.SETUP_CACHE or 'module_setup' not in self.SETUP_CACHE[h]]
- if len(host_list) == 0:
- return {}
- elif play.gather_facts is False or (play.gather_facts is None and C.DEFAULT_GATHERING == 'explicit'):
- return {}
-
- self.callbacks.on_setup()
- self.inventory.restrict_to(host_list)
-
- ansible.callbacks.set_task(self.callbacks, None)
- ansible.callbacks.set_task(self.runner_callbacks, None)
-
- # push any variables down to the system
- setup_results = ansible.runner.Runner(
- basedir=self.basedir,
- pattern=play.hosts,
- module_name='setup',
- module_args={},
- inventory=self.inventory,
- forks=self.forks,
- module_path=self.module_path,
- timeout=self.timeout,
- remote_user=play.remote_user,
- remote_pass=self.remote_pass,
- remote_port=play.remote_port,
- private_key_file=self.private_key_file,
- setup_cache=self.SETUP_CACHE,
- vars_cache=self.VARS_CACHE,
- callbacks=self.runner_callbacks,
- become=play.become,
- become_method=play.become_method,
- become_user=play.become_user,
- become_pass=self.become_pass,
- vault_pass=self.vault_password,
- transport=play.transport,
- is_playbook=True,
- module_vars=play.vars,
- play_vars=play.vars,
- play_file_vars=play.vars_file_vars,
- role_vars=play.role_vars,
- default_vars=play.default_vars,
- check=self.check,
- diff=self.diff,
- accelerate=play.accelerate,
- accelerate_port=play.accelerate_port,
- ).run()
- self.stats.compute(setup_results, setup=True)
-
- self.inventory.lift_restriction()
-
- # now for each result, load into the setup cache so we can
- # let runner template out future commands
- setup_ok = setup_results.get('contacted', {})
- for (host, result) in setup_ok.iteritems():
- utils.update_hash(self.SETUP_CACHE, host, {'module_setup': True})
- utils.update_hash(self.SETUP_CACHE, host, result.get('ansible_facts', {}))
- return setup_results
-
- # *****************************************************
-
-
- def generate_retry_inventory(self, replay_hosts):
- '''
- called by /usr/bin/ansible when a playbook run fails. It generates an inventory
- that allows re-running on ONLY the failed hosts. This may duplicate some
- variable information in group_vars/host_vars but that is ok, and expected.
- '''
-
- buf = StringIO.StringIO()
- for x in replay_hosts:
- buf.write("%s\n" % x)
- basedir = C.shell_expand_path(C.RETRY_FILES_SAVE_PATH)
- filename = "%s.retry" % os.path.basename(self.filename)
- filename = filename.replace(".yml","")
- filename = os.path.join(basedir, filename)
-
- try:
- if not os.path.exists(basedir):
- os.makedirs(basedir)
-
- fd = open(filename, 'w')
- fd.write(buf.getvalue())
- fd.close()
- except:
- ansible.callbacks.display(
- "\nERROR: could not create retry file. Check the value of \n"
- + "the configuration variable 'retry_files_save_path' or set \n"
- + "'retry_files_enabled' to False to avoid this message.\n",
- color='red'
- )
- return None
-
- return filename
-
- # *****************************************************
- def tasks_to_run_in_play(self, play):
-
- tasks = []
-
- for task in play.tasks():
- # only run the task if the requested tags match or has 'always' tag
- u = set(['untagged'])
- task_set = set(task.tags)
-
- if 'always' in task.tags:
- should_run = True
- else:
- if 'all' in self.only_tags:
- should_run = True
- else:
- should_run = False
- if 'tagged' in self.only_tags:
- if task_set != u:
- should_run = True
- elif 'untagged' in self.only_tags:
- if task_set == u:
- should_run = True
- else:
- if task_set.intersection(self.only_tags):
- should_run = True
-
- # Check for tags that we need to skip
- if 'all' in self.skip_tags:
- should_run = False
- else:
- if 'tagged' in self.skip_tags:
- if task_set != u:
- should_run = False
- elif 'untagged' in self.skip_tags:
- if task_set == u:
- should_run = False
- else:
- if should_run:
- if task_set.intersection(self.skip_tags):
- should_run = False
-
- if should_run:
- tasks.append(task)
-
- return tasks
-
- # *****************************************************
- def _run_play(self, play):
- ''' run a list of tasks for a given pattern, in order '''
-
- self.callbacks.on_play_start(play.name)
- # Get the hosts for this play
- play._play_hosts = self.inventory.list_hosts(play.hosts)
- # if no hosts matches this play, drop out
- if not play._play_hosts:
- self.callbacks.on_no_hosts_matched()
- return True
-
- # get facts from system
- self._do_setup_step(play)
-
- # now with that data, handle contentional variable file imports!
- all_hosts = self._trim_unavailable_hosts(play._play_hosts)
- play.update_vars_files(all_hosts, vault_password=self.vault_password)
- hosts_count = len(all_hosts)
-
- if play.serial.endswith("%"):
-
- # This is a percentage, so calculate it based on the
- # number of hosts
- serial_pct = int(play.serial.replace("%",""))
- serial = int((serial_pct/100.0) * len(all_hosts))
-
- # Ensure that no matter how small the percentage, serial
- # can never fall below 1, so that things actually happen
- serial = max(serial, 1)
- else:
- serial = int(play.serial)
-
- serialized_batch = []
- if serial <= 0:
- serialized_batch = [all_hosts]
- else:
- # do N forks all the way through before moving to next
- while len(all_hosts) > 0:
- play_hosts = []
- for x in range(serial):
- if len(all_hosts) > 0:
- play_hosts.append(all_hosts.pop(0))
- serialized_batch.append(play_hosts)
-
- task_errors = False
- for on_hosts in serialized_batch:
-
- # restrict the play to just the hosts we have in our on_hosts block that are
- # available.
- play._play_hosts = self._trim_unavailable_hosts(on_hosts)
- self.inventory.also_restrict_to(on_hosts)
-
- for task in self.tasks_to_run_in_play(play):
-
- if task.meta is not None:
- # meta tasks can force handlers to run mid-play
- if task.meta == 'flush_handlers':
- self.run_handlers(play)
-
- # skip calling the handler till the play is finished
- continue
-
- if not self._run_task(play, task, False):
- # whether no hosts matched is fatal or not depends if it was on the initial step.
- # if we got exactly no hosts on the first step (setup!) then the host group
- # just didn't match anything and that's ok
- return False
-
- # Get a new list of what hosts are left as available, the ones that
- # did not go fail/dark during the task
- host_list = self._trim_unavailable_hosts(play._play_hosts)
-
- # Set max_fail_pct to 0, So if any hosts fails, bail out
- if task.any_errors_fatal and len(host_list) < hosts_count:
- play.max_fail_pct = 0
-
- # If threshold for max nodes failed is exceeded, bail out.
- if play.serial > 0:
- # if serial is set, we need to shorten the size of host_count
- play_count = len(play._play_hosts)
- if (play_count - len(host_list)) > int((play.max_fail_pct)/100.0 * play_count):
- host_list = None
- else:
- if (hosts_count - len(host_list)) > int((play.max_fail_pct)/100.0 * hosts_count):
- host_list = None
-
- # if no hosts remain, drop out
- if not host_list:
- if play.force_handlers:
- task_errors = True
- break
- else:
- self.callbacks.on_no_hosts_remaining()
- return False
-
- # lift restrictions after each play finishes
- self.inventory.lift_also_restriction()
-
- if task_errors and not play.force_handlers:
- # if there were failed tasks and handler execution
- # is not forced, quit the play with an error
- return False
- else:
- # no errors, go ahead and execute all handlers
- if not self.run_handlers(play):
- return False
-
- return True
-
-
- def run_handlers(self, play):
- on_hosts = play._play_hosts
- hosts_count = len(on_hosts)
- for task in play.tasks():
- if task.meta is not None:
-
- fired_names = {}
- for handler in play.handlers():
- if len(handler.notified_by) > 0:
- self.inventory.restrict_to(handler.notified_by)
-
- # Resolve the variables first
- handler_name = template(play.basedir, handler.name, handler.module_vars)
- if handler_name not in fired_names:
- self._run_task(play, handler, True)
- # prevent duplicate handler includes from running more than once
- fired_names[handler_name] = 1
-
- host_list = self._trim_unavailable_hosts(play._play_hosts)
- if handler.any_errors_fatal and len(host_list) < hosts_count:
- play.max_fail_pct = 0
- if (hosts_count - len(host_list)) > int((play.max_fail_pct)/100.0 * hosts_count):
- host_list = None
- if not host_list and not play.force_handlers:
- self.callbacks.on_no_hosts_remaining()
- return False
-
- self.inventory.lift_restriction()
- new_list = handler.notified_by[:]
- for host in handler.notified_by:
- if host in on_hosts:
- while host in new_list:
- new_list.remove(host)
- handler.notified_by = new_list
-
- continue
-
- return True
+ def get_plays(self):
+ return self._entries[:]
diff --git a/v2/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py
similarity index 100%
rename from v2/ansible/playbook/attribute.py
rename to lib/ansible/playbook/attribute.py
diff --git a/v2/ansible/playbook/base.py b/lib/ansible/playbook/base.py
similarity index 100%
rename from v2/ansible/playbook/base.py
rename to lib/ansible/playbook/base.py
diff --git a/v2/ansible/playbook/become.py b/lib/ansible/playbook/become.py
similarity index 100%
rename from v2/ansible/playbook/become.py
rename to lib/ansible/playbook/become.py
diff --git a/v2/ansible/playbook/block.py b/lib/ansible/playbook/block.py
similarity index 100%
rename from v2/ansible/playbook/block.py
rename to lib/ansible/playbook/block.py
diff --git a/v2/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py
similarity index 100%
rename from v2/ansible/playbook/conditional.py
rename to lib/ansible/playbook/conditional.py
diff --git a/v2/ansible/playbook/handler.py b/lib/ansible/playbook/handler.py
similarity index 100%
rename from v2/ansible/playbook/handler.py
rename to lib/ansible/playbook/handler.py
diff --git a/v2/ansible/playbook/helpers.py b/lib/ansible/playbook/helpers.py
similarity index 100%
rename from v2/ansible/playbook/helpers.py
rename to lib/ansible/playbook/helpers.py
diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py
index 6ee85e0bf4..b99c01fdf7 100644
--- a/lib/ansible/playbook/play.py
+++ b/lib/ansible/playbook/play.py
@@ -15,935 +15,249 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-#############################################
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
-from ansible.utils.template import template
-from ansible import utils
-from ansible import errors
-from ansible.playbook.task import Task
-from ansible.module_utils.splitter import split_args, unquote
-import ansible.constants as C
-import pipes
-import shlex
-import os
-import sys
-import uuid
+from ansible.errors import AnsibleError, AnsibleParserError
+
+from ansible.playbook.attribute import Attribute, FieldAttribute
+from ansible.playbook.base import Base
+from ansible.playbook.become import Become
+from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles
+from ansible.playbook.role import Role
+from ansible.playbook.taggable import Taggable
+from ansible.playbook.block import Block
+
+from ansible.utils.vars import combine_vars
-class Play(object):
+__all__ = ['Play']
- _pb_common = [
- 'accelerate', 'accelerate_ipv6', 'accelerate_port', 'any_errors_fatal', 'become',
- 'become_method', 'become_user', 'environment', 'force_handlers', 'gather_facts',
- 'handlers', 'hosts', 'name', 'no_log', 'remote_user', 'roles', 'serial', 'su',
- 'su_user', 'sudo', 'sudo_user', 'tags', 'vars', 'vars_files', 'vars_prompt',
- 'vault_password',
- ]
- __slots__ = _pb_common + [
- '_ds', '_handlers', '_play_hosts', '_tasks', 'any_errors_fatal', 'basedir',
- 'default_vars', 'included_roles', 'max_fail_pct', 'playbook', 'remote_port',
- 'role_vars', 'transport', 'vars_file_vars',
- ]
+class Play(Base, Taggable, Become):
- # to catch typos and so forth -- these are userland names
- # and don't line up 1:1 with how they are stored
- VALID_KEYS = frozenset(_pb_common + [
- 'connection', 'include', 'max_fail_percentage', 'port', 'post_tasks',
- 'pre_tasks', 'role_names', 'tasks', 'user',
- ])
+ """
+ A play is a language feature that represents a list of roles and/or
+ task/handler blocks to execute on a given set of hosts.
- # *************************************************
+ Usage:
- def __init__(self, playbook, ds, basedir, vault_password=None):
- ''' constructor loads from a play datastructure '''
+ Play.load(datastructure) -> Play
+ Play.something(...)
+ """
- for x in ds.keys():
- if not x in Play.VALID_KEYS:
- raise errors.AnsibleError("%s is not a legal parameter of an Ansible Play" % x)
+ # =================================================================================
+ # Connection-Related Attributes
- # allow all playbook keys to be set by --extra-vars
- self.vars = ds.get('vars', {})
- self.vars_prompt = ds.get('vars_prompt', {})
- self.playbook = playbook
- self.vars = self._get_vars()
- self.vars_file_vars = dict() # these are vars read in from vars_files:
- self.role_vars = dict() # these are vars read in from vars/main.yml files in roles
- self.basedir = basedir
- self.roles = ds.get('roles', None)
- self.tags = ds.get('tags', None)
- self.vault_password = vault_password
- self.environment = ds.get('environment', {})
+ # TODO: generalize connection
+ _accelerate = FieldAttribute(isa='bool', default=False)
+ _accelerate_ipv6 = FieldAttribute(isa='bool', default=False)
+ _accelerate_port = FieldAttribute(isa='int', default=5099) # should be alias of port
- if self.tags is None:
- self.tags = []
- elif type(self.tags) in [ str, unicode ]:
- self.tags = self.tags.split(",")
- elif type(self.tags) != list:
- self.tags = []
+ # Connection
+ _gather_facts = FieldAttribute(isa='string', default='smart')
+ _hosts = FieldAttribute(isa='list', default=[], required=True)
+ _name = FieldAttribute(isa='string', default='')
- # make sure we have some special internal variables set, which
- # we use later when loading tasks and handlers
- load_vars = dict()
- load_vars['playbook_dir'] = os.path.abspath(self.basedir)
- if self.playbook.inventory.basedir() is not None:
- load_vars['inventory_dir'] = self.playbook.inventory.basedir()
- if self.playbook.inventory.src() is not None:
- load_vars['inventory_file'] = self.playbook.inventory.src()
+ # Variable Attributes
+ _vars_files = FieldAttribute(isa='list', default=[])
+ _vars_prompt = FieldAttribute(isa='dict', default=dict())
+ _vault_password = FieldAttribute(isa='string')
- # We first load the vars files from the datastructure
- # so we have the default variables to pass into the roles
- self.vars_files = ds.get('vars_files', [])
- if not isinstance(self.vars_files, list):
- raise errors.AnsibleError('vars_files must be a list')
- processed_vars_files = self._update_vars_files_for_host(None)
+ # Block (Task) Lists Attributes
+ _handlers = FieldAttribute(isa='list', default=[])
+ _pre_tasks = FieldAttribute(isa='list', default=[])
+ _post_tasks = FieldAttribute(isa='list', default=[])
+ _tasks = FieldAttribute(isa='list', default=[])
- # now we load the roles into the datastructure
- self.included_roles = []
- ds = self._load_roles(self.roles, ds)
+ # Role Attributes
+ _roles = FieldAttribute(isa='list', default=[])
- # and finally re-process the vars files as they may have been updated
- # by the included roles, but exclude any which have been processed
- self.vars_files = utils.list_difference(ds.get('vars_files', []), processed_vars_files)
- if not isinstance(self.vars_files, list):
- raise errors.AnsibleError('vars_files must be a list')
+ # Flag/Setting Attributes
+ _any_errors_fatal = FieldAttribute(isa='bool', default=False)
+ _max_fail_percentage = FieldAttribute(isa='string', default='0')
+ _serial = FieldAttribute(isa='int', default=0)
+ _strategy = FieldAttribute(isa='string', default='linear')
- self._update_vars_files_for_host(None)
+ # =================================================================================
- # template everything to be efficient, but do not pre-mature template
- # tasks/handlers as they may have inventory scope overrides. We also
- # create a set of temporary variables for templating, so we don't
- # trample on the existing vars structures
- _tasks = ds.pop('tasks', [])
- _handlers = ds.pop('handlers', [])
+ def __init__(self):
+ super(Play, self).__init__()
- temp_vars = utils.combine_vars(self.vars, self.vars_file_vars)
- temp_vars = utils.combine_vars(temp_vars, self.playbook.extra_vars)
+ def __repr__(self):
+ return self.get_name()
+
+ def get_name(self):
+ ''' return the name of the Play '''
+ return "PLAY: %s" % self._attributes.get('name')
+
+ @staticmethod
+ def load(data, variable_manager=None, loader=None):
+ p = Play()
+ return p.load_data(data, variable_manager=variable_manager, loader=loader)
+
+ def preprocess_data(self, ds):
+ '''
+ Adjusts play datastructure to cleanup old/legacy items
+ '''
+
+ assert isinstance(ds, dict)
+
+ # The use of 'user' in the Play datastructure was deprecated to
+ # line up with the same change for Tasks, due to the fact that
+ # 'user' conflicted with the user module.
+ if 'user' in ds:
+ # this should never happen, but error out with a helpful message
+ # to the user if it does...
+ if 'remote_user' in ds:
+ raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
+
+ ds['remote_user'] = ds['user']
+ del ds['user']
+
+ return super(Play, self).preprocess_data(ds)
+
+ def _load_vars(self, attr, ds):
+ '''
+ Vars in a play can be specified either as a dictionary directly, or
+ as a list of dictionaries. If the later, this method will turn the
+ list into a single dictionary.
+ '''
try:
- ds = template(basedir, ds, temp_vars)
- except errors.AnsibleError, e:
- utils.warning("non fatal error while trying to template play variables: %s" % (str(e)))
+ if isinstance(ds, dict):
+ return ds
+ elif isinstance(ds, list):
+ all_vars = dict()
+ for item in ds:
+ if not isinstance(item, dict):
+ raise ValueError
+ all_vars = combine_vars(all_vars, item)
+ return all_vars
+ else:
+ raise ValueError
+ except ValueError:
+ raise AnsibleParserError("Vars in a playbook must be specified as a dictionary, or a list of dictionaries", obj=ds)
- ds['tasks'] = _tasks
- ds['handlers'] = _handlers
+ def _load_tasks(self, attr, ds):
+ '''
+ Loads a list of blocks from a list which may be mixed tasks/blocks.
+ Bare tasks outside of a block are given an implicit block.
+ '''
+ return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
- self._ds = ds
+ def _load_pre_tasks(self, attr, ds):
+ '''
+ Loads a list of blocks from a list which may be mixed tasks/blocks.
+ Bare tasks outside of a block are given an implicit block.
+ '''
+ return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
- hosts = ds.get('hosts')
- if hosts is None:
- raise errors.AnsibleError('hosts declaration is required')
- elif isinstance(hosts, list):
- try:
- hosts = ';'.join(hosts)
- except TypeError,e:
- raise errors.AnsibleError('improper host declaration: %s' % str(e))
+ def _load_post_tasks(self, attr, ds):
+ '''
+ Loads a list of blocks from a list which may be mixed tasks/blocks.
+ Bare tasks outside of a block are given an implicit block.
+ '''
+ return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
- self.serial = str(ds.get('serial', 0))
- self.hosts = hosts
- self.name = ds.get('name', self.hosts)
- self._tasks = ds.get('tasks', [])
- self._handlers = ds.get('handlers', [])
- self.remote_user = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
- self.remote_port = ds.get('port', self.playbook.remote_port)
- self.transport = ds.get('connection', self.playbook.transport)
- self.remote_port = self.remote_port
- self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
- self.accelerate = utils.boolean(ds.get('accelerate', 'false'))
- self.accelerate_port = ds.get('accelerate_port', None)
- self.accelerate_ipv6 = ds.get('accelerate_ipv6', False)
- self.max_fail_pct = int(ds.get('max_fail_percentage', 100))
- self.no_log = utils.boolean(ds.get('no_log', 'false'))
- self.force_handlers = utils.boolean(ds.get('force_handlers', self.playbook.force_handlers))
+ def _load_handlers(self, attr, ds):
+ '''
+ Loads a list of blocks from a list which may be mixed handlers/blocks.
+ Bare handlers outside of a block are given an implicit block.
+ '''
+ return load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader)
- # Fail out if user specifies conflicting privilege escalations
- if (ds.get('become') or ds.get('become_user')) and (ds.get('sudo') or ds.get('sudo_user')):
- raise errors.AnsibleError('sudo params ("become", "become_user") and su params ("sudo", "sudo_user") cannot be used together')
- if (ds.get('become') or ds.get('become_user')) and (ds.get('su') or ds.get('su_user')):
- raise errors.AnsibleError('sudo params ("become", "become_user") and su params ("su", "su_user") cannot be used together')
- if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su') or ds.get('su_user')):
- raise errors.AnsibleError('sudo params ("sudo", "sudo_user") and su params ("su", "su_user") cannot be used together')
+ def _load_roles(self, attr, ds):
+ '''
+ Loads and returns a list of RoleInclude objects from the datastructure
+ list of role definitions and creates the Role from those objects
+ '''
- # become settings are inherited and updated normally
- self.become = ds.get('become', self.playbook.become)
- self.become_method = ds.get('become_method', self.playbook.become_method)
- self.become_user = ds.get('become_user', self.playbook.become_user)
+ role_includes = load_list_of_roles(ds, variable_manager=self._variable_manager, loader=self._loader)
- # Make sure current play settings are reflected in become fields
- if 'sudo' in ds:
- self.become=ds['sudo']
- self.become_method='sudo'
- if 'sudo_user' in ds:
- self.become_user=ds['sudo_user']
- elif 'su' in ds:
- self.become=True
- self.become=ds['su']
- self.become_method='su'
- if 'su_user' in ds:
- self.become_user=ds['su_user']
+ roles = []
+ for ri in role_includes:
+ roles.append(Role.load(ri))
+ return roles
- # gather_facts is not a simple boolean, as None means that a 'smart'
- # fact gathering mode will be used, so we need to be careful here as
- # calling utils.boolean(None) returns False
- self.gather_facts = ds.get('gather_facts', None)
- if self.gather_facts is not None:
- self.gather_facts = utils.boolean(self.gather_facts)
+ # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set
- load_vars['role_names'] = ds.get('role_names', [])
+ def _compile_roles(self):
+ '''
+ Handles the role compilation step, returning a flat list of tasks
+ with the lowest level dependencies first. For example, if a role R
+ has a dependency D1, which also has a dependency D2, the tasks from
+ D2 are merged first, followed by D1, and lastly by the tasks from
+ the parent role R last. This is done for all roles in the Play.
+ '''
- self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars)
- self._handlers = self._load_tasks(self._ds.get('handlers', []), load_vars)
+ block_list = []
- # apply any missing tags to role tasks
- self._late_merge_role_tags()
+ if len(self.roles) > 0:
+ for r in self.roles:
+ block_list.extend(r.compile(play=self))
- # place holder for the discovered hosts to be used in this play
- self._play_hosts = None
+ return block_list
- # *************************************************
+ def compile(self):
+ '''
+ Compiles and returns the task list for this play, compiled from the
+ roles (which are themselves compiled recursively) and/or the list of
+ tasks specified in the play.
+ '''
- def _get_role_path(self, role):
- """
- Returns the path on disk to the directory containing
- the role directories like tasks, templates, etc. Also
- returns any variables that were included with the role
- """
- orig_path = template(self.basedir,role,self.vars)
+ block_list = []
- role_vars = {}
- if type(orig_path) == dict:
- # what, not a path?
- role_name = orig_path.get('role', None)
- if role_name is None:
- raise errors.AnsibleError("expected a role name in dictionary: %s" % orig_path)
- role_vars = orig_path
- else:
- role_name = utils.role_spec_parse(orig_path)["name"]
+ block_list.extend(self.pre_tasks)
+ block_list.extend(self._compile_roles())
+ block_list.extend(self.tasks)
+ block_list.extend(self.post_tasks)
- role_path = None
+ return block_list
- possible_paths = [
- utils.path_dwim(self.basedir, os.path.join('roles', role_name)),
- utils.path_dwim(self.basedir, role_name)
- ]
+ def get_vars(self):
+ return self.vars.copy()
- if C.DEFAULT_ROLES_PATH:
- search_locations = C.DEFAULT_ROLES_PATH.split(os.pathsep)
- for loc in search_locations:
- loc = os.path.expanduser(loc)
- possible_paths.append(utils.path_dwim(loc, role_name))
+ def get_vars_files(self):
+ return self.vars_files
- for path_option in possible_paths:
- if os.path.isdir(path_option):
- role_path = path_option
- break
+ def get_handlers(self):
+ return self.handlers[:]
- if role_path is None:
- raise errors.AnsibleError("cannot find role in %s" % " or ".join(possible_paths))
+ def get_roles(self):
+ return self.roles[:]
- return (role_path, role_vars)
+ def get_tasks(self):
+ tasklist = []
+ for task in self.pre_tasks + self.tasks + self.post_tasks:
+ if isinstance(task, Block):
+ tasklist.append(task.block + task.rescue + task.always)
+ else:
+ tasklist.append(task)
+ return tasklist
- def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
- # this number is arbitrary, but it seems sane
- if level > 20:
- raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
- for role in roles:
- role_path,role_vars = self._get_role_path(role)
+ def serialize(self):
+ data = super(Play, self).serialize()
- # save just the role params for this role, which exclude the special
- # keywords 'role', 'tags', and 'when'.
- role_params = role_vars.copy()
- for item in ('role', 'tags', 'when'):
- if item in role_params:
- del role_params[item]
+ roles = []
+ for role in self.get_roles():
+ roles.append(role.serialize())
+ data['roles'] = roles
- role_vars = utils.combine_vars(passed_vars, role_vars)
+ return data
- vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
- vars_data = {}
- if os.path.isfile(vars):
- vars_data = utils.parse_yaml_from_file(vars, vault_password=self.vault_password)
- if vars_data:
- if not isinstance(vars_data, dict):
- raise errors.AnsibleError("vars from '%s' are not a dict" % vars)
- role_vars = utils.combine_vars(vars_data, role_vars)
+ def deserialize(self, data):
+ super(Play, self).deserialize(data)
- defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
- defaults_data = {}
- if os.path.isfile(defaults):
- defaults_data = utils.parse_yaml_from_file(defaults, vault_password=self.vault_password)
-
- # the meta directory contains the yaml that should
- # hold the list of dependencies (if any)
- meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
- if os.path.isfile(meta):
- data = utils.parse_yaml_from_file(meta, vault_password=self.vault_password)
- if data:
- dependencies = data.get('dependencies',[])
- if dependencies is None:
- dependencies = []
- for dep in dependencies:
- allow_dupes = False
- (dep_path,dep_vars) = self._get_role_path(dep)
-
- # save the dep params, just as we did above
- dep_params = dep_vars.copy()
- for item in ('role', 'tags', 'when'):
- if item in dep_params:
- del dep_params[item]
-
- meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
- if os.path.isfile(meta):
- meta_data = utils.parse_yaml_from_file(meta, vault_password=self.vault_password)
- if meta_data:
- allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))
-
- # if any tags were specified as role/dep variables, merge
- # them into the current dep_vars so they're passed on to any
- # further dependencies too, and so we only have one place
- # (dep_vars) to look for tags going forward
- def __merge_tags(var_obj):
- old_tags = dep_vars.get('tags', [])
- if isinstance(old_tags, basestring):
- old_tags = [old_tags, ]
- if isinstance(var_obj, dict):
- new_tags = var_obj.get('tags', [])
- if isinstance(new_tags, basestring):
- new_tags = [new_tags, ]
- else:
- new_tags = []
- return list(set(old_tags).union(set(new_tags)))
-
- dep_vars['tags'] = __merge_tags(role_vars)
- dep_vars['tags'] = __merge_tags(passed_vars)
-
- # if tags are set from this role, merge them
- # into the tags list for the dependent role
- if "tags" in passed_vars:
- for included_role_dep in dep_stack:
- included_dep_name = included_role_dep[0]
- included_dep_vars = included_role_dep[2]
- if included_dep_name == dep:
- if "tags" in included_dep_vars:
- included_dep_vars["tags"] = list(set(included_dep_vars["tags"]).union(set(passed_vars["tags"])))
- else:
- included_dep_vars["tags"] = passed_vars["tags"][:]
-
- dep_vars = utils.combine_vars(passed_vars, dep_vars)
- dep_vars = utils.combine_vars(role_vars, dep_vars)
-
- vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
- vars_data = {}
- if os.path.isfile(vars):
- vars_data = utils.parse_yaml_from_file(vars, vault_password=self.vault_password)
- if vars_data:
- dep_vars = utils.combine_vars(dep_vars, vars_data)
- pass
-
- defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
- dep_defaults_data = {}
- if os.path.isfile(defaults):
- dep_defaults_data = utils.parse_yaml_from_file(defaults, vault_password=self.vault_password)
- if 'role' in dep_vars:
- del dep_vars['role']
-
- if not allow_dupes:
- if dep in self.included_roles:
- # skip back to the top, since we don't want to
- # do anything else with this role
- continue
- else:
- self.included_roles.append(dep)
-
- def _merge_conditional(cur_conditionals, new_conditionals):
- if isinstance(new_conditionals, (basestring, bool)):
- cur_conditionals.append(new_conditionals)
- elif isinstance(new_conditionals, list):
- cur_conditionals.extend(new_conditionals)
-
- # pass along conditionals from roles to dep roles
- passed_when = passed_vars.get('when')
- role_when = role_vars.get('when')
- dep_when = dep_vars.get('when')
-
- tmpcond = []
- _merge_conditional(tmpcond, passed_when)
- _merge_conditional(tmpcond, role_when)
- _merge_conditional(tmpcond, dep_when)
-
- if len(tmpcond) > 0:
- dep_vars['when'] = tmpcond
-
- self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
- dep_stack.append([dep, dep_path, dep_vars, dep_params, dep_defaults_data])
-
- # only add the current role when we're at the top level,
- # otherwise we'll end up in a recursive loop
- if level == 0:
- self.included_roles.append(role)
- dep_stack.append([role, role_path, role_vars, role_params, defaults_data])
- return dep_stack
-
- def _load_role_vars_files(self, vars_files):
- # process variables stored in vars/main.yml files
- role_vars = {}
- for filename in vars_files:
- if os.path.exists(filename):
- new_vars = utils.parse_yaml_from_file(filename, vault_password=self.vault_password)
- if new_vars:
- if type(new_vars) != dict:
- raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename, type(new_vars)))
- role_vars = utils.combine_vars(role_vars, new_vars)
-
- return role_vars
-
- def _load_role_defaults(self, defaults_files):
- # process default variables
- default_vars = {}
- for filename in defaults_files:
- if os.path.exists(filename):
- new_default_vars = utils.parse_yaml_from_file(filename, vault_password=self.vault_password)
- if new_default_vars:
- if type(new_default_vars) != dict:
- raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename, type(new_default_vars)))
- default_vars = utils.combine_vars(default_vars, new_default_vars)
-
- return default_vars
-
- def _load_roles(self, roles, ds):
- # a role is a name that auto-includes the following if they exist
- # /tasks/main.yml
- # /handlers/main.yml
- # /vars/main.yml
- # /library
- # and it auto-extends tasks/handlers/vars_files/module paths as appropriate if found
-
- if roles is None:
+ if 'roles' in data:
+ role_data = data.get('roles', [])
roles = []
- if type(roles) != list:
- raise errors.AnsibleError("value of 'roles:' must be a list")
+ for role in role_data:
+ r = Role()
+ r.deserialize(role)
+ roles.append(r)
- new_tasks = []
- new_handlers = []
- role_vars_files = []
- defaults_files = []
+ setattr(self, 'roles', roles)
+ del data['roles']
- pre_tasks = ds.get('pre_tasks', None)
- if type(pre_tasks) != list:
- pre_tasks = []
- for x in pre_tasks:
- new_tasks.append(x)
-
- # flush handlers after pre_tasks
- new_tasks.append(dict(meta='flush_handlers'))
-
- roles = self._build_role_dependencies(roles, [], {})
-
- # give each role an uuid and
- # make role_path available as variable to the task
- for idx, val in enumerate(roles):
- this_uuid = str(uuid.uuid4())
- roles[idx][-3]['role_uuid'] = this_uuid
- roles[idx][-3]['role_path'] = roles[idx][1]
-
- role_names = []
-
- for (role, role_path, role_vars, role_params, default_vars) in roles:
- # special vars must be extracted from the dict to the included tasks
- special_keys = [ "sudo", "sudo_user", "when", "with_items", "su", "su_user", "become", "become_user" ]
- special_vars = {}
- for k in special_keys:
- if k in role_vars:
- special_vars[k] = role_vars[k]
-
- task_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'tasks'))
- handler_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'handlers'))
- vars_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'vars'))
- meta_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'meta'))
- defaults_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults'))
-
- task = self._resolve_main(task_basepath)
- handler = self._resolve_main(handler_basepath)
- vars_file = self._resolve_main(vars_basepath)
- meta_file = self._resolve_main(meta_basepath)
- defaults_file = self._resolve_main(defaults_basepath)
-
- library = utils.path_dwim(self.basedir, os.path.join(role_path, 'library'))
-
- missing = lambda f: not os.path.isfile(f)
- if missing(task) and missing(handler) and missing(vars_file) and missing(defaults_file) and missing(meta_file) and not os.path.isdir(library):
- raise errors.AnsibleError("found role at %s, but cannot find %s or %s or %s or %s or %s or %s" % (role_path, task, handler, vars_file, defaults_file, meta_file, library))
-
- if isinstance(role, dict):
- role_name = role['role']
- else:
- role_name = utils.role_spec_parse(role)["name"]
-
- role_names.append(role_name)
- if os.path.isfile(task):
- nt = dict(include=pipes.quote(task), vars=role_vars, role_params=role_params, default_vars=default_vars, role_name=role_name)
- for k in special_keys:
- if k in special_vars:
- nt[k] = special_vars[k]
- new_tasks.append(nt)
- if os.path.isfile(handler):
- nt = dict(include=pipes.quote(handler), vars=role_vars, role_params=role_params, role_name=role_name)
- for k in special_keys:
- if k in special_vars:
- nt[k] = special_vars[k]
- new_handlers.append(nt)
- if os.path.isfile(vars_file):
- role_vars_files.append(vars_file)
- if os.path.isfile(defaults_file):
- defaults_files.append(defaults_file)
- if os.path.isdir(library):
- utils.plugins.module_finder.add_directory(library)
-
- tasks = ds.get('tasks', None)
- post_tasks = ds.get('post_tasks', None)
- handlers = ds.get('handlers', None)
- vars_files = ds.get('vars_files', None)
-
- if type(tasks) != list:
- tasks = []
- if type(handlers) != list:
- handlers = []
- if type(vars_files) != list:
- vars_files = []
- if type(post_tasks) != list:
- post_tasks = []
-
- new_tasks.extend(tasks)
- # flush handlers after tasks + role tasks
- new_tasks.append(dict(meta='flush_handlers'))
- new_tasks.extend(post_tasks)
- # flush handlers after post tasks
- new_tasks.append(dict(meta='flush_handlers'))
-
- new_handlers.extend(handlers)
-
- ds['tasks'] = new_tasks
- ds['handlers'] = new_handlers
- ds['role_names'] = role_names
-
- self.role_vars = self._load_role_vars_files(role_vars_files)
- self.default_vars = self._load_role_defaults(defaults_files)
-
- return ds
-
- # *************************************************
-
- def _resolve_main(self, basepath):
- ''' flexibly handle variations in main filenames '''
- # these filenames are acceptable:
- mains = (
- os.path.join(basepath, 'main'),
- os.path.join(basepath, 'main.yml'),
- os.path.join(basepath, 'main.yaml'),
- os.path.join(basepath, 'main.json'),
- )
- if sum([os.path.isfile(x) for x in mains]) > 1:
- raise errors.AnsibleError("found multiple main files at %s, only one allowed" % (basepath))
- else:
- for m in mains:
- if os.path.isfile(m):
- return m # exactly one main file
- return mains[0] # zero mains (we still need to return something)
-
- # *************************************************
-
- def _load_tasks(self, tasks, vars=None, role_params=None, default_vars=None, become_vars=None,
- additional_conditions=None, original_file=None, role_name=None):
- ''' handle task and handler include statements '''
-
- results = []
- if tasks is None:
- # support empty handler files, and the like.
- tasks = []
- if additional_conditions is None:
- additional_conditions = []
- if vars is None:
- vars = {}
- if role_params is None:
- role_params = {}
- if default_vars is None:
- default_vars = {}
- if become_vars is None:
- become_vars = {}
-
- old_conditions = list(additional_conditions)
-
- for x in tasks:
-
- # prevent assigning the same conditions to each task on an include
- included_additional_conditions = list(old_conditions)
-
- if not isinstance(x, dict):
- raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))
-
- # evaluate privilege escalation vars for current and child tasks
- included_become_vars = {}
- for k in ["become", "become_user", "become_method", "become_exe", "sudo", "su", "sudo_user", "su_user"]:
- if k in x:
- included_become_vars[k] = x[k]
- elif k in become_vars:
- included_become_vars[k] = become_vars[k]
- x[k] = become_vars[k]
-
- task_vars = vars.copy()
- if original_file:
- task_vars['_original_file'] = original_file
-
- if 'meta' in x:
- if x['meta'] == 'flush_handlers':
- if role_name and 'role_name' not in x:
- x['role_name'] = role_name
- results.append(Task(self, x, module_vars=task_vars, role_name=role_name))
- continue
-
- if 'include' in x:
- tokens = split_args(str(x['include']))
- included_additional_conditions = list(additional_conditions)
- include_vars = {}
- for k in x:
- if k.startswith("with_"):
- if original_file:
- offender = " (in %s)" % original_file
- else:
- offender = ""
- utils.deprecated("include + with_items is a removed deprecated feature" + offender, "1.5", removed=True)
- elif k.startswith("when_"):
- utils.deprecated("\"when_:\" is a removed deprecated feature, use the simplified 'when:' conditional directly", None, removed=True)
- elif k == 'when':
- if isinstance(x[k], (basestring, bool)):
- included_additional_conditions.append(x[k])
- elif type(x[k]) is list:
- included_additional_conditions.extend(x[k])
- elif k in ("include", "vars", "role_params", "default_vars", "sudo", "sudo_user", "role_name", "no_log", "become", "become_user", "su", "su_user"):
- continue
- else:
- include_vars[k] = x[k]
-
- # get any role parameters specified
- role_params = x.get('role_params', {})
-
- # get any role default variables specified
- default_vars = x.get('default_vars', {})
- if not default_vars:
- default_vars = self.default_vars
- else:
- default_vars = utils.combine_vars(self.default_vars, default_vars)
-
- # append the vars defined with the include (from above)
- # as well as the old-style 'vars' element. The old-style
- # vars are given higher precedence here (just in case)
- task_vars = utils.combine_vars(task_vars, include_vars)
- if 'vars' in x:
- task_vars = utils.combine_vars(task_vars, x['vars'])
-
- new_role = None
- if 'role_name' in x:
- new_role = x['role_name']
-
- mv = task_vars.copy()
- for t in tokens[1:]:
- (k,v) = t.split("=", 1)
- v = unquote(v)
- mv[k] = template(self.basedir, v, mv)
- dirname = self.basedir
- if original_file:
- dirname = os.path.dirname(original_file)
-
- # temp vars are used here to avoid trampling on the existing vars structures
- temp_vars = utils.combine_vars(self.vars, self.vars_file_vars)
- temp_vars = utils.combine_vars(temp_vars, mv)
- temp_vars = utils.combine_vars(temp_vars, self.playbook.extra_vars)
- include_file = template(dirname, tokens[0], temp_vars)
- include_filename = utils.path_dwim(dirname, include_file)
-
- data = utils.parse_yaml_from_file(include_filename, vault_password=self.vault_password)
- if 'role_name' in x and data is not None:
- for y in data:
- if isinstance(y, dict) and 'include' in y:
- y['role_name'] = new_role
- loaded = self._load_tasks(data, mv, role_params, default_vars, included_become_vars, list(included_additional_conditions), original_file=include_filename, role_name=new_role)
- results += loaded
- elif type(x) == dict:
- task = Task(
- self, x,
- module_vars=task_vars,
- play_vars=self.vars,
- play_file_vars=self.vars_file_vars,
- role_vars=self.role_vars,
- role_params=role_params,
- default_vars=default_vars,
- additional_conditions=list(additional_conditions),
- role_name=role_name
- )
- results.append(task)
- else:
- raise Exception("unexpected task type")
-
- for x in results:
- if self.tags is not None:
- x.tags.extend(self.tags)
-
- return results
-
- # *************************************************
-
- def tasks(self):
- ''' return task objects for this play '''
- return self._tasks
-
- def handlers(self):
- ''' return handler objects for this play '''
- return self._handlers
-
- # *************************************************
-
- def _get_vars(self):
- ''' load the vars section from a play, accounting for all sorts of variable features
- including loading from yaml files, prompting, and conditional includes of the first
- file found in a list. '''
-
- if self.vars is None:
- self.vars = {}
-
- if type(self.vars) not in [dict, list]:
- raise errors.AnsibleError("'vars' section must contain only key/value pairs")
-
- vars = {}
-
- # translate a list of vars into a dict
- if type(self.vars) == list:
- for item in self.vars:
- if getattr(item, 'items', None) is None:
- raise errors.AnsibleError("expecting a key-value pair in 'vars' section")
- k, v = item.items()[0]
- vars[k] = v
- else:
- vars.update(self.vars)
-
- if type(self.vars_prompt) == list:
- for var in self.vars_prompt:
- if not 'name' in var:
- raise errors.AnsibleError("'vars_prompt' item is missing 'name:'")
-
- vname = var['name']
- prompt = var.get("prompt", vname)
- default = var.get("default", None)
- private = var.get("private", True)
-
- confirm = var.get("confirm", False)
- encrypt = var.get("encrypt", None)
- salt_size = var.get("salt_size", None)
- salt = var.get("salt", None)
-
- if vname not in self.playbook.extra_vars:
- vars[vname] = self.playbook.callbacks.on_vars_prompt(
- vname, private, prompt, encrypt, confirm, salt_size, salt, default
- )
-
- elif type(self.vars_prompt) == dict:
- for (vname, prompt) in self.vars_prompt.iteritems():
- prompt_msg = "%s: " % prompt
- if vname not in self.playbook.extra_vars:
- vars[vname] = self.playbook.callbacks.on_vars_prompt(
- varname=vname, private=False, prompt=prompt_msg, default=None
- )
-
- else:
- raise errors.AnsibleError("'vars_prompt' section is malformed, see docs")
-
- if type(self.playbook.extra_vars) == dict:
- vars = utils.combine_vars(vars, self.playbook.extra_vars)
-
- return vars
-
- # *************************************************
-
- def update_vars_files(self, hosts, vault_password=None):
- ''' calculate vars_files, which requires that setup runs first so ansible facts can be mixed in '''
-
- # now loop through all the hosts...
- for h in hosts:
- self._update_vars_files_for_host(h, vault_password=vault_password)
-
- # *************************************************
-
- def compare_tags(self, tags):
- ''' given a list of tags that the user has specified, return two lists:
- matched_tags: tags were found within the current play and match those given
- by the user
- unmatched_tags: tags that were found within the current play but do not match
- any provided by the user '''
-
- # gather all the tags in all the tasks and handlers into one list
- # FIXME: isn't this in self.tags already?
-
- all_tags = []
- for task in self._tasks:
- if not task.meta:
- all_tags.extend(task.tags)
- for handler in self._handlers:
- all_tags.extend(handler.tags)
-
- # compare the lists of tags using sets and return the matched and unmatched
- all_tags_set = set(all_tags)
- tags_set = set(tags)
-
- matched_tags = all_tags_set.intersection(tags_set)
- unmatched_tags = all_tags_set.difference(tags_set)
-
- a = set(['always'])
- u = set(['untagged'])
- if 'always' in all_tags_set:
- matched_tags = matched_tags.union(a)
- unmatched_tags = all_tags_set.difference(a)
-
- if 'all' in tags_set:
- matched_tags = matched_tags.union(all_tags_set)
- unmatched_tags = set()
-
- if 'tagged' in tags_set:
- matched_tags = all_tags_set.difference(u)
- unmatched_tags = u
-
- if 'untagged' in tags_set and 'untagged' in all_tags_set:
- matched_tags = matched_tags.union(u)
- unmatched_tags = unmatched_tags.difference(u)
-
- return matched_tags, unmatched_tags
-
- # *************************************************
-
- def _late_merge_role_tags(self):
- # build a local dict of tags for roles
- role_tags = {}
- for task in self._ds['tasks']:
- if 'role_name' in task:
- this_role = task['role_name'] + "-" + task['vars']['role_uuid']
-
- if this_role not in role_tags:
- role_tags[this_role] = []
-
- if 'tags' in task['vars']:
- if isinstance(task['vars']['tags'], basestring):
- role_tags[this_role] += shlex.split(task['vars']['tags'])
- else:
- role_tags[this_role] += task['vars']['tags']
-
- # apply each role's tags to its tasks
- for idx, val in enumerate(self._tasks):
- if getattr(val, 'role_name', None) is not None:
- this_role = val.role_name + "-" + val.module_vars['role_uuid']
- if this_role in role_tags:
- self._tasks[idx].tags = sorted(set(self._tasks[idx].tags + role_tags[this_role]))
-
- # *************************************************
-
- def _update_vars_files_for_host(self, host, vault_password=None):
-
- def generate_filenames(host, inject, filename):
-
- """ Render the raw filename into 3 forms """
-
- # filename2 is the templated version of the filename, which will
- # be fully rendered if any variables contained within it are
- # non-inventory related
- filename2 = template(self.basedir, filename, self.vars)
-
- # filename3 is the same as filename2, but when the host object is
- # available, inventory variables will be expanded as well since the
- # name is templated with the injected variables
- filename3 = filename2
- if host is not None:
- filename3 = template(self.basedir, filename2, inject)
-
- # filename4 is the dwim'd path, but may also be mixed-scope, so we use
- # both play scoped vars and host scoped vars to template the filepath
- if utils.contains_vars(filename3) and host is not None:
- inject.update(self.vars)
- filename4 = template(self.basedir, filename3, inject)
- filename4 = utils.path_dwim(self.basedir, filename4)
- else:
- filename4 = utils.path_dwim(self.basedir, filename3)
-
- return filename2, filename3, filename4
-
-
- def update_vars_cache(host, data, target_filename=None):
-
- """ update a host's varscache with new var data """
-
- self.playbook.VARS_CACHE[host] = utils.combine_vars(self.playbook.VARS_CACHE.get(host, {}), data)
- if target_filename:
- self.playbook.callbacks.on_import_for_host(host, target_filename)
-
- def process_files(filename, filename2, filename3, filename4, host=None):
-
- """ pseudo-algorithm for deciding where new vars should go """
-
- data = utils.parse_yaml_from_file(filename4, vault_password=self.vault_password)
- if data:
- if type(data) != dict:
- raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename4)
- if host is not None:
- target_filename = None
- if utils.contains_vars(filename2):
- if not utils.contains_vars(filename3):
- target_filename = filename3
- else:
- target_filename = filename4
- update_vars_cache(host, data, target_filename=target_filename)
- else:
- self.vars_file_vars = utils.combine_vars(self.vars_file_vars, data)
- # we did process this file
- return True
- # we did not process this file
- return False
-
- # Enforce that vars_files is always a list
- if type(self.vars_files) != list:
- self.vars_files = [ self.vars_files ]
-
- # Build an inject if this is a host run started by self.update_vars_files
- if host is not None:
- inject = {}
- inject.update(self.playbook.inventory.get_variables(host, vault_password=vault_password))
- inject.update(self.playbook.SETUP_CACHE.get(host, {}))
- inject.update(self.playbook.VARS_CACHE.get(host, {}))
- else:
- inject = None
-
- processed = []
- for filename in self.vars_files:
- if type(filename) == list:
- # loop over all filenames, loading the first one, and failing if none found
- found = False
- sequence = []
- for real_filename in filename:
- filename2, filename3, filename4 = generate_filenames(host, inject, real_filename)
- sequence.append(filename4)
- if os.path.exists(filename4):
- found = True
- if process_files(filename, filename2, filename3, filename4, host=host):
- processed.append(filename)
- elif host is not None:
- self.playbook.callbacks.on_not_import_for_host(host, filename4)
- if found:
- break
- if not found and host is not None:
- raise errors.AnsibleError(
- "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
- )
- else:
- # just one filename supplied, load it!
- filename2, filename3, filename4 = generate_filenames(host, inject, filename)
- if utils.contains_vars(filename4):
- continue
- if process_files(filename, filename2, filename3, filename4, host=host):
- processed.append(filename)
-
- return processed
diff --git a/v2/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py
similarity index 100%
rename from v2/ansible/playbook/playbook_include.py
rename to lib/ansible/playbook/playbook_include.py
diff --git a/v2/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py
similarity index 100%
rename from v2/ansible/playbook/role/__init__.py
rename to lib/ansible/playbook/role/__init__.py
diff --git a/v2/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py
similarity index 100%
rename from v2/ansible/playbook/role/definition.py
rename to lib/ansible/playbook/role/definition.py
diff --git a/v2/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py
similarity index 100%
rename from v2/ansible/playbook/role/include.py
rename to lib/ansible/playbook/role/include.py
diff --git a/v2/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py
similarity index 100%
rename from v2/ansible/playbook/role/metadata.py
rename to lib/ansible/playbook/role/metadata.py
diff --git a/v2/ansible/playbook/role/requirement.py b/lib/ansible/playbook/role/requirement.py
similarity index 100%
rename from v2/ansible/playbook/role/requirement.py
rename to lib/ansible/playbook/role/requirement.py
diff --git a/v2/ansible/playbook/taggable.py b/lib/ansible/playbook/taggable.py
similarity index 100%
rename from v2/ansible/playbook/taggable.py
rename to lib/ansible/playbook/taggable.py
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index 70c1bc8df6..0606025798 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -15,332 +15,296 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-from ansible import errors
-from ansible import utils
-from ansible.module_utils.splitter import split_args
-import os
-import ansible.utils.template as template
-import sys
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
-class Task(object):
+from ansible.errors import AnsibleError
- _t_common = [
- 'action', 'always_run', 'any_errors_fatal', 'args', 'become', 'become_method', 'become_pass',
- 'become_user', 'changed_when', 'delay', 'delegate_to', 'environment', 'failed_when',
- 'first_available_file', 'ignore_errors', 'local_action', 'meta', 'name', 'no_log',
- 'notify', 'register', 'remote_user', 'retries', 'run_once', 'su', 'su_pass', 'su_user',
- 'sudo', 'sudo_pass', 'sudo_user', 'tags', 'transport', 'until', 'when',
- ]
+from ansible.parsing.mod_args import ModuleArgsParser
+from ansible.parsing.splitter import parse_kv
+from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
- __slots__ = [
- 'async_poll_interval', 'async_seconds', 'default_vars', 'first_available_file',
- 'items_lookup_plugin', 'items_lookup_terms', 'module_args', 'module_name', 'module_vars',
- 'notified_by', 'play', 'play_file_vars', 'play_vars', 'role_name', 'role_params', 'role_vars',
- ] + _t_common
+from ansible.plugins import module_loader, lookup_loader
- # to prevent typos and such
- VALID_KEYS = frozenset([
- 'async', 'connection', 'include', 'poll',
- ] + _t_common)
+from ansible.playbook.attribute import Attribute, FieldAttribute
+from ansible.playbook.base import Base
+from ansible.playbook.become import Become
+from ansible.playbook.block import Block
+from ansible.playbook.conditional import Conditional
+from ansible.playbook.role import Role
+from ansible.playbook.taggable import Taggable
- def __init__(self, play, ds, module_vars=None, play_vars=None, play_file_vars=None, role_vars=None, role_params=None, default_vars=None, additional_conditions=None, role_name=None):
- ''' constructor loads from a task or handler datastructure '''
+__all__ = ['Task']
- # meta directives are used to tell things like ansible/playbook to run
- # operations like handler execution. Meta tasks are not executed
- # normally.
- if 'meta' in ds:
- self.meta = ds['meta']
- self.tags = []
- self.module_vars = module_vars
- self.role_name = role_name
- return
- else:
- self.meta = None
+class Task(Base, Conditional, Taggable, Become):
+ """
+ A task is a language feature that represents a call to a module, with given arguments and other parameters.
+ A handler is a subclass of a task.
- library = os.path.join(play.basedir, 'library')
- if os.path.exists(library):
- utils.plugins.module_finder.add_directory(library)
+ Usage:
- for x in ds.keys():
+ Task.load(datastructure) -> Task
+ Task.something(...)
+ """
- # code to allow for saying "modulename: args" versus "action: modulename args"
- if x in utils.plugins.module_finder:
+ # =================================================================================
+ # ATTRIBUTES
+ # load_ and
+ # validate_
+ # will be used if defined
+ # might be possible to define others
- if 'action' in ds:
- raise errors.AnsibleError("multiple actions specified in task: '%s' and '%s'" % (x, ds.get('name', ds['action'])))
- if isinstance(ds[x], dict):
- if 'args' in ds:
- raise errors.AnsibleError("can't combine args: and a dict for %s: in task %s" % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
- ds['args'] = ds[x]
- ds[x] = ''
- elif ds[x] is None:
- ds[x] = ''
- if not isinstance(ds[x], basestring):
- raise errors.AnsibleError("action specified for task %s has invalid type %s" % (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
- ds['action'] = x + " " + ds[x]
- ds.pop(x)
+ _args = FieldAttribute(isa='dict', default=dict())
+ _action = FieldAttribute(isa='string')
- # code to allow "with_glob" and to reference a lookup plugin named glob
- elif x.startswith("with_"):
- if isinstance(ds[x], basestring):
- param = ds[x].strip()
+ _always_run = FieldAttribute(isa='bool')
+ _any_errors_fatal = FieldAttribute(isa='bool')
+ _async = FieldAttribute(isa='int', default=0)
+ _changed_when = FieldAttribute(isa='string')
+ _delay = FieldAttribute(isa='int', default=5)
+ _delegate_to = FieldAttribute(isa='string')
+ _failed_when = FieldAttribute(isa='string')
+ _first_available_file = FieldAttribute(isa='list')
+ _ignore_errors = FieldAttribute(isa='bool')
- plugin_name = x.replace("with_","")
- if plugin_name in utils.plugins.lookup_loader:
- ds['items_lookup_plugin'] = plugin_name
- ds['items_lookup_terms'] = ds[x]
- ds.pop(x)
- else:
- raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
+ _loop = FieldAttribute(isa='string', private=True)
+ _loop_args = FieldAttribute(isa='list', private=True)
+ _local_action = FieldAttribute(isa='string')
- elif x in [ 'changed_when', 'failed_when', 'when']:
- if isinstance(ds[x], basestring):
- param = ds[x].strip()
- # Only a variable, no logic
- if (param.startswith('{{') and
- param.find('}}') == len(ds[x]) - 2 and
- param.find('|') == -1):
- utils.warning("It is unnecessary to use '{{' in conditionals, leave variables in loop expressions bare.")
- elif x.startswith("when_"):
- utils.deprecated("The 'when_' conditional has been removed. Switch to using the regular unified 'when' statements as described on docs.ansible.com.","1.5", removed=True)
+ # FIXME: this should not be a Task
+ _meta = FieldAttribute(isa='string')
- if 'when' in ds:
- raise errors.AnsibleError("multiple when_* statements specified in task %s" % (ds.get('name', ds['action'])))
- when_name = x.replace("when_","")
- ds['when'] = "%s %s" % (when_name, ds[x])
- ds.pop(x)
- elif not x in Task.VALID_KEYS:
- raise errors.AnsibleError("%s is not a legal parameter in an Ansible task or handler" % x)
+ _name = FieldAttribute(isa='string', default='')
- self.module_vars = module_vars
- self.play_vars = play_vars
- self.play_file_vars = play_file_vars
- self.role_vars = role_vars
- self.role_params = role_params
- self.default_vars = default_vars
- self.play = play
+ _notify = FieldAttribute(isa='list')
+ _poll = FieldAttribute(isa='int')
+ _register = FieldAttribute(isa='string')
+ _retries = FieldAttribute(isa='int', default=1)
+ _run_once = FieldAttribute(isa='bool')
+ _until = FieldAttribute(isa='list') # ?
- # load various attributes
- self.name = ds.get('name', None)
- self.tags = [ 'untagged' ]
- self.register = ds.get('register', None)
- self.environment = ds.get('environment', play.environment)
- self.role_name = role_name
- self.no_log = utils.boolean(ds.get('no_log', "false")) or self.play.no_log
- self.run_once = utils.boolean(ds.get('run_once', 'false'))
+ def __init__(self, block=None, role=None, task_include=None):
+ ''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
- #Code to allow do until feature in a Task
- if 'until' in ds:
- if not ds.get('register'):
- raise errors.AnsibleError("register keyword is mandatory when using do until feature")
- self.module_vars['delay'] = ds.get('delay', 5)
- self.module_vars['retries'] = ds.get('retries', 3)
- self.module_vars['register'] = ds.get('register', None)
- self.until = ds.get('until')
- self.module_vars['until'] = self.until
+ self._block = block
+ self._role = role
+ self._task_include = task_include
- # rather than simple key=value args on the options line, these represent structured data and the values
- # can be hashes and lists, not just scalars
- self.args = ds.get('args', {})
+ super(Task, self).__init__()
- # get remote_user for task, then play, then playbook
- if ds.get('remote_user') is not None:
- self.remote_user = ds.get('remote_user')
- elif ds.get('remote_user', play.remote_user) is not None:
- self.remote_user = ds.get('remote_user', play.remote_user)
- else:
- self.remote_user = ds.get('remote_user', play.playbook.remote_user)
+ def get_name(self):
+ ''' return the name of the task '''
- # Fail out if user specifies privilege escalation params in conflict
- if (ds.get('become') or ds.get('become_user') or ds.get('become_pass')) and (ds.get('sudo') or ds.get('sudo_user') or ds.get('sudo_pass')):
- raise errors.AnsibleError('incompatible parameters ("become", "become_user", "become_pass") and sudo params "sudo", "sudo_user", "sudo_pass" in task: %s' % self.name)
+ if self._role and self.name:
+ return "%s : %s" % (self._role.get_name(), self.name)
+ elif self.name:
+ return self.name
+ else:
+ flattened_args = self._merge_kv(self.args)
+ if self._role:
+ return "%s : %s %s" % (self._role.get_name(), self.action, flattened_args)
+ else:
+ return "%s %s" % (self.action, flattened_args)
- if (ds.get('become') or ds.get('become_user') or ds.get('become_pass')) and (ds.get('su') or ds.get('su_user') or ds.get('su_pass')):
- raise errors.AnsibleError('incompatible parameters ("become", "become_user", "become_pass") and su params "su", "su_user", "sudo_pass" in task: %s' % self.name)
+ def _merge_kv(self, ds):
+ if ds is None:
+ return ""
+ elif isinstance(ds, basestring):
+ return ds
+ elif isinstance(ds, dict):
+ buf = ""
+ for (k,v) in ds.iteritems():
+ if k.startswith('_'):
+ continue
+ buf = buf + "%s=%s " % (k,v)
+ buf = buf.strip()
+ return buf
- if (ds.get('sudo') or ds.get('sudo_user') or ds.get('sudo_pass')) and (ds.get('su') or ds.get('su_user') or ds.get('su_pass')):
- raise errors.AnsibleError('incompatible parameters ("su", "su_user", "su_pass") and sudo params "sudo", "sudo_user", "sudo_pass" in task: %s' % self.name)
+ @staticmethod
+ def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
+ t = Task(block=block, role=role, task_include=task_include)
+ return t.load_data(data, variable_manager=variable_manager, loader=loader)
- self.become = utils.boolean(ds.get('become', play.become))
- self.become_method = ds.get('become_method', play.become_method)
- self.become_user = ds.get('become_user', play.become_user)
- self.become_pass = ds.get('become_pass', play.playbook.become_pass)
+ def __repr__(self):
+ ''' returns a human readable representation of the task '''
+ return "TASK: %s" % self.get_name()
- # set only if passed in current task data
- if 'sudo' in ds or 'sudo_user' in ds:
- self.become_method='sudo'
+ def _preprocess_loop(self, ds, new_ds, k, v):
+ ''' take a lookup plugin name and store it correctly '''
- if 'sudo' in ds:
- self.become=ds['sudo']
- del ds['sudo']
+ loop_name = k.replace("with_", "")
+ if new_ds.get('loop') is not None:
+ raise AnsibleError("duplicate loop in task: %s" % loop_name)
+ new_ds['loop'] = loop_name
+ new_ds['loop_args'] = v
+
+ def preprocess_data(self, ds):
+ '''
+ tasks are especially complex arguments so need pre-processing.
+ keep it short.
+ '''
+
+ assert isinstance(ds, dict)
+
+ # the new, cleaned datastructure, which will have legacy
+ # items reduced to a standard structure suitable for the
+ # attributes of the task class
+ new_ds = AnsibleMapping()
+ if isinstance(ds, AnsibleBaseYAMLObject):
+ new_ds.ansible_pos = ds.ansible_pos
+
+ # use the args parsing class to determine the action, args,
+ # and the delegate_to value from the various possible forms
+ # supported as legacy
+ args_parser = ModuleArgsParser(task_ds=ds)
+ (action, args, delegate_to) = args_parser.parse()
+
+ new_ds['action'] = action
+ new_ds['args'] = args
+ new_ds['delegate_to'] = delegate_to
+
+ for (k,v) in ds.iteritems():
+ if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
+ # we don't want to re-assign these values, which were
+ # determined by the ModuleArgsParser() above
+ continue
+ elif k.replace("with_", "") in lookup_loader:
+ self._preprocess_loop(ds, new_ds, k, v)
else:
- self.become=True
- if 'sudo_user' in ds:
- self.become_user = ds['sudo_user']
- del ds['sudo_user']
- if 'sudo_pass' in ds:
- self.become_pass = ds['sudo_pass']
- del ds['sudo_pass']
+ new_ds[k] = v
- elif 'su' in ds or 'su_user' in ds:
- self.become_method='su'
+ return super(Task, self).preprocess_data(new_ds)
- if 'su' in ds:
- self.become=ds['su']
+ def post_validate(self, templar):
+ '''
+ Override of base class post_validate, to also do final validation on
+ the block and task include (if any) to which this task belongs.
+ '''
+
+ if self._block:
+ self._block.post_validate(templar)
+ if self._task_include:
+ self._task_include.post_validate(templar)
+
+ super(Task, self).post_validate(templar)
+
+ def get_vars(self):
+ all_vars = self.vars.copy()
+ if self._block:
+ all_vars.update(self._block.get_vars())
+ if self._task_include:
+ all_vars.update(self._task_include.get_vars())
+
+ all_vars.update(self.serialize())
+
+ if 'tags' in all_vars:
+ del all_vars['tags']
+ if 'when' in all_vars:
+ del all_vars['when']
+ return all_vars
+
+ def copy(self, exclude_block=False):
+ new_me = super(Task, self).copy()
+
+ new_me._block = None
+ if self._block and not exclude_block:
+ new_me._block = self._block.copy()
+
+ new_me._role = None
+ if self._role:
+ new_me._role = self._role
+
+ new_me._task_include = None
+ if self._task_include:
+ new_me._task_include = self._task_include.copy()
+
+ return new_me
+
+ def serialize(self):
+ data = super(Task, self).serialize()
+
+ if self._block:
+ data['block'] = self._block.serialize()
+
+ if self._role:
+ data['role'] = self._role.serialize()
+
+ if self._task_include:
+ data['task_include'] = self._task_include.serialize()
+
+ return data
+
+ def deserialize(self, data):
+
+ # import is here to avoid import loops
+ #from ansible.playbook.task_include import TaskInclude
+
+ block_data = data.get('block')
+
+ if block_data:
+ b = Block()
+ b.deserialize(block_data)
+ self._block = b
+ del data['block']
+
+ role_data = data.get('role')
+ if role_data:
+ r = Role()
+ r.deserialize(role_data)
+ self._role = r
+ del data['role']
+
+ ti_data = data.get('task_include')
+ if ti_data:
+ #ti = TaskInclude()
+ ti = Task()
+ ti.deserialize(ti_data)
+ self._task_include = ti
+ del data['task_include']
+
+ super(Task, self).deserialize(data)
+
+ def evaluate_conditional(self, all_vars):
+ if self._block is not None:
+ if not self._block.evaluate_conditional(all_vars):
+ return False
+ if self._task_include is not None:
+ if not self._task_include.evaluate_conditional(all_vars):
+ return False
+ return super(Task, self).evaluate_conditional(all_vars)
+
+ def set_loader(self, loader):
+ '''
+ Sets the loader on this object and recursively on parent, child objects.
+ This is used primarily after the Task has been serialized/deserialized, which
+ does not preserve the loader.
+ '''
+
+ self._loader = loader
+
+ if self._block:
+ self._block.set_loader(loader)
+ if self._task_include:
+ self._task_include.set_loader(loader)
+
+ def _get_parent_attribute(self, attr, extend=False):
+ '''
+ Generic logic to get the attribute or parent attribute for a task value.
+ '''
+ value = self._attributes[attr]
+ if self._block and (not value or extend):
+ parent_value = getattr(self._block, attr)
+ if extend:
+ value = self._extend_value(value, parent_value)
else:
- self.become=True
- del ds['su']
- if 'su_user' in ds:
- self.become_user = ds['su_user']
- del ds['su_user']
- if 'su_pass' in ds:
- self.become_pass = ds['su_pass']
- del ds['su_pass']
-
- # Both are defined
- if ('action' in ds) and ('local_action' in ds):
- raise errors.AnsibleError("the 'action' and 'local_action' attributes can not be used together")
- # Both are NOT defined
- elif (not 'action' in ds) and (not 'local_action' in ds):
- raise errors.AnsibleError("'action' or 'local_action' attribute missing in task \"%s\"" % ds.get('name', ''))
- # Only one of them is defined
- elif 'local_action' in ds:
- self.action = ds.get('local_action', '')
- self.delegate_to = '127.0.0.1'
- else:
- self.action = ds.get('action', '')
- self.delegate_to = ds.get('delegate_to', None)
- self.transport = ds.get('connection', ds.get('transport', play.transport))
-
- if isinstance(self.action, dict):
- if 'module' not in self.action:
- raise errors.AnsibleError("'module' attribute missing from action in task \"%s\"" % ds.get('name', '%s' % self.action))
- if self.args:
- raise errors.AnsibleError("'args' cannot be combined with dict 'action' in task \"%s\"" % ds.get('name', '%s' % self.action))
- self.args = self.action
- self.action = self.args.pop('module')
-
- # delegate_to can use variables
- if not (self.delegate_to is None):
- # delegate_to: localhost should use local transport
- if self.delegate_to in ['127.0.0.1', 'localhost']:
- self.transport = 'local'
-
- # notified by is used by Playbook code to flag which hosts
- # need to run a notifier
- self.notified_by = []
-
- # if no name is specified, use the action line as the name
- if self.name is None:
- self.name = self.action
-
- # load various attributes
- self.when = ds.get('when', None)
- self.changed_when = ds.get('changed_when', None)
- self.failed_when = ds.get('failed_when', None)
-
- # combine the default and module vars here for use in templating
- all_vars = self.default_vars.copy()
- all_vars = utils.combine_vars(all_vars, self.play_vars)
- all_vars = utils.combine_vars(all_vars, self.play_file_vars)
- all_vars = utils.combine_vars(all_vars, self.role_vars)
- all_vars = utils.combine_vars(all_vars, self.module_vars)
- all_vars = utils.combine_vars(all_vars, self.role_params)
-
- self.async_seconds = ds.get('async', 0) # not async by default
- self.async_seconds = template.template_from_string(play.basedir, self.async_seconds, all_vars)
- self.async_seconds = int(self.async_seconds)
- self.async_poll_interval = ds.get('poll', 10) # default poll = 10 seconds
- self.async_poll_interval = template.template_from_string(play.basedir, self.async_poll_interval, all_vars)
- self.async_poll_interval = int(self.async_poll_interval)
- self.notify = ds.get('notify', [])
- self.first_available_file = ds.get('first_available_file', None)
-
- self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
- self.items_lookup_terms = ds.get('items_lookup_terms', None)
-
-
- self.ignore_errors = ds.get('ignore_errors', False)
- self.any_errors_fatal = ds.get('any_errors_fatal', play.any_errors_fatal)
-
- self.always_run = ds.get('always_run', False)
-
- # action should be a string
- if not isinstance(self.action, basestring):
- raise errors.AnsibleError("action is of type '%s' and not a string in task. name: %s" % (type(self.action).__name__, self.name))
-
- # notify can be a string or a list, store as a list
- if isinstance(self.notify, basestring):
- self.notify = [ self.notify ]
-
- # split the action line into a module name + arguments
- try:
- tokens = split_args(self.action)
- except Exception, e:
- if "unbalanced" in str(e):
- raise errors.AnsibleError("There was an error while parsing the task %s.\n" % repr(self.action) + \
- "Make sure quotes are matched or escaped properly")
+ value = parent_value
+ if self._task_include and (not value or extend):
+ parent_value = getattr(self._task_include, attr)
+ if extend:
+ value = self._extend_value(value, parent_value)
else:
- raise
- if len(tokens) < 1:
- raise errors.AnsibleError("invalid/missing action in task. name: %s" % self.name)
- self.module_name = tokens[0]
- self.module_args = ''
- if len(tokens) > 1:
- self.module_args = " ".join(tokens[1:])
+ value = parent_value
+ return value
- import_tags = self.module_vars.get('tags',[])
- if type(import_tags) in [int,float]:
- import_tags = str(import_tags)
- elif type(import_tags) in [str,unicode]:
- # allow the user to list comma delimited tags
- import_tags = import_tags.split(",")
-
- # handle mutually incompatible options
- incompatibles = [ x for x in [ self.first_available_file, self.items_lookup_plugin ] if x is not None ]
- if len(incompatibles) > 1:
- raise errors.AnsibleError("with_(plugin), and first_available_file are mutually incompatible in a single task")
-
- # make first_available_file accessible to Runner code
- if self.first_available_file:
- self.module_vars['first_available_file'] = self.first_available_file
- # make sure that the 'item' variable is set when using
- # first_available_file (issue #8220)
- if 'item' not in self.module_vars:
- self.module_vars['item'] = ''
-
- if self.items_lookup_plugin is not None:
- self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
- self.module_vars['items_lookup_terms'] = self.items_lookup_terms
-
- # allow runner to see delegate_to option
- self.module_vars['delegate_to'] = self.delegate_to
-
- # make some task attributes accessible to Runner code
- self.module_vars['ignore_errors'] = self.ignore_errors
- self.module_vars['register'] = self.register
- self.module_vars['changed_when'] = self.changed_when
- self.module_vars['failed_when'] = self.failed_when
- self.module_vars['always_run'] = self.always_run
-
- # tags allow certain parts of a playbook to be run without running the whole playbook
- apply_tags = ds.get('tags', None)
- if apply_tags is not None:
- if type(apply_tags) in [ str, unicode ]:
- self.tags.append(apply_tags)
- elif type(apply_tags) in [ int, float ]:
- self.tags.append(str(apply_tags))
- elif type(apply_tags) == list:
- self.tags.extend(apply_tags)
- self.tags.extend(import_tags)
-
- if len(self.tags) > 1:
- self.tags.remove('untagged')
-
- if additional_conditions:
- new_conditions = additional_conditions[:]
- if self.when:
- new_conditions.append(self.when)
- self.when = new_conditions
diff --git a/v2/ansible/playbook/vars.py b/lib/ansible/playbook/vars.py
similarity index 100%
rename from v2/ansible/playbook/vars.py
rename to lib/ansible/playbook/vars.py
diff --git a/v2/ansible/playbook/vars_file.py b/lib/ansible/playbook/vars_file.py
similarity index 100%
rename from v2/ansible/playbook/vars_file.py
rename to lib/ansible/playbook/vars_file.py
diff --git a/v2/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
similarity index 100%
rename from v2/ansible/plugins/__init__.py
rename to lib/ansible/plugins/__init__.py
diff --git a/v2/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
similarity index 100%
rename from v2/ansible/plugins/action/__init__.py
rename to lib/ansible/plugins/action/__init__.py
diff --git a/v2/ansible/plugins/action/add_host.py b/lib/ansible/plugins/action/add_host.py
similarity index 100%
rename from v2/ansible/plugins/action/add_host.py
rename to lib/ansible/plugins/action/add_host.py
diff --git a/v2/ansible/plugins/action/assemble.py b/lib/ansible/plugins/action/assemble.py
similarity index 100%
rename from v2/ansible/plugins/action/assemble.py
rename to lib/ansible/plugins/action/assemble.py
diff --git a/v2/ansible/plugins/action/assert.py b/lib/ansible/plugins/action/assert.py
similarity index 100%
rename from v2/ansible/plugins/action/assert.py
rename to lib/ansible/plugins/action/assert.py
diff --git a/v2/ansible/plugins/action/async.py b/lib/ansible/plugins/action/async.py
similarity index 100%
rename from v2/ansible/plugins/action/async.py
rename to lib/ansible/plugins/action/async.py
diff --git a/v2/ansible/plugins/action/copy.py b/lib/ansible/plugins/action/copy.py
similarity index 100%
rename from v2/ansible/plugins/action/copy.py
rename to lib/ansible/plugins/action/copy.py
diff --git a/v2/ansible/plugins/action/debug.py b/lib/ansible/plugins/action/debug.py
similarity index 100%
rename from v2/ansible/plugins/action/debug.py
rename to lib/ansible/plugins/action/debug.py
diff --git a/v2/ansible/plugins/action/fail.py b/lib/ansible/plugins/action/fail.py
similarity index 100%
rename from v2/ansible/plugins/action/fail.py
rename to lib/ansible/plugins/action/fail.py
diff --git a/v2/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py
similarity index 100%
rename from v2/ansible/plugins/action/fetch.py
rename to lib/ansible/plugins/action/fetch.py
diff --git a/v2/ansible/plugins/action/group_by.py b/lib/ansible/plugins/action/group_by.py
similarity index 100%
rename from v2/ansible/plugins/action/group_by.py
rename to lib/ansible/plugins/action/group_by.py
diff --git a/v2/ansible/plugins/action/include_vars.py b/lib/ansible/plugins/action/include_vars.py
similarity index 100%
rename from v2/ansible/plugins/action/include_vars.py
rename to lib/ansible/plugins/action/include_vars.py
diff --git a/v2/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py
similarity index 100%
rename from v2/ansible/plugins/action/normal.py
rename to lib/ansible/plugins/action/normal.py
diff --git a/v2/ansible/plugins/action/patch.py b/lib/ansible/plugins/action/patch.py
similarity index 100%
rename from v2/ansible/plugins/action/patch.py
rename to lib/ansible/plugins/action/patch.py
diff --git a/v2/ansible/plugins/action/pause.py b/lib/ansible/plugins/action/pause.py
similarity index 100%
rename from v2/ansible/plugins/action/pause.py
rename to lib/ansible/plugins/action/pause.py
diff --git a/v2/ansible/plugins/action/raw.py b/lib/ansible/plugins/action/raw.py
similarity index 100%
rename from v2/ansible/plugins/action/raw.py
rename to lib/ansible/plugins/action/raw.py
diff --git a/v2/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py
similarity index 100%
rename from v2/ansible/plugins/action/script.py
rename to lib/ansible/plugins/action/script.py
diff --git a/v2/ansible/plugins/action/set_fact.py b/lib/ansible/plugins/action/set_fact.py
similarity index 100%
rename from v2/ansible/plugins/action/set_fact.py
rename to lib/ansible/plugins/action/set_fact.py
diff --git a/v2/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
similarity index 100%
rename from v2/ansible/plugins/action/synchronize.py
rename to lib/ansible/plugins/action/synchronize.py
diff --git a/v2/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py
similarity index 100%
rename from v2/ansible/plugins/action/template.py
rename to lib/ansible/plugins/action/template.py
diff --git a/v2/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py
similarity index 100%
rename from v2/ansible/plugins/action/unarchive.py
rename to lib/ansible/plugins/action/unarchive.py
diff --git a/v2/ansible/plugins/cache/__init__.py b/lib/ansible/plugins/cache/__init__.py
similarity index 100%
rename from v2/ansible/plugins/cache/__init__.py
rename to lib/ansible/plugins/cache/__init__.py
diff --git a/v2/ansible/plugins/cache/base.py b/lib/ansible/plugins/cache/base.py
similarity index 100%
rename from v2/ansible/plugins/cache/base.py
rename to lib/ansible/plugins/cache/base.py
diff --git a/v2/ansible/plugins/cache/memcached.py b/lib/ansible/plugins/cache/memcached.py
similarity index 100%
rename from v2/ansible/plugins/cache/memcached.py
rename to lib/ansible/plugins/cache/memcached.py
diff --git a/v2/ansible/plugins/cache/memory.py b/lib/ansible/plugins/cache/memory.py
similarity index 100%
rename from v2/ansible/plugins/cache/memory.py
rename to lib/ansible/plugins/cache/memory.py
diff --git a/v2/ansible/plugins/cache/redis.py b/lib/ansible/plugins/cache/redis.py
similarity index 100%
rename from v2/ansible/plugins/cache/redis.py
rename to lib/ansible/plugins/cache/redis.py
diff --git a/v2/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py
similarity index 100%
rename from v2/ansible/plugins/callback/__init__.py
rename to lib/ansible/plugins/callback/__init__.py
diff --git a/v2/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py
similarity index 100%
rename from v2/ansible/plugins/callback/default.py
rename to lib/ansible/plugins/callback/default.py
diff --git a/v2/ansible/plugins/callback/minimal.py b/lib/ansible/plugins/callback/minimal.py
similarity index 100%
rename from v2/ansible/plugins/callback/minimal.py
rename to lib/ansible/plugins/callback/minimal.py
diff --git a/v2/ansible/plugins/connections/__init__.py b/lib/ansible/plugins/connections/__init__.py
similarity index 100%
rename from v2/ansible/plugins/connections/__init__.py
rename to lib/ansible/plugins/connections/__init__.py
diff --git a/v2/ansible/plugins/connections/accelerate.py b/lib/ansible/plugins/connections/accelerate.py
similarity index 100%
rename from v2/ansible/plugins/connections/accelerate.py
rename to lib/ansible/plugins/connections/accelerate.py
diff --git a/v2/ansible/plugins/connections/chroot.py b/lib/ansible/plugins/connections/chroot.py
similarity index 100%
rename from v2/ansible/plugins/connections/chroot.py
rename to lib/ansible/plugins/connections/chroot.py
diff --git a/v2/ansible/plugins/connections/funcd.py b/lib/ansible/plugins/connections/funcd.py
similarity index 100%
rename from v2/ansible/plugins/connections/funcd.py
rename to lib/ansible/plugins/connections/funcd.py
diff --git a/v2/ansible/plugins/connections/jail.py b/lib/ansible/plugins/connections/jail.py
similarity index 100%
rename from v2/ansible/plugins/connections/jail.py
rename to lib/ansible/plugins/connections/jail.py
diff --git a/v2/ansible/plugins/connections/libvirt_lxc.py b/lib/ansible/plugins/connections/libvirt_lxc.py
similarity index 100%
rename from v2/ansible/plugins/connections/libvirt_lxc.py
rename to lib/ansible/plugins/connections/libvirt_lxc.py
diff --git a/v2/ansible/plugins/connections/local.py b/lib/ansible/plugins/connections/local.py
similarity index 100%
rename from v2/ansible/plugins/connections/local.py
rename to lib/ansible/plugins/connections/local.py
diff --git a/v2/ansible/plugins/connections/paramiko_ssh.py b/lib/ansible/plugins/connections/paramiko_ssh.py
similarity index 100%
rename from v2/ansible/plugins/connections/paramiko_ssh.py
rename to lib/ansible/plugins/connections/paramiko_ssh.py
diff --git a/v2/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
similarity index 100%
rename from v2/ansible/plugins/connections/ssh.py
rename to lib/ansible/plugins/connections/ssh.py
diff --git a/v2/ansible/plugins/connections/winrm.py b/lib/ansible/plugins/connections/winrm.py
similarity index 100%
rename from v2/ansible/plugins/connections/winrm.py
rename to lib/ansible/plugins/connections/winrm.py
diff --git a/v2/ansible/plugins/connections/zone.py b/lib/ansible/plugins/connections/zone.py
similarity index 100%
rename from v2/ansible/plugins/connections/zone.py
rename to lib/ansible/plugins/connections/zone.py
diff --git a/v2/ansible/plugins/filter b/lib/ansible/plugins/filter
similarity index 100%
rename from v2/ansible/plugins/filter
rename to lib/ansible/plugins/filter
diff --git a/v2/ansible/plugins/inventory/__init__.py b/lib/ansible/plugins/inventory/__init__.py
similarity index 100%
rename from v2/ansible/plugins/inventory/__init__.py
rename to lib/ansible/plugins/inventory/__init__.py
diff --git a/v2/ansible/plugins/inventory/aggregate.py b/lib/ansible/plugins/inventory/aggregate.py
similarity index 100%
rename from v2/ansible/plugins/inventory/aggregate.py
rename to lib/ansible/plugins/inventory/aggregate.py
diff --git a/v2/ansible/plugins/inventory/directory.py b/lib/ansible/plugins/inventory/directory.py
similarity index 100%
rename from v2/ansible/plugins/inventory/directory.py
rename to lib/ansible/plugins/inventory/directory.py
diff --git a/v2/ansible/plugins/inventory/ini.py b/lib/ansible/plugins/inventory/ini.py
similarity index 100%
rename from v2/ansible/plugins/inventory/ini.py
rename to lib/ansible/plugins/inventory/ini.py
diff --git a/v2/ansible/plugins/lookup/__init__.py b/lib/ansible/plugins/lookup/__init__.py
similarity index 100%
rename from v2/ansible/plugins/lookup/__init__.py
rename to lib/ansible/plugins/lookup/__init__.py
diff --git a/v2/ansible/plugins/lookup/cartesian.py b/lib/ansible/plugins/lookup/cartesian.py
similarity index 100%
rename from v2/ansible/plugins/lookup/cartesian.py
rename to lib/ansible/plugins/lookup/cartesian.py
diff --git a/v2/ansible/plugins/lookup/csvfile.py b/lib/ansible/plugins/lookup/csvfile.py
similarity index 100%
rename from v2/ansible/plugins/lookup/csvfile.py
rename to lib/ansible/plugins/lookup/csvfile.py
diff --git a/v2/ansible/plugins/lookup/dict.py b/lib/ansible/plugins/lookup/dict.py
similarity index 100%
rename from v2/ansible/plugins/lookup/dict.py
rename to lib/ansible/plugins/lookup/dict.py
diff --git a/v2/ansible/plugins/lookup/dnstxt.py b/lib/ansible/plugins/lookup/dnstxt.py
similarity index 100%
rename from v2/ansible/plugins/lookup/dnstxt.py
rename to lib/ansible/plugins/lookup/dnstxt.py
diff --git a/v2/ansible/plugins/lookup/env.py b/lib/ansible/plugins/lookup/env.py
similarity index 100%
rename from v2/ansible/plugins/lookup/env.py
rename to lib/ansible/plugins/lookup/env.py
diff --git a/v2/ansible/plugins/lookup/etcd.py b/lib/ansible/plugins/lookup/etcd.py
similarity index 100%
rename from v2/ansible/plugins/lookup/etcd.py
rename to lib/ansible/plugins/lookup/etcd.py
diff --git a/v2/ansible/plugins/lookup/file.py b/lib/ansible/plugins/lookup/file.py
similarity index 100%
rename from v2/ansible/plugins/lookup/file.py
rename to lib/ansible/plugins/lookup/file.py
diff --git a/v2/ansible/plugins/lookup/fileglob.py b/lib/ansible/plugins/lookup/fileglob.py
similarity index 100%
rename from v2/ansible/plugins/lookup/fileglob.py
rename to lib/ansible/plugins/lookup/fileglob.py
diff --git a/v2/ansible/plugins/lookup/first_found.py b/lib/ansible/plugins/lookup/first_found.py
similarity index 100%
rename from v2/ansible/plugins/lookup/first_found.py
rename to lib/ansible/plugins/lookup/first_found.py
diff --git a/v2/ansible/plugins/lookup/flattened.py b/lib/ansible/plugins/lookup/flattened.py
similarity index 100%
rename from v2/ansible/plugins/lookup/flattened.py
rename to lib/ansible/plugins/lookup/flattened.py
diff --git a/v2/ansible/plugins/lookup/indexed_items.py b/lib/ansible/plugins/lookup/indexed_items.py
similarity index 100%
rename from v2/ansible/plugins/lookup/indexed_items.py
rename to lib/ansible/plugins/lookup/indexed_items.py
diff --git a/v2/ansible/plugins/lookup/inventory_hostnames.py b/lib/ansible/plugins/lookup/inventory_hostnames.py
similarity index 100%
rename from v2/ansible/plugins/lookup/inventory_hostnames.py
rename to lib/ansible/plugins/lookup/inventory_hostnames.py
diff --git a/v2/ansible/plugins/lookup/items.py b/lib/ansible/plugins/lookup/items.py
similarity index 100%
rename from v2/ansible/plugins/lookup/items.py
rename to lib/ansible/plugins/lookup/items.py
diff --git a/v2/ansible/plugins/lookup/lines.py b/lib/ansible/plugins/lookup/lines.py
similarity index 100%
rename from v2/ansible/plugins/lookup/lines.py
rename to lib/ansible/plugins/lookup/lines.py
diff --git a/v2/ansible/plugins/lookup/nested.py b/lib/ansible/plugins/lookup/nested.py
similarity index 100%
rename from v2/ansible/plugins/lookup/nested.py
rename to lib/ansible/plugins/lookup/nested.py
diff --git a/v2/ansible/plugins/lookup/password.py b/lib/ansible/plugins/lookup/password.py
similarity index 100%
rename from v2/ansible/plugins/lookup/password.py
rename to lib/ansible/plugins/lookup/password.py
diff --git a/v2/ansible/plugins/lookup/pipe.py b/lib/ansible/plugins/lookup/pipe.py
similarity index 100%
rename from v2/ansible/plugins/lookup/pipe.py
rename to lib/ansible/plugins/lookup/pipe.py
diff --git a/v2/ansible/plugins/lookup/random_choice.py b/lib/ansible/plugins/lookup/random_choice.py
similarity index 100%
rename from v2/ansible/plugins/lookup/random_choice.py
rename to lib/ansible/plugins/lookup/random_choice.py
diff --git a/v2/ansible/plugins/lookup/redis_kv.py b/lib/ansible/plugins/lookup/redis_kv.py
similarity index 100%
rename from v2/ansible/plugins/lookup/redis_kv.py
rename to lib/ansible/plugins/lookup/redis_kv.py
diff --git a/v2/ansible/plugins/lookup/sequence.py b/lib/ansible/plugins/lookup/sequence.py
similarity index 100%
rename from v2/ansible/plugins/lookup/sequence.py
rename to lib/ansible/plugins/lookup/sequence.py
diff --git a/v2/ansible/plugins/lookup/subelements.py b/lib/ansible/plugins/lookup/subelements.py
similarity index 100%
rename from v2/ansible/plugins/lookup/subelements.py
rename to lib/ansible/plugins/lookup/subelements.py
diff --git a/v2/ansible/plugins/lookup/template.py b/lib/ansible/plugins/lookup/template.py
similarity index 100%
rename from v2/ansible/plugins/lookup/template.py
rename to lib/ansible/plugins/lookup/template.py
diff --git a/v2/ansible/plugins/lookup/together.py b/lib/ansible/plugins/lookup/together.py
similarity index 100%
rename from v2/ansible/plugins/lookup/together.py
rename to lib/ansible/plugins/lookup/together.py
diff --git a/v2/ansible/plugins/lookup/url.py b/lib/ansible/plugins/lookup/url.py
similarity index 100%
rename from v2/ansible/plugins/lookup/url.py
rename to lib/ansible/plugins/lookup/url.py
diff --git a/v2/ansible/plugins/shell/__init__.py b/lib/ansible/plugins/shell/__init__.py
similarity index 100%
rename from v2/ansible/plugins/shell/__init__.py
rename to lib/ansible/plugins/shell/__init__.py
diff --git a/v2/ansible/plugins/shell/csh.py b/lib/ansible/plugins/shell/csh.py
similarity index 100%
rename from v2/ansible/plugins/shell/csh.py
rename to lib/ansible/plugins/shell/csh.py
diff --git a/v2/ansible/plugins/shell/fish.py b/lib/ansible/plugins/shell/fish.py
similarity index 100%
rename from v2/ansible/plugins/shell/fish.py
rename to lib/ansible/plugins/shell/fish.py
diff --git a/v2/ansible/plugins/shell/powershell.py b/lib/ansible/plugins/shell/powershell.py
similarity index 100%
rename from v2/ansible/plugins/shell/powershell.py
rename to lib/ansible/plugins/shell/powershell.py
diff --git a/v2/ansible/plugins/shell/sh.py b/lib/ansible/plugins/shell/sh.py
similarity index 100%
rename from v2/ansible/plugins/shell/sh.py
rename to lib/ansible/plugins/shell/sh.py
diff --git a/v2/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
similarity index 100%
rename from v2/ansible/plugins/strategies/__init__.py
rename to lib/ansible/plugins/strategies/__init__.py
diff --git a/v2/ansible/plugins/strategies/free.py b/lib/ansible/plugins/strategies/free.py
similarity index 100%
rename from v2/ansible/plugins/strategies/free.py
rename to lib/ansible/plugins/strategies/free.py
diff --git a/v2/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
similarity index 100%
rename from v2/ansible/plugins/strategies/linear.py
rename to lib/ansible/plugins/strategies/linear.py
diff --git a/v2/ansible/plugins/vars/__init__.py b/lib/ansible/plugins/vars/__init__.py
similarity index 100%
rename from v2/ansible/plugins/vars/__init__.py
rename to lib/ansible/plugins/vars/__init__.py
diff --git a/v2/ansible/template/__init__.py b/lib/ansible/template/__init__.py
similarity index 100%
rename from v2/ansible/template/__init__.py
rename to lib/ansible/template/__init__.py
diff --git a/v2/ansible/template/safe_eval.py b/lib/ansible/template/safe_eval.py
similarity index 100%
rename from v2/ansible/template/safe_eval.py
rename to lib/ansible/template/safe_eval.py
diff --git a/v2/ansible/template/template.py b/lib/ansible/template/template.py
similarity index 100%
rename from v2/ansible/template/template.py
rename to lib/ansible/template/template.py
diff --git a/v2/ansible/template/vars.py b/lib/ansible/template/vars.py
similarity index 100%
rename from v2/ansible/template/vars.py
rename to lib/ansible/template/vars.py
diff --git a/v2/test-requirements.txt b/lib/ansible/test-requirements.txt
similarity index 100%
rename from v2/test-requirements.txt
rename to lib/ansible/test-requirements.txt
diff --git a/lib/ansible/utils/__init__.py b/lib/ansible/utils/__init__.py
index 7ed07a54c8..ae8ccff595 100644
--- a/lib/ansible/utils/__init__.py
+++ b/lib/ansible/utils/__init__.py
@@ -15,1646 +15,6 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-import errno
-import sys
-import re
-import os
-import shlex
-import yaml
-import copy
-import optparse
-import operator
-from ansible import errors
-from ansible import __version__
-from ansible.utils.display_functions import *
-from ansible.utils.plugins import *
-from ansible.utils.su_prompts import *
-from ansible.utils.hashing import secure_hash, secure_hash_s, checksum, checksum_s, md5, md5s
-from ansible.callbacks import display
-from ansible.module_utils.splitter import split_args, unquote
-from ansible.module_utils.basic import heuristic_log_sanitize
-from ansible.utils.unicode import to_bytes, to_unicode
-import ansible.constants as C
-import ast
-import time
-import StringIO
-import stat
-import termios
-import tty
-import pipes
-import random
-import difflib
-import warnings
-import traceback
-import getpass
-import sys
-import subprocess
-import contextlib
-
-from vault import VaultLib
-
-VERBOSITY=0
-
-MAX_FILE_SIZE_FOR_DIFF=1*1024*1024
-
-# caching the compilation of the regex used
-# to check for lookup calls within data
-LOOKUP_REGEX = re.compile(r'lookup\s*\(')
-PRINT_CODE_REGEX = re.compile(r'(?:{[{%]|[%}]})')
-CODE_REGEX = re.compile(r'(?:{%|%})')
-
-
-try:
- # simplejson can be much faster if it's available
- import simplejson as json
-except ImportError:
- import json
-
-try:
- from yaml import CSafeLoader as Loader
-except ImportError:
- from yaml import SafeLoader as Loader
-
-PASSLIB_AVAILABLE = False
-try:
- import passlib.hash
- PASSLIB_AVAILABLE = True
-except:
- pass
-
-try:
- import builtin
-except ImportError:
- import __builtin__ as builtin
-
-KEYCZAR_AVAILABLE=False
-try:
- try:
- # some versions of pycrypto may not have this?
- from Crypto.pct_warnings import PowmInsecureWarning
- except ImportError:
- PowmInsecureWarning = RuntimeWarning
-
- with warnings.catch_warnings(record=True) as warning_handler:
- warnings.simplefilter("error", PowmInsecureWarning)
- try:
- import keyczar.errors as key_errors
- from keyczar.keys import AesKey
- except PowmInsecureWarning:
- system_warning(
- "The version of gmp you have installed has a known issue regarding " + \
- "timing vulnerabilities when used with pycrypto. " + \
- "If possible, you should update it (i.e. yum update gmp)."
- )
- warnings.resetwarnings()
- warnings.simplefilter("ignore")
- import keyczar.errors as key_errors
- from keyczar.keys import AesKey
- KEYCZAR_AVAILABLE=True
-except ImportError:
- pass
-
-
-###############################################################
-# Abstractions around keyczar
-###############################################################
-
-def key_for_hostname(hostname):
- # fireball mode is an implementation of ansible firing up zeromq via SSH
- # to use no persistent daemons or key management
-
- if not KEYCZAR_AVAILABLE:
- raise errors.AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")
-
- key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
- if not os.path.exists(key_path):
- os.makedirs(key_path, mode=0700)
- os.chmod(key_path, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
- elif not os.path.isdir(key_path):
- raise errors.AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')
-
- if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
- raise errors.AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))
-
- key_path = os.path.join(key_path, hostname)
-
- # use new AES keys every 2 hours, which means fireball must not allow running for longer either
- if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
- key = AesKey.Generate()
- fd = os.open(key_path, os.O_WRONLY | os.O_CREAT, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
- fh = os.fdopen(fd, 'w')
- fh.write(str(key))
- fh.close()
- return key
- else:
- if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
- raise errors.AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
- fh = open(key_path)
- key = AesKey.Read(fh.read())
- fh.close()
- return key
-
-def encrypt(key, msg):
- return key.Encrypt(msg)
-
-def decrypt(key, msg):
- try:
- return key.Decrypt(msg)
- except key_errors.InvalidSignatureError:
- raise errors.AnsibleError("decryption failed")
-
-###############################################################
-# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
-###############################################################
-
-def read_vault_file(vault_password_file):
- """Read a vault password from a file or if executable, execute the script and
- retrieve password from STDOUT
- """
- if vault_password_file:
- this_path = os.path.realpath(os.path.expanduser(vault_password_file))
- if is_executable(this_path):
- try:
- # STDERR not captured to make it easier for users to prompt for input in their scripts
- p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
- except OSError, e:
- raise errors.AnsibleError("problem running %s (%s)" % (' '.join(this_path), e))
- stdout, stderr = p.communicate()
- vault_pass = stdout.strip('\r\n')
- else:
- try:
- f = open(this_path, "rb")
- vault_pass=f.read().strip()
- f.close()
- except (OSError, IOError), e:
- raise errors.AnsibleError("Could not read %s: %s" % (this_path, e))
-
- return vault_pass
- else:
- return None
-
-def err(msg):
- ''' print an error message to stderr '''
-
- print >> sys.stderr, msg
-
-def exit(msg, rc=1):
- ''' quit with an error to stdout and a failure code '''
-
- err(msg)
- sys.exit(rc)
-
-def jsonify(result, format=False):
- ''' format JSON output (uncompressed or uncompressed) '''
-
- if result is None:
- return "{}"
- result2 = result.copy()
- for key, value in result2.items():
- if type(value) is str:
- result2[key] = value.decode('utf-8', 'ignore')
-
- indent = None
- if format:
- indent = 4
-
- try:
- return json.dumps(result2, sort_keys=True, indent=indent, ensure_ascii=False)
- except UnicodeDecodeError:
- return json.dumps(result2, sort_keys=True, indent=indent)
-
-def write_tree_file(tree, hostname, buf):
- ''' write something into treedir/hostname '''
-
- # TODO: might be nice to append playbook runs per host in a similar way
- # in which case, we'd want append mode.
- path = os.path.join(tree, hostname)
- fd = open(path, "w+")
- fd.write(buf)
- fd.close()
-
-def is_failed(result):
- ''' is a given JSON result a failed result? '''
-
- return ((result.get('rc', 0) != 0) or (result.get('failed', False) in [ True, 'True', 'true']))
-
-def is_changed(result):
- ''' is a given JSON result a changed result? '''
-
- return (result.get('changed', False) in [ True, 'True', 'true'])
-
-def check_conditional(conditional, basedir, inject, fail_on_undefined=False):
- from ansible.utils import template
-
- if conditional is None or conditional == '':
- return True
-
- if isinstance(conditional, list):
- for x in conditional:
- if not check_conditional(x, basedir, inject, fail_on_undefined=fail_on_undefined):
- return False
- return True
-
- if not isinstance(conditional, basestring):
- return conditional
-
- conditional = conditional.replace("jinja2_compare ","")
- # allow variable names
- if conditional in inject and '-' not in to_unicode(inject[conditional], nonstring='simplerepr'):
- conditional = to_unicode(inject[conditional], nonstring='simplerepr')
- conditional = template.template(basedir, conditional, inject, fail_on_undefined=fail_on_undefined)
- original = to_unicode(conditional, nonstring='simplerepr').replace("jinja2_compare ","")
- # a Jinja2 evaluation that results in something Python can eval!
- presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
- conditional = template.template(basedir, presented, inject)
- val = conditional.strip()
- if val == presented:
- # the templating failed, meaning most likely a
- # variable was undefined. If we happened to be
- # looking for an undefined variable, return True,
- # otherwise fail
- if "is undefined" in conditional:
- return True
- elif "is defined" in conditional:
- return False
- else:
- raise errors.AnsibleError("error while evaluating conditional: %s" % original)
- elif val == "True":
- return True
- elif val == "False":
- return False
- else:
- raise errors.AnsibleError("unable to evaluate conditional: %s" % original)
-
-def is_executable(path):
- '''is the given path executable?'''
- return (stat.S_IXUSR & os.stat(path)[stat.ST_MODE]
- or stat.S_IXGRP & os.stat(path)[stat.ST_MODE]
- or stat.S_IXOTH & os.stat(path)[stat.ST_MODE])
-
-def unfrackpath(path):
- '''
- returns a path that is free of symlinks, environment
- variables, relative path traversals and symbols (~)
- example:
- '$HOME/../../var/mail' becomes '/var/spool/mail'
- '''
- return os.path.normpath(os.path.realpath(os.path.expandvars(os.path.expanduser(path))))
-
-def prepare_writeable_dir(tree,mode=0777):
- ''' make sure a directory exists and is writeable '''
-
- # modify the mode to ensure the owner at least
- # has read/write access to this directory
- mode |= 0700
-
- # make sure the tree path is always expanded
- # and normalized and free of symlinks
- tree = unfrackpath(tree)
-
- if not os.path.exists(tree):
- try:
- os.makedirs(tree, mode)
- except (IOError, OSError), e:
- raise errors.AnsibleError("Could not make dir %s: %s" % (tree, e))
- if not os.access(tree, os.W_OK):
- raise errors.AnsibleError("Cannot write to path %s" % tree)
- return tree
-
-def path_dwim(basedir, given):
- '''
- make relative paths work like folks expect.
- '''
-
- if given.startswith("'"):
- given = given[1:-1]
-
- if given.startswith("/"):
- return os.path.abspath(given)
- elif given.startswith("~"):
- return os.path.abspath(os.path.expanduser(given))
- else:
- if basedir is None:
- basedir = "."
- return os.path.abspath(os.path.join(basedir, given))
-
-def path_dwim_relative(original, dirname, source, playbook_base, check=True):
- ''' find one file in a directory one level up in a dir named dirname relative to current '''
- # (used by roles code)
-
- from ansible.utils import template
-
-
- basedir = os.path.dirname(original)
- if os.path.islink(basedir):
- basedir = unfrackpath(basedir)
- template2 = os.path.join(basedir, dirname, source)
- else:
- template2 = os.path.join(basedir, '..', dirname, source)
- source2 = path_dwim(basedir, template2)
- if os.path.exists(source2):
- return source2
- obvious_local_path = path_dwim(playbook_base, source)
- if os.path.exists(obvious_local_path):
- return obvious_local_path
- if check:
- raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
- return source2 # which does not exist
-
-def repo_url_to_role_name(repo_url):
- # gets the role name out of a repo like
- # http://git.example.com/repos/repo.git" => "repo"
-
- if '://' not in repo_url and '@' not in repo_url:
- return repo_url
- trailing_path = repo_url.split('/')[-1]
- if trailing_path.endswith('.git'):
- trailing_path = trailing_path[:-4]
- if trailing_path.endswith('.tar.gz'):
- trailing_path = trailing_path[:-7]
- if ',' in trailing_path:
- trailing_path = trailing_path.split(',')[0]
- return trailing_path
-
-
-def role_spec_parse(role_spec):
- # takes a repo and a version like
- # git+http://git.example.com/repos/repo.git,v1.0
- # and returns a list of properties such as:
- # {
- # 'scm': 'git',
- # 'src': 'http://git.example.com/repos/repo.git',
- # 'version': 'v1.0',
- # 'name': 'repo'
- # }
-
- role_spec = role_spec.strip()
- role_version = ''
- default_role_versions = dict(git='master', hg='tip')
- if role_spec == "" or role_spec.startswith("#"):
- return (None, None, None, None)
-
- tokens = [s.strip() for s in role_spec.split(',')]
-
- # assume https://github.com URLs are git+https:// URLs and not
- # tarballs unless they end in '.zip'
- if 'github.com/' in tokens[0] and not tokens[0].startswith("git+") and not tokens[0].endswith('.tar.gz'):
- tokens[0] = 'git+' + tokens[0]
-
- if '+' in tokens[0]:
- (scm, role_url) = tokens[0].split('+')
- else:
- scm = None
- role_url = tokens[0]
- if len(tokens) >= 2:
- role_version = tokens[1]
- if len(tokens) == 3:
- role_name = tokens[2]
- else:
- role_name = repo_url_to_role_name(tokens[0])
- if scm and not role_version:
- role_version = default_role_versions.get(scm, '')
- return dict(scm=scm, src=role_url, version=role_version, name=role_name)
-
-
-def role_yaml_parse(role):
- if 'role' in role:
- # Old style: {role: "galaxy.role,version,name", other_vars: "here" }
- role_info = role_spec_parse(role['role'])
- if isinstance(role_info, dict):
- # Warning: Slight change in behaviour here. name may be being
- # overloaded. Previously, name was only a parameter to the role.
- # Now it is both a parameter to the role and the name that
- # ansible-galaxy will install under on the local system.
- if 'name' in role and 'name' in role_info:
- del role_info['name']
- role.update(role_info)
- else:
- # New style: { src: 'galaxy.role,version,name', other_vars: "here" }
- if 'github.com' in role["src"] and 'http' in role["src"] and '+' not in role["src"] and not role["src"].endswith('.tar.gz'):
- role["src"] = "git+" + role["src"]
-
- if '+' in role["src"]:
- (scm, src) = role["src"].split('+')
- role["scm"] = scm
- role["src"] = src
-
- if 'name' not in role:
- role["name"] = repo_url_to_role_name(role["src"])
-
- if 'version' not in role:
- role['version'] = ''
-
- if 'scm' not in role:
- role['scm'] = None
-
- return role
-
-
-def json_loads(data):
- ''' parse a JSON string and return a data structure '''
- try:
- loaded = json.loads(data)
- except ValueError,e:
- raise errors.AnsibleError("Unable to read provided data as JSON: %s" % str(e))
-
- return loaded
-
-def _clean_data(orig_data, from_remote=False, from_inventory=False):
- ''' remove jinja2 template tags from a string '''
-
- if not isinstance(orig_data, basestring):
- return orig_data
-
- # when the data is marked as having come from a remote, we always
- # replace any print blocks (ie. {{var}}), however when marked as coming
- # from inventory we only replace print blocks that contain a call to
- # a lookup plugin (ie. {{lookup('foo','bar'))}})
- replace_prints = from_remote or (from_inventory and '{{' in orig_data and LOOKUP_REGEX.search(orig_data) is not None)
-
- regex = PRINT_CODE_REGEX if replace_prints else CODE_REGEX
-
- with contextlib.closing(StringIO.StringIO(orig_data)) as data:
- # these variables keep track of opening block locations, as we only
- # want to replace matched pairs of print/block tags
- print_openings = []
- block_openings = []
- for mo in regex.finditer(orig_data):
- token = mo.group(0)
- token_start = mo.start(0)
-
- if token[0] == '{':
- if token == '{%':
- block_openings.append(token_start)
- elif token == '{{':
- print_openings.append(token_start)
-
- elif token[1] == '}':
- prev_idx = None
- if token == '%}' and block_openings:
- prev_idx = block_openings.pop()
- elif token == '}}' and print_openings:
- prev_idx = print_openings.pop()
-
- if prev_idx is not None:
- # replace the opening
- data.seek(prev_idx, os.SEEK_SET)
- data.write('{#')
- # replace the closing
- data.seek(token_start, os.SEEK_SET)
- data.write('#}')
-
- else:
- assert False, 'Unhandled regex match'
-
- return data.getvalue()
-
-def _clean_data_struct(orig_data, from_remote=False, from_inventory=False):
- '''
- walk a complex data structure, and use _clean_data() to
- remove any template tags that may exist
- '''
- if not from_remote and not from_inventory:
- raise errors.AnsibleErrors("when cleaning data, you must specify either from_remote or from_inventory")
- if isinstance(orig_data, dict):
- data = orig_data.copy()
- for key in data:
- new_key = _clean_data_struct(key, from_remote, from_inventory)
- new_val = _clean_data_struct(data[key], from_remote, from_inventory)
- if key != new_key:
- del data[key]
- data[new_key] = new_val
- elif isinstance(orig_data, list):
- data = orig_data[:]
- for i in range(0, len(data)):
- data[i] = _clean_data_struct(data[i], from_remote, from_inventory)
- elif isinstance(orig_data, basestring):
- data = _clean_data(orig_data, from_remote, from_inventory)
- else:
- data = orig_data
- return data
-
-def parse_json(raw_data, from_remote=False, from_inventory=False, no_exceptions=False):
- ''' this version for module return data only '''
-
- orig_data = raw_data
-
- # ignore stuff like tcgetattr spewage or other warnings
- data = filter_leading_non_json_lines(raw_data)
-
- try:
- results = json.loads(data)
- except:
- if no_exceptions:
- return dict(failed=True, parsed=False, msg=raw_data)
- else:
- raise
-
- if from_remote:
- results = _clean_data_struct(results, from_remote, from_inventory)
-
- return results
-
-def serialize_args(args):
- '''
- Flattens a dictionary args to a k=v string
- '''
- module_args = ""
- for (k,v) in args.iteritems():
- if isinstance(v, basestring):
- module_args = "%s=%s %s" % (k, pipes.quote(v), module_args)
- elif isinstance(v, bool):
- module_args = "%s=%s %s" % (k, str(v), module_args)
- return module_args.strip()
-
-def merge_module_args(current_args, new_args):
- '''
- merges either a dictionary or string of k=v pairs with another string of k=v pairs,
- and returns a new k=v string without duplicates.
- '''
- if not isinstance(current_args, basestring):
- raise errors.AnsibleError("expected current_args to be a basestring")
- # we use parse_kv to split up the current args into a dictionary
- final_args = parse_kv(current_args)
- if isinstance(new_args, dict):
- final_args.update(new_args)
- elif isinstance(new_args, basestring):
- new_args_kv = parse_kv(new_args)
- final_args.update(new_args_kv)
- return serialize_args(final_args)
-
-def parse_yaml(data, path_hint=None):
- ''' convert a yaml string to a data structure. Also supports JSON, ssssssh!!!'''
-
- stripped_data = data.lstrip()
- loaded = None
- if stripped_data.startswith("{") or stripped_data.startswith("["):
- # since the line starts with { or [ we can infer this is a JSON document.
- try:
- loaded = json.loads(data)
- except ValueError, ve:
- if path_hint:
- raise errors.AnsibleError(path_hint + ": " + str(ve))
- else:
- raise errors.AnsibleError(str(ve))
- else:
- # else this is pretty sure to be a YAML document
- loaded = yaml.load(data, Loader=Loader)
-
- return loaded
-
-def process_common_errors(msg, probline, column):
- replaced = probline.replace(" ","")
-
- if ":{{" in replaced and "}}" in replaced:
- msg = msg + """
-This one looks easy to fix. YAML thought it was looking for the start of a
-hash/dictionary and was confused to see a second "{". Most likely this was
-meant to be an ansible template evaluation instead, so we have to give the
-parser a small hint that we wanted a string instead. The solution here is to
-just quote the entire value.
-
-For instance, if the original line was:
-
- app_path: {{ base_path }}/foo
-
-It should be written as:
-
- app_path: "{{ base_path }}/foo"
-"""
- return msg
-
- elif len(probline) and len(probline) > 1 and len(probline) > column and probline[column] == ":" and probline.count(':') > 1:
- msg = msg + """
-This one looks easy to fix. There seems to be an extra unquoted colon in the line
-and this is confusing the parser. It was only expecting to find one free
-colon. The solution is just add some quotes around the colon, or quote the
-entire line after the first colon.
-
-For instance, if the original line was:
-
- copy: src=file.txt dest=/path/filename:with_colon.txt
-
-It can be written as:
-
- copy: src=file.txt dest='/path/filename:with_colon.txt'
-
-Or:
-
- copy: 'src=file.txt dest=/path/filename:with_colon.txt'
-
-
-"""
- return msg
- else:
- parts = probline.split(":")
- if len(parts) > 1:
- middle = parts[1].strip()
- match = False
- unbalanced = False
- if middle.startswith("'") and not middle.endswith("'"):
- match = True
- elif middle.startswith('"') and not middle.endswith('"'):
- match = True
- if len(middle) > 0 and middle[0] in [ '"', "'" ] and middle[-1] in [ '"', "'" ] and probline.count("'") > 2 or probline.count('"') > 2:
- unbalanced = True
- if match:
- msg = msg + """
-This one looks easy to fix. It seems that there is a value started
-with a quote, and the YAML parser is expecting to see the line ended
-with the same kind of quote. For instance:
-
- when: "ok" in result.stdout
-
-Could be written as:
-
- when: '"ok" in result.stdout'
-
-or equivalently:
-
- when: "'ok' in result.stdout"
-
-"""
- return msg
-
- if unbalanced:
- msg = msg + """
-We could be wrong, but this one looks like it might be an issue with
-unbalanced quotes. If starting a value with a quote, make sure the
-line ends with the same set of quotes. For instance this arbitrary
-example:
-
- foo: "bad" "wolf"
-
-Could be written as:
-
- foo: '"bad" "wolf"'
-
-"""
- return msg
-
- return msg
-
-def process_yaml_error(exc, data, path=None, show_content=True):
- if hasattr(exc, 'problem_mark'):
- mark = exc.problem_mark
- if show_content:
- if mark.line -1 >= 0:
- before_probline = data.split("\n")[mark.line-1]
- else:
- before_probline = ''
- probline = data.split("\n")[mark.line]
- arrow = " " * mark.column + "^"
- msg = """Syntax Error while loading YAML script, %s
-Note: The error may actually appear before this position: line %s, column %s
-
-%s
-%s
-%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)
-
- unquoted_var = None
- if '{{' in probline and '}}' in probline:
- if '"{{' not in probline or "'{{" not in probline:
- unquoted_var = True
-
- if not unquoted_var:
- msg = process_common_errors(msg, probline, mark.column)
- else:
- msg = msg + """
-We could be wrong, but this one looks like it might be an issue with
-missing quotes. Always quote template expression brackets when they
-start a value. For instance:
-
- with_items:
- - {{ foo }}
-
-Should be written as:
-
- with_items:
- - "{{ foo }}"
-
-"""
- else:
- # most likely displaying a file with sensitive content,
- # so don't show any of the actual lines of yaml just the
- # line number itself
- msg = """Syntax error while loading YAML script, %s
-The error appears to have been on line %s, column %s, but may actually
-be before there depending on the exact syntax problem.
-""" % (path, mark.line + 1, mark.column + 1)
-
- else:
- # No problem markers means we have to throw a generic
- # "stuff messed up" type message. Sry bud.
- if path:
- msg = "Could not parse YAML. Check over %s again." % path
- else:
- msg = "Could not parse YAML."
- raise errors.AnsibleYAMLValidationFailed(msg)
-
-
-def parse_yaml_from_file(path, vault_password=None):
- ''' convert a yaml file to a data structure '''
-
- data = None
- show_content = True
-
- try:
- data = open(path).read()
- except IOError:
- raise errors.AnsibleError("file could not read: %s" % path)
-
- vault = VaultLib(password=vault_password)
- if vault.is_encrypted(data):
- # if the file is encrypted and no password was specified,
- # the decrypt call would throw an error, but we check first
- # since the decrypt function doesn't know the file name
- if vault_password is None:
- raise errors.AnsibleError("A vault password must be specified to decrypt %s" % path)
- data = vault.decrypt(data)
- show_content = False
-
- try:
- return parse_yaml(data, path_hint=path)
- except yaml.YAMLError, exc:
- process_yaml_error(exc, data, path, show_content)
-
-def parse_kv(args):
- ''' convert a string of key/value items to a dict '''
- options = {}
- if args is not None:
- try:
- vargs = split_args(args)
- except ValueError, ve:
- if 'no closing quotation' in str(ve).lower():
- raise errors.AnsibleError("error parsing argument string, try quoting the entire line.")
- else:
- raise
- for x in vargs:
- if "=" in x:
- k, v = x.split("=",1)
- options[k.strip()] = unquote(v.strip())
- return options
-
-def _validate_both_dicts(a, b):
-
- if not (isinstance(a, dict) and isinstance(b, dict)):
- raise errors.AnsibleError(
- "failed to combine variables, expected dicts but got a '%s' and a '%s'" % (type(a).__name__, type(b).__name__)
- )
-
-def merge_hash(a, b):
- ''' recursively merges hash b into a
- keys from b take precedence over keys from a '''
-
- result = {}
-
- # we check here as well as in combine_vars() since this
- # function can work recursively with nested dicts
- _validate_both_dicts(a, b)
-
- for dicts in a, b:
- # next, iterate over b keys and values
- for k, v in dicts.iteritems():
- # if there's already such key in a
- # and that key contains dict
- if k in result and isinstance(result[k], dict):
- # merge those dicts recursively
- result[k] = merge_hash(a[k], v)
- else:
- # otherwise, just copy a value from b to a
- result[k] = v
-
- return result
-
-def default(value, function):
- ''' syntactic sugar around lazy evaluation of defaults '''
- if value is None:
- return function()
- return value
-
-
-def _git_repo_info(repo_path):
- ''' returns a string containing git branch, commit id and commit date '''
- result = None
- if os.path.exists(repo_path):
- # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
- if os.path.isfile(repo_path):
- try:
- gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
- # There is a possibility the .git file to have an absolute path.
- if os.path.isabs(gitdir):
- repo_path = gitdir
- else:
- repo_path = os.path.join(repo_path[:-4], gitdir)
- except (IOError, AttributeError):
- return ''
- f = open(os.path.join(repo_path, "HEAD"))
- branch = f.readline().split('/')[-1].rstrip("\n")
- f.close()
- branch_path = os.path.join(repo_path, "refs", "heads", branch)
- if os.path.exists(branch_path):
- f = open(branch_path)
- commit = f.readline()[:10]
- f.close()
- else:
- # detached HEAD
- commit = branch[:10]
- branch = 'detached HEAD'
- branch_path = os.path.join(repo_path, "HEAD")
-
- date = time.localtime(os.stat(branch_path).st_mtime)
- if time.daylight == 0:
- offset = time.timezone
- else:
- offset = time.altzone
- result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
- time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
- else:
- result = ''
- return result
-
-
-def _gitinfo():
- basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
- repo_path = os.path.join(basedir, '.git')
- result = _git_repo_info(repo_path)
- submodules = os.path.join(basedir, '.gitmodules')
- if not os.path.exists(submodules):
- return result
- f = open(submodules)
- for line in f:
- tokens = line.strip().split(' ')
- if tokens[0] == 'path':
- submodule_path = tokens[2]
- submodule_info =_git_repo_info(os.path.join(basedir, submodule_path, '.git'))
- if not submodule_info:
- submodule_info = ' not found - use git submodule update --init ' + submodule_path
- result += "\n {0}: {1}".format(submodule_path, submodule_info)
- f.close()
- return result
-
-
-def version(prog):
- result = "{0} {1}".format(prog, __version__)
- gitinfo = _gitinfo()
- if gitinfo:
- result = result + " {0}".format(gitinfo)
- result = result + "\n configured module search path = %s" % C.DEFAULT_MODULE_PATH
- return result
-
-def version_info(gitinfo=False):
- if gitinfo:
- # expensive call, user with care
- ansible_version_string = version('')
- else:
- ansible_version_string = __version__
- ansible_version = ansible_version_string.split()[0]
- ansible_versions = ansible_version.split('.')
- for counter in range(len(ansible_versions)):
- if ansible_versions[counter] == "":
- ansible_versions[counter] = 0
- try:
- ansible_versions[counter] = int(ansible_versions[counter])
- except:
- pass
- if len(ansible_versions) < 3:
- for counter in range(len(ansible_versions), 3):
- ansible_versions.append(0)
- return {'string': ansible_version_string.strip(),
- 'full': ansible_version,
- 'major': ansible_versions[0],
- 'minor': ansible_versions[1],
- 'revision': ansible_versions[2]}
-
-def getch():
- ''' read in a single character '''
- fd = sys.stdin.fileno()
- old_settings = termios.tcgetattr(fd)
- try:
- tty.setraw(sys.stdin.fileno())
- ch = sys.stdin.read(1)
- finally:
- termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
- return ch
-
-def sanitize_output(arg_string):
- ''' strips private info out of a string '''
-
- private_keys = ('password', 'login_password')
-
- output = []
- for part in arg_string.split():
- try:
- (k, v) = part.split('=', 1)
- except ValueError:
- v = heuristic_log_sanitize(part)
- output.append(v)
- continue
-
- if k in private_keys:
- v = 'VALUE_HIDDEN'
- else:
- v = heuristic_log_sanitize(v)
- output.append('%s=%s' % (k, v))
-
- output = ' '.join(output)
- return output
-
-
-####################################################################
-# option handling code for /usr/bin/ansible and ansible-playbook
-# below this line
-
-class SortedOptParser(optparse.OptionParser):
- '''Optparser which sorts the options by opt before outputting --help'''
-
- def format_help(self, formatter=None):
- self.option_list.sort(key=operator.methodcaller('get_opt_string'))
- return optparse.OptionParser.format_help(self, formatter=None)
-
-def increment_debug(option, opt, value, parser):
- global VERBOSITY
- VERBOSITY += 1
-
-def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
- async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False):
- ''' create an options parser for any ansible script '''
-
- parser = SortedOptParser(usage, version=version("%prog"))
- parser.add_option('-v','--verbose', default=False, action="callback",
- callback=increment_debug, help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
-
- parser.add_option('-f','--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
- help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
- parser.add_option('-i', '--inventory-file', dest='inventory',
- help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
- default=constants.DEFAULT_HOST_LIST)
- parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
- help="set additional variables as key=value or YAML/JSON", default=[])
- parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER, dest='remote_user',
- help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)
- parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
- help='ask for SSH password')
- parser.add_option('--private-key', default=constants.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
- help='use this file to authenticate the connection')
- parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
- help='ask for vault password')
- parser.add_option('--vault-password-file', default=constants.DEFAULT_VAULT_PASSWORD_FILE,
- dest='vault_password_file', help="vault password file")
- parser.add_option('--list-hosts', dest='listhosts', action='store_true',
- help='outputs a list of matching hosts; does not execute anything else')
- parser.add_option('-M', '--module-path', dest='module_path',
- help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
- default=None)
-
- if subset_opts:
- parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
- help='further limit selected hosts to an additional pattern')
-
- parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
- dest='timeout',
- help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)
-
- if output_opts:
- parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
- help='condense output')
- parser.add_option('-t', '--tree', dest='tree', default=None,
- help='log output to this directory')
-
- if runas_opts:
- # priv user defaults to root later on to enable detecting when this option was given here
- parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
- help='ask for sudo password (deprecated, use become)')
- parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
- help='ask for su password (deprecated, use become)')
- parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true", dest='sudo',
- help="run operations with sudo (nopasswd) (deprecated, use become)")
- parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
- help='desired sudo user (default=root) (deprecated, use become)')
- parser.add_option('-S', '--su', default=constants.DEFAULT_SU, action='store_true',
- help='run operations with su (deprecated, use become)')
- parser.add_option('-R', '--su-user', default=None,
- help='run operations with su as this user (default=%s) (deprecated, use become)' % constants.DEFAULT_SU_USER)
-
- # consolidated privilege escalation (become)
- parser.add_option("-b", "--become", default=constants.DEFAULT_BECOME, action="store_true", dest='become',
- help="run operations with become (nopasswd implied)")
- parser.add_option('--become-method', dest='become_method', default=constants.DEFAULT_BECOME_METHOD, type='string',
- help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (constants.DEFAULT_BECOME_METHOD, ' | '.join(constants.BECOME_METHODS)))
- parser.add_option('--become-user', default=None, dest='become_user', type='string',
- help='run operations as this user (default=%s)' % constants.DEFAULT_BECOME_USER)
- parser.add_option('--ask-become-pass', default=False, dest='become_ask_pass', action='store_true',
- help='ask for privilege escalation password')
-
-
- if connect_opts:
- parser.add_option('-c', '--connection', dest='connection',
- default=constants.DEFAULT_TRANSPORT,
- help="connection type to use (default=%s)" % constants.DEFAULT_TRANSPORT)
-
- if async_opts:
- parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
- dest='poll_interval',
- help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
- parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
- help='run asynchronously, failing after X seconds (default=N/A)')
-
- if check_opts:
- parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
- help="don't make any changes; instead, try to predict some of the changes that may occur"
- )
-
- if diff_opts:
- parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
- help="when changing (small) files and templates, show the differences in those files; works great with --check"
- )
-
- return parser
-
-def parse_extra_vars(extra_vars_opts, vault_pass):
- extra_vars = {}
- for extra_vars_opt in extra_vars_opts:
- extra_vars_opt = to_unicode(extra_vars_opt)
- if extra_vars_opt.startswith(u"@"):
- # Argument is a YAML file (JSON is a subset of YAML)
- extra_vars = combine_vars(extra_vars, parse_yaml_from_file(extra_vars_opt[1:], vault_password=vault_pass))
- elif extra_vars_opt and extra_vars_opt[0] in u'[{':
- # Arguments as YAML
- extra_vars = combine_vars(extra_vars, parse_yaml(extra_vars_opt))
- else:
- # Arguments as Key-value
- extra_vars = combine_vars(extra_vars, parse_kv(extra_vars_opt))
- return extra_vars
-
-def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
-
- vault_pass = None
- new_vault_pass = None
-
- if ask_vault_pass:
- vault_pass = getpass.getpass(prompt="Vault password: ")
-
- if ask_vault_pass and confirm_vault:
- vault_pass2 = getpass.getpass(prompt="Confirm Vault password: ")
- if vault_pass != vault_pass2:
- raise errors.AnsibleError("Passwords do not match")
-
- if ask_new_vault_pass:
- new_vault_pass = getpass.getpass(prompt="New Vault password: ")
-
- if ask_new_vault_pass and confirm_new:
- new_vault_pass2 = getpass.getpass(prompt="Confirm New Vault password: ")
- if new_vault_pass != new_vault_pass2:
- raise errors.AnsibleError("Passwords do not match")
-
- # enforce no newline chars at the end of passwords
- if vault_pass:
- vault_pass = to_bytes(vault_pass, errors='strict', nonstring='simplerepr').strip()
- if new_vault_pass:
- new_vault_pass = to_bytes(new_vault_pass, errors='strict', nonstring='simplerepr').strip()
-
- return vault_pass, new_vault_pass
-
-def ask_passwords(ask_pass=False, become_ask_pass=False, ask_vault_pass=False, become_method=C.DEFAULT_BECOME_METHOD):
- sshpass = None
- becomepass = None
- vaultpass = None
- become_prompt = ''
-
- if ask_pass:
- sshpass = getpass.getpass(prompt="SSH password: ")
- become_prompt = "%s password[defaults to SSH password]: " % become_method.upper()
- if sshpass:
- sshpass = to_bytes(sshpass, errors='strict', nonstring='simplerepr')
- else:
- become_prompt = "%s password: " % become_method.upper()
-
- if become_ask_pass:
- becomepass = getpass.getpass(prompt=become_prompt)
- if ask_pass and becomepass == '':
- becomepass = sshpass
- if becomepass:
- becomepass = to_bytes(becomepass)
-
- if ask_vault_pass:
- vaultpass = getpass.getpass(prompt="Vault password: ")
- if vaultpass:
- vaultpass = to_bytes(vaultpass, errors='strict', nonstring='simplerepr').strip()
-
- return (sshpass, becomepass, vaultpass)
-
-
-def choose_pass_prompt(options):
-
- if options.ask_su_pass:
- return 'su'
- elif options.ask_sudo_pass:
- return 'sudo'
-
- return options.become_method
-
-def normalize_become_options(options):
-
- options.become_ask_pass = options.become_ask_pass or options.ask_sudo_pass or options.ask_su_pass or C.DEFAULT_BECOME_ASK_PASS
- options.become_user = options.become_user or options.sudo_user or options.su_user or C.DEFAULT_BECOME_USER
-
- if options.become:
- pass
- elif options.sudo:
- options.become = True
- options.become_method = 'sudo'
- elif options.su:
- options.become = True
- options.become_method = 'su'
-
-
-def do_encrypt(result, encrypt, salt_size=None, salt=None):
- if PASSLIB_AVAILABLE:
- try:
- crypt = getattr(passlib.hash, encrypt)
- except:
- raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)
-
- if salt_size:
- result = crypt.encrypt(result, salt_size=salt_size)
- elif salt:
- result = crypt.encrypt(result, salt=salt)
- else:
- result = crypt.encrypt(result)
- else:
- raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")
-
- return result
-
-def last_non_blank_line(buf):
-
- all_lines = buf.splitlines()
- all_lines.reverse()
- for line in all_lines:
- if (len(line) > 0):
- return line
- # shouldn't occur unless there's no output
- return ""
-
-def filter_leading_non_json_lines(buf):
- '''
- used to avoid random output from SSH at the top of JSON output, like messages from
- tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).
-
- need to filter anything which starts not with '{', '[', ', '=' or is an empty line.
- filter only leading lines since multiline JSON is valid.
- '''
-
- filtered_lines = StringIO.StringIO()
- stop_filtering = False
- for line in buf.splitlines():
- if stop_filtering or line.startswith('{') or line.startswith('['):
- stop_filtering = True
- filtered_lines.write(line + '\n')
- return filtered_lines.getvalue()
-
-def boolean(value):
- val = str(value)
- if val.lower() in [ "true", "t", "y", "1", "yes" ]:
- return True
- else:
- return False
-
-def make_become_cmd(cmd, user, shell, method, flags=None, exe=None):
- """
- helper function for connection plugins to create privilege escalation commands
- """
-
- randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
- success_key = 'BECOME-SUCCESS-%s' % randbits
- prompt = None
- becomecmd = None
-
- shell = shell or '$SHELL'
-
- if method == 'sudo':
- # Rather than detect if sudo wants a password this time, -k makes sudo always ask for
- # a password if one is required. Passing a quoted compound command to sudo (or sudo -s)
- # directly doesn't work, so we shellquote it with pipes.quote() and pass the quoted
- # string to the user's shell. We loop reading output until we see the randomly-generated
- # sudo prompt set with the -p option.
- prompt = '[sudo via ansible, key=%s] password: ' % randbits
- exe = exe or C.DEFAULT_SUDO_EXE
- becomecmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % \
- (exe, exe, flags or C.DEFAULT_SUDO_FLAGS, prompt, user, shell, pipes.quote('echo %s; %s' % (success_key, cmd)))
-
- elif method == 'su':
- exe = exe or C.DEFAULT_SU_EXE
- flags = flags or C.DEFAULT_SU_FLAGS
- becomecmd = '%s %s %s -c "%s -c %s"' % (exe, flags, user, shell, pipes.quote('echo %s; %s' % (success_key, cmd)))
-
- elif method == 'pbrun':
- prompt = 'assword:'
- exe = exe or 'pbrun'
- flags = flags or ''
- becomecmd = '%s -b -l %s -u %s "%s"' % (exe, flags, user, pipes.quote('echo %s; %s' % (success_key,cmd)))
-
- elif method == 'pfexec':
- exe = exe or 'pfexec'
- flags = flags or ''
- # No user as it uses it's own exec_attr to figure it out
- becomecmd = '%s %s "%s"' % (exe, flags, pipes.quote('echo %s; %s' % (success_key,cmd)))
-
- if becomecmd is None:
- raise errors.AnsibleError("Privilege escalation method not found: %s" % method)
-
- return (('%s -c ' % shell) + pipes.quote(becomecmd), prompt, success_key)
-
-
-def make_sudo_cmd(sudo_exe, sudo_user, executable, cmd):
- """
- helper function for connection plugins to create sudo commands
- """
- return make_become_cmd(cmd, sudo_user, executable, 'sudo', C.DEFAULT_SUDO_FLAGS, sudo_exe)
-
-
-def make_su_cmd(su_user, executable, cmd):
- """
- Helper function for connection plugins to create direct su commands
- """
- return make_become_cmd(cmd, su_user, executable, 'su', C.DEFAULT_SU_FLAGS, C.DEFAULT_SU_EXE)
-
-def get_diff(diff):
- # called by --diff usage in playbook and runner via callbacks
- # include names in diffs 'before' and 'after' and do diff -U 10
-
- try:
- with warnings.catch_warnings():
- warnings.simplefilter('ignore')
- ret = []
- if 'dst_binary' in diff:
- ret.append("diff skipped: destination file appears to be binary\n")
- if 'src_binary' in diff:
- ret.append("diff skipped: source file appears to be binary\n")
- if 'dst_larger' in diff:
- ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
- if 'src_larger' in diff:
- ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
- if 'before' in diff and 'after' in diff:
- if 'before_header' in diff:
- before_header = "before: %s" % diff['before_header']
- else:
- before_header = 'before'
- if 'after_header' in diff:
- after_header = "after: %s" % diff['after_header']
- else:
- after_header = 'after'
- differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
- for line in list(differ):
- ret.append(line)
- return u"".join(ret)
- except UnicodeDecodeError:
- return ">> the files are different, but the diff library cannot compare unicode strings"
-
-def is_list_of_strings(items):
- for x in items:
- if not isinstance(x, basestring):
- return False
- return True
-
-def list_union(a, b):
- result = []
- for x in a:
- if x not in result:
- result.append(x)
- for x in b:
- if x not in result:
- result.append(x)
- return result
-
-def list_intersection(a, b):
- result = []
- for x in a:
- if x in b and x not in result:
- result.append(x)
- return result
-
-def list_difference(a, b):
- result = []
- for x in a:
- if x not in b and x not in result:
- result.append(x)
- for x in b:
- if x not in a and x not in result:
- result.append(x)
- return result
-
-def contains_vars(data):
- '''
- returns True if the data contains a variable pattern
- '''
- return "$" in data or "{{" in data
-
-def safe_eval(expr, locals={}, include_exceptions=False):
- '''
- This is intended for allowing things like:
- with_items: a_list_variable
-
- Where Jinja2 would return a string but we do not want to allow it to
- call functions (outside of Jinja2, where the env is constrained). If
- the input data to this function came from an untrusted (remote) source,
- it should first be run through _clean_data_struct() to ensure the data
- is further sanitized prior to evaluation.
-
- Based on:
- http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
- '''
-
- # this is the whitelist of AST nodes we are going to
- # allow in the evaluation. Any node type other than
- # those listed here will raise an exception in our custom
- # visitor class defined below.
- SAFE_NODES = set(
- (
- ast.Add,
- ast.BinOp,
- ast.Call,
- ast.Compare,
- ast.Dict,
- ast.Div,
- ast.Expression,
- ast.List,
- ast.Load,
- ast.Mult,
- ast.Num,
- ast.Name,
- ast.Str,
- ast.Sub,
- ast.Tuple,
- ast.UnaryOp,
- )
- )
-
- # AST node types were expanded after 2.6
- if not sys.version.startswith('2.6'):
- SAFE_NODES.union(
- set(
- (ast.Set,)
- )
- )
-
- filter_list = []
- for filter in filter_loader.all():
- filter_list.extend(filter.filters().keys())
-
- CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
-
- class CleansingNodeVisitor(ast.NodeVisitor):
- def generic_visit(self, node, inside_call=False):
- if type(node) not in SAFE_NODES:
- raise Exception("invalid expression (%s)" % expr)
- elif isinstance(node, ast.Call):
- inside_call = True
- elif isinstance(node, ast.Name) and inside_call:
- if hasattr(builtin, node.id) and node.id not in CALL_WHITELIST:
- raise Exception("invalid function: %s" % node.id)
- # iterate over all child nodes
- for child_node in ast.iter_child_nodes(node):
- self.generic_visit(child_node, inside_call)
-
- if not isinstance(expr, basestring):
- # already templated to a datastructure, perhaps?
- if include_exceptions:
- return (expr, None)
- return expr
-
- cnv = CleansingNodeVisitor()
- try:
- parsed_tree = ast.parse(expr, mode='eval')
- cnv.visit(parsed_tree)
- compiled = compile(parsed_tree, expr, 'eval')
- result = eval(compiled, {}, locals)
-
- if include_exceptions:
- return (result, None)
- else:
- return result
- except SyntaxError, e:
- # special handling for syntax errors, we just return
- # the expression string back as-is
- if include_exceptions:
- return (expr, None)
- return expr
- except Exception, e:
- if include_exceptions:
- return (expr, e)
- return expr
-
-
-def listify_lookup_plugin_terms(terms, basedir, inject):
-
- from ansible.utils import template
-
- if isinstance(terms, basestring):
- # someone did:
- # with_items: alist
- # OR
- # with_items: {{ alist }}
-
- stripped = terms.strip()
- if not (stripped.startswith('{') or stripped.startswith('[')) and \
- not stripped.startswith("/") and \
- not stripped.startswith('set([') and \
- not LOOKUP_REGEX.search(terms):
- # if not already a list, get ready to evaluate with Jinja2
- # not sure why the "/" is in above code :)
- try:
- new_terms = template.template(basedir, "{{ %s }}" % terms, inject)
- if isinstance(new_terms, basestring) and "{{" in new_terms:
- pass
- else:
- terms = new_terms
- except:
- pass
-
- if '{' in terms or '[' in terms:
- # Jinja2 already evaluated a variable to a list.
- # Jinja2-ified list needs to be converted back to a real type
- # TODO: something a bit less heavy than eval
- return safe_eval(terms)
-
- if isinstance(terms, basestring):
- terms = [ terms ]
-
- return terms
-
-def combine_vars(a, b):
-
- _validate_both_dicts(a, b)
-
- if C.DEFAULT_HASH_BEHAVIOUR == "merge":
- return merge_hash(a, b)
- else:
- return dict(a.items() + b.items())
-
-def random_password(length=20, chars=C.DEFAULT_PASSWORD_CHARS):
- '''Return a random password string of length containing only chars.'''
-
- password = []
- while len(password) < length:
- new_char = os.urandom(1)
- if new_char in chars:
- password.append(new_char)
-
- return ''.join(password)
-
-def before_comment(msg):
- ''' what's the part of a string before a comment? '''
- msg = msg.replace("\#","**NOT_A_COMMENT**")
- msg = msg.split("#")[0]
- msg = msg.replace("**NOT_A_COMMENT**","#")
- return msg
-
-def load_vars(basepath, results, vault_password=None):
- """
- Load variables from any potential yaml filename combinations of basepath,
- returning result.
- """
-
- paths_to_check = [ "".join([basepath, ext])
- for ext in C.YAML_FILENAME_EXTENSIONS ]
-
- found_paths = []
-
- for path in paths_to_check:
- found, results = _load_vars_from_path(path, results, vault_password=vault_password)
- if found:
- found_paths.append(path)
-
-
- # disallow the potentially confusing situation that there are multiple
- # variable files for the same name. For example if both group_vars/all.yml
- # and group_vars/all.yaml
- if len(found_paths) > 1:
- raise errors.AnsibleError("Multiple variable files found. "
- "There should only be one. %s" % ( found_paths, ))
-
- return results
-
-## load variables from yaml files/dirs
-# e.g. host/group_vars
-#
-def _load_vars_from_path(path, results, vault_password=None):
- """
- Robustly access the file at path and load variables, carefully reporting
- errors in a friendly/informative way.
-
- Return the tuple (found, new_results, )
- """
-
- try:
- # in the case of a symbolic link, we want the stat of the link itself,
- # not its target
- pathstat = os.lstat(path)
- except os.error, err:
- # most common case is that nothing exists at that path.
- if err.errno == errno.ENOENT:
- return False, results
- # otherwise this is a condition we should report to the user
- raise errors.AnsibleError(
- "%s is not accessible: %s."
- " Please check its permissions." % ( path, err.strerror))
-
- # symbolic link
- if stat.S_ISLNK(pathstat.st_mode):
- try:
- target = os.path.realpath(path)
- except os.error, err2:
- raise errors.AnsibleError("The symbolic link at %s "
- "is not readable: %s. Please check its permissions."
- % (path, err2.strerror, ))
- # follow symbolic link chains by recursing, so we repeat the same
- # permissions checks above and provide useful errors.
- return _load_vars_from_path(target, results, vault_password)
-
- # directory
- if stat.S_ISDIR(pathstat.st_mode):
-
- # support organizing variables across multiple files in a directory
- return True, _load_vars_from_folder(path, results, vault_password=vault_password)
-
- # regular file
- elif stat.S_ISREG(pathstat.st_mode):
- data = parse_yaml_from_file(path, vault_password=vault_password)
- if data and type(data) != dict:
- raise errors.AnsibleError(
- "%s must be stored as a dictionary/hash" % path)
- elif data is None:
- data = {}
-
- # combine vars overrides by default but can be configured to do a
- # hash merge in settings
- results = combine_vars(results, data)
- return True, results
-
- # something else? could be a fifo, socket, device, etc.
- else:
- raise errors.AnsibleError("Expected a variable file or directory "
- "but found a non-file object at path %s" % (path, ))
-
-def _load_vars_from_folder(folder_path, results, vault_password=None):
- """
- Load all variables within a folder recursively.
- """
-
- # this function and _load_vars_from_path are mutually recursive
-
- try:
- names = os.listdir(folder_path)
- except os.error, err:
- raise errors.AnsibleError(
- "This folder cannot be listed: %s: %s."
- % ( folder_path, err.strerror))
-
- # evaluate files in a stable order rather than whatever order the
- # filesystem lists them.
- names.sort()
-
- # do not parse hidden files or dirs, e.g. .svn/
- paths = [os.path.join(folder_path, name) for name in names if not name.startswith('.')]
- for path in paths:
- _found, results = _load_vars_from_path(path, results, vault_password=vault_password)
- return results
-
-def update_hash(hash, key, new_value):
- ''' used to avoid nested .update calls on the parent '''
-
- value = hash.get(key, {})
- value.update(new_value)
- hash[key] = value
-
-def censor_unlogged_data(data):
- '''
- used when the no_log: True attribute is passed to a task to keep data from a callback.
- NOT intended to prevent variable registration, but only things from showing up on
- screen
- '''
- new_data = {}
- for (x,y) in data.iteritems():
- if x in [ 'skipped', 'changed', 'failed', 'rc' ]:
- new_data[x] = y
- new_data['censored'] = 'results hidden due to no_log parameter'
- return new_data
-
-def check_mutually_exclusive_privilege(options, parser):
-
- # privilege escalation command line arguments need to be mutually exclusive
- if (options.su or options.su_user or options.ask_su_pass) and \
- (options.sudo or options.sudo_user or options.ask_sudo_pass) or \
- (options.su or options.su_user or options.ask_su_pass) and \
- (options.become or options.become_user or options.become_ask_pass) or \
- (options.sudo or options.sudo_user or options.ask_sudo_pass) and \
- (options.become or options.become_user or options.become_ask_pass):
-
- parser.error("Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
- "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
- "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
- " are exclusive of each other")
-
-
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/v2/ansible/utils/boolean.py b/lib/ansible/utils/boolean.py
similarity index 100%
rename from v2/ansible/utils/boolean.py
rename to lib/ansible/utils/boolean.py
diff --git a/v2/ansible/utils/color.py b/lib/ansible/utils/color.py
similarity index 100%
rename from v2/ansible/utils/color.py
rename to lib/ansible/utils/color.py
diff --git a/v2/ansible/utils/debug.py b/lib/ansible/utils/debug.py
similarity index 100%
rename from v2/ansible/utils/debug.py
rename to lib/ansible/utils/debug.py
diff --git a/v2/ansible/utils/display.py b/lib/ansible/utils/display.py
similarity index 100%
rename from v2/ansible/utils/display.py
rename to lib/ansible/utils/display.py
diff --git a/v2/ansible/utils/encrypt.py b/lib/ansible/utils/encrypt.py
similarity index 100%
rename from v2/ansible/utils/encrypt.py
rename to lib/ansible/utils/encrypt.py
diff --git a/lib/ansible/utils/hashing.py b/lib/ansible/utils/hashing.py
index a7d142e5bd..5e378db79f 100644
--- a/lib/ansible/utils/hashing.py
+++ b/lib/ansible/utils/hashing.py
@@ -20,6 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+from ansible.errors import AnsibleError
# Note, sha1 is the only hash algorithm compatible with python2.4 and with
# FIPS-140 mode (as of 11-2014)
@@ -43,6 +44,8 @@ def secure_hash_s(data, hash_func=sha1):
digest = hash_func()
try:
+ if not isinstance(data, basestring):
+ data = "%s" % data
digest.update(data)
except UnicodeEncodeError:
digest.update(data.encode('utf-8'))
@@ -62,8 +65,8 @@ def secure_hash(filename, hash_func=sha1):
digest.update(block)
block = infile.read(blocksize)
infile.close()
- except IOError, e:
- raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
+ except IOError as e:
+ raise AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
return digest.hexdigest()
# The checksum algorithm must match with the algorithm in ShellModule.checksum() method
diff --git a/v2/ansible/utils/listify.py b/lib/ansible/utils/listify.py
similarity index 100%
rename from v2/ansible/utils/listify.py
rename to lib/ansible/utils/listify.py
diff --git a/lib/ansible/utils/module_docs.py b/lib/ansible/utils/module_docs.py
index ee99af2cb5..632b4a00c2 100644
--- a/lib/ansible/utils/module_docs.py
+++ b/lib/ansible/utils/module_docs.py
@@ -23,7 +23,7 @@ import ast
import yaml
import traceback
-from ansible import utils
+from ansible.plugins import fragment_loader
# modules that are ok that they do not have documentation strings
BLACKLIST_MODULES = [
@@ -66,7 +66,7 @@ def get_docstring(filename, verbose=False):
if fragment_slug != 'doesnotexist':
- fragment_class = utils.plugins.fragment_loader.get(fragment_name)
+ fragment_class = fragment_loader.get(fragment_name)
assert fragment_class is not None
fragment_yaml = getattr(fragment_class, fragment_var, '{}')
diff --git a/v2/ansible/utils/module_docs_fragments b/lib/ansible/utils/module_docs_fragments
similarity index 100%
rename from v2/ansible/utils/module_docs_fragments
rename to lib/ansible/utils/module_docs_fragments
diff --git a/v2/ansible/utils/path.py b/lib/ansible/utils/path.py
similarity index 100%
rename from v2/ansible/utils/path.py
rename to lib/ansible/utils/path.py
diff --git a/lib/ansible/utils/unicode.py b/lib/ansible/utils/unicode.py
index 7bd035c007..2cff2e5e45 100644
--- a/lib/ansible/utils/unicode.py
+++ b/lib/ansible/utils/unicode.py
@@ -19,6 +19,8 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from six import string_types, text_type, binary_type, PY3
+
# to_bytes and to_unicode were written by Toshio Kuratomi for the
# python-kitchen library https://pypi.python.org/pypi/kitchen
# They are licensed in kitchen under the terms of the GPLv2+
@@ -35,6 +37,9 @@ _LATIN1_ALIASES = frozenset(('latin-1', 'LATIN-1', 'latin1', 'LATIN1',
# EXCEPTION_CONVERTERS is defined below due to using to_unicode
+if PY3:
+ basestring = (str, bytes)
+
def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
'''Convert an object into a :class:`unicode` string
@@ -89,12 +94,12 @@ def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
# Could use isbasestring/isunicode here but we want this code to be as
# fast as possible
if isinstance(obj, basestring):
- if isinstance(obj, unicode):
+ if isinstance(obj, text_type):
return obj
if encoding in _UTF8_ALIASES:
- return unicode(obj, 'utf-8', errors)
+ return text_type(obj, 'utf-8', errors)
if encoding in _LATIN1_ALIASES:
- return unicode(obj, 'latin-1', errors)
+ return text_type(obj, 'latin-1', errors)
return obj.decode(encoding, errors)
if not nonstring:
@@ -110,19 +115,19 @@ def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
simple = None
if not simple:
try:
- simple = str(obj)
+ simple = text_type(obj)
except UnicodeError:
try:
simple = obj.__str__()
except (UnicodeError, AttributeError):
simple = u''
- if isinstance(simple, str):
- return unicode(simple, encoding, errors)
+ if isinstance(simple, binary_type):
+ return text_type(simple, encoding, errors)
return simple
elif nonstring in ('repr', 'strict'):
obj_repr = repr(obj)
- if isinstance(obj_repr, str):
- obj_repr = unicode(obj_repr, encoding, errors)
+ if isinstance(obj_repr, binary_type):
+ obj_repr = text_type(obj_repr, encoding, errors)
if nonstring == 'repr':
return obj_repr
raise TypeError('to_unicode was given "%(obj)s" which is neither'
@@ -198,19 +203,19 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
# Could use isbasestring, isbytestring here but we want this to be as fast
# as possible
if isinstance(obj, basestring):
- if isinstance(obj, str):
+ if isinstance(obj, binary_type):
return obj
return obj.encode(encoding, errors)
if not nonstring:
nonstring = 'simplerepr'
if nonstring == 'empty':
- return ''
+ return b''
elif nonstring == 'passthru':
return obj
elif nonstring == 'simplerepr':
try:
- simple = str(obj)
+ simple = binary_type(obj)
except UnicodeError:
try:
simple = obj.__str__()
@@ -220,19 +225,19 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
try:
simple = obj.__unicode__()
except (AttributeError, UnicodeError):
- simple = ''
- if isinstance(simple, unicode):
+ simple = b''
+ if isinstance(simple, text_type):
simple = simple.encode(encoding, 'replace')
return simple
elif nonstring in ('repr', 'strict'):
try:
obj_repr = obj.__repr__()
except (AttributeError, UnicodeError):
- obj_repr = ''
- if isinstance(obj_repr, unicode):
+ obj_repr = b''
+ if isinstance(obj_repr, text_type):
obj_repr = obj_repr.encode(encoding, errors)
else:
- obj_repr = str(obj_repr)
+ obj_repr = binary_type(obj_repr)
if nonstring == 'repr':
return obj_repr
raise TypeError('to_bytes was given "%(obj)s" which is neither'
diff --git a/v2/ansible/utils/vars.py b/lib/ansible/utils/vars.py
similarity index 100%
rename from v2/ansible/utils/vars.py
rename to lib/ansible/utils/vars.py
diff --git a/lib/ansible/utils/vault.py b/lib/ansible/utils/vault.py
index 842688a2c1..5c704afac5 100644
--- a/lib/ansible/utils/vault.py
+++ b/lib/ansible/utils/vault.py
@@ -1,4 +1,6 @@
-# (c) 2014, James Tanner
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -12,574 +14,43 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-#
-# ansible-pull is a script that runs ansible in local mode
-# after checking out a playbooks directory from source repo. There is an
-# example playbook to bootstrap this script in the examples/ dir which
-# installs ansible and sets it up to run on cron.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
import os
-import shlex
-import shutil
-import tempfile
-from io import BytesIO
-from subprocess import call
-from ansible import errors
-from hashlib import sha256
+import subprocess
-# Note: Only used for loading obsolete VaultAES files. All files are written
-# using the newer VaultAES256 which does not require md5
-try:
- from hashlib import md5
-except ImportError:
- try:
- from md5 import md5
- except ImportError:
- # MD5 unavailable. Possibly FIPS mode
- md5 = None
-
-from binascii import hexlify
-from binascii import unhexlify
from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.utils.path import is_executable
-try:
- from Crypto.Hash import SHA256, HMAC
- HAS_HASH = True
-except ImportError:
- HAS_HASH = False
+def read_vault_file(vault_password_file):
+ """
+ Read a vault password from a file or if executable, execute the script and
+ retrieve password from STDOUT
+ """
-# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
-try:
- from Crypto.Util import Counter
- HAS_COUNTER = True
-except ImportError:
- HAS_COUNTER = False
+ this_path = os.path.realpath(os.path.expanduser(vault_password_file))
+ if not os.path.exists(this_path):
+ raise AnsibleError("The vault password file %s was not found" % this_path)
-# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
-try:
- from Crypto.Protocol.KDF import PBKDF2
- HAS_PBKDF2 = True
-except ImportError:
- HAS_PBKDF2 = False
-
-# AES IMPORTS
-try:
- from Crypto.Cipher import AES as AES
- HAS_AES = True
-except ImportError:
- HAS_AES = False
-
-CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
-
-HEADER='$ANSIBLE_VAULT'
-CIPHER_WHITELIST=['AES', 'AES256']
-
-class VaultLib(object):
-
- def __init__(self, password):
- self.password = password
- self.cipher_name = None
- self.version = '1.1'
-
- def is_encrypted(self, data):
- if data.startswith(HEADER):
- return True
- else:
- return False
-
- def encrypt(self, data):
-
- if self.is_encrypted(data):
- raise errors.AnsibleError("data is already encrypted")
-
- if not self.cipher_name:
- self.cipher_name = "AES256"
- #raise errors.AnsibleError("the cipher must be set before encrypting data")
-
- if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
- cipher = globals()['Vault' + self.cipher_name]
- this_cipher = cipher()
- else:
- raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
-
- """
- # combine sha + data
- this_sha = sha256(data).hexdigest()
- tmp_data = this_sha + "\n" + data
- """
-
- # encrypt sha + data
- enc_data = this_cipher.encrypt(data, self.password)
-
- # add header
- tmp_data = self._add_header(enc_data)
- return tmp_data
-
- def decrypt(self, data):
- if self.password is None:
- raise errors.AnsibleError("A vault password must be specified to decrypt data")
-
- if not self.is_encrypted(data):
- raise errors.AnsibleError("data is not encrypted")
-
- # clean out header
- data = self._split_header(data)
-
- # create the cipher object
- if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
- cipher = globals()['Vault' + self.cipher_name]
- this_cipher = cipher()
- else:
- raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
-
- # try to unencrypt data
- data = this_cipher.decrypt(data, self.password)
- if data is None:
- raise errors.AnsibleError("Decryption failed")
-
- return data
-
- def _add_header(self, data):
- # combine header and encrypted data in 80 char columns
-
- #tmpdata = hexlify(data)
- tmpdata = [data[i:i+80] for i in range(0, len(data), 80)]
-
- if not self.cipher_name:
- raise errors.AnsibleError("the cipher must be set before adding a header")
-
- dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n"
-
- for l in tmpdata:
- dirty_data += l + '\n'
-
- return dirty_data
-
-
- def _split_header(self, data):
- # used by decrypt
-
- tmpdata = data.split('\n')
- tmpheader = tmpdata[0].strip().split(';')
-
- self.version = str(tmpheader[1].strip())
- self.cipher_name = str(tmpheader[2].strip())
- clean_data = '\n'.join(tmpdata[1:])
-
- """
- # strip out newline, join, unhex
- clean_data = [ x.strip() for x in clean_data ]
- clean_data = unhexlify(''.join(clean_data))
- """
-
- return clean_data
-
- def __enter__(self):
- return self
-
- def __exit__(self, *err):
- pass
-
-class VaultEditor(object):
- # uses helper methods for write_file(self, filename, data)
- # to write a file so that code isn't duplicated for simple
- # file I/O, ditto read_file(self, filename) and launch_editor(self, filename)
- # ... "Don't Repeat Yourself", etc.
-
- def __init__(self, cipher_name, password, filename):
- # instantiates a member variable for VaultLib
- self.cipher_name = cipher_name
- self.password = password
- self.filename = filename
-
- def _edit_file_helper(self, existing_data=None, cipher=None):
- # make sure the umask is set to a sane value
- old_umask = os.umask(0o077)
-
- # Create a tempfile
- _, tmp_path = tempfile.mkstemp()
-
- if existing_data:
- self.write_data(existing_data, tmp_path)
-
- # drop the user into an editor on the tmp file
+ if is_executable(this_path):
try:
- call(self._editor_shell_command(tmp_path))
- except OSError, e:
- raise Exception("Failed to open editor (%s): %s" % (self._editor_shell_command(tmp_path)[0],str(e)))
- tmpdata = self.read_data(tmp_path)
-
- # create new vault
- this_vault = VaultLib(self.password)
- if cipher:
- this_vault.cipher_name = cipher
-
- # encrypt new data and write out to tmp
- enc_data = this_vault.encrypt(tmpdata)
- self.write_data(enc_data, tmp_path)
-
- # shuffle tmp file into place
- self.shuffle_files(tmp_path, self.filename)
-
- # and restore umask
- os.umask(old_umask)
-
- def create_file(self):
- """ create a new encrypted file """
-
- if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- if os.path.isfile(self.filename):
- raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)
-
- # Let the user specify contents and save file
- self._edit_file_helper(cipher=self.cipher_name)
-
- def decrypt_file(self):
-
- if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- if not os.path.isfile(self.filename):
- raise errors.AnsibleError("%s does not exist" % self.filename)
-
- tmpdata = self.read_data(self.filename)
- this_vault = VaultLib(self.password)
- if this_vault.is_encrypted(tmpdata):
- dec_data = this_vault.decrypt(tmpdata)
- if dec_data is None:
- raise errors.AnsibleError("Decryption failed")
- else:
- self.write_data(dec_data, self.filename)
- else:
- raise errors.AnsibleError("%s is not encrypted" % self.filename)
-
- def edit_file(self):
-
- if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- # decrypt to tmpfile
- tmpdata = self.read_data(self.filename)
- this_vault = VaultLib(self.password)
- dec_data = this_vault.decrypt(tmpdata)
-
- # let the user edit the data and save
- self._edit_file_helper(existing_data=dec_data)
- ###we want the cipher to default to AES256 (get rid of files
- # encrypted with the AES cipher)
- #self._edit_file_helper(existing_data=dec_data, cipher=this_vault.cipher_name)
-
-
- def view_file(self):
-
- if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- # decrypt to tmpfile
- tmpdata = self.read_data(self.filename)
- this_vault = VaultLib(self.password)
- dec_data = this_vault.decrypt(tmpdata)
- old_umask = os.umask(0o077)
- _, tmp_path = tempfile.mkstemp()
- self.write_data(dec_data, tmp_path)
- os.umask(old_umask)
-
- # drop the user into pager on the tmp file
- call(self._pager_shell_command(tmp_path))
- os.remove(tmp_path)
-
- def encrypt_file(self):
-
- if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- if not os.path.isfile(self.filename):
- raise errors.AnsibleError("%s does not exist" % self.filename)
-
- tmpdata = self.read_data(self.filename)
- this_vault = VaultLib(self.password)
- this_vault.cipher_name = self.cipher_name
- if not this_vault.is_encrypted(tmpdata):
- enc_data = this_vault.encrypt(tmpdata)
- self.write_data(enc_data, self.filename)
- else:
- raise errors.AnsibleError("%s is already encrypted" % self.filename)
-
- def rekey_file(self, new_password):
-
- if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- # decrypt
- tmpdata = self.read_data(self.filename)
- this_vault = VaultLib(self.password)
- dec_data = this_vault.decrypt(tmpdata)
-
- # create new vault
- new_vault = VaultLib(new_password)
-
- # we want to force cipher to the default
- #new_vault.cipher_name = this_vault.cipher_name
-
- # re-encrypt data and re-write file
- enc_data = new_vault.encrypt(dec_data)
- self.write_data(enc_data, self.filename)
-
- def read_data(self, filename):
- f = open(filename, "rb")
- tmpdata = f.read()
- f.close()
- return tmpdata
-
- def write_data(self, data, filename):
- if os.path.isfile(filename):
- os.remove(filename)
- f = open(filename, "wb")
- f.write(data)
- f.close()
-
- def shuffle_files(self, src, dest):
- # overwrite dest with src
- if os.path.isfile(dest):
- os.remove(dest)
- shutil.move(src, dest)
-
- def _editor_shell_command(self, filename):
- EDITOR = os.environ.get('EDITOR','vim')
- editor = shlex.split(EDITOR)
- editor.append(filename)
-
- return editor
-
- def _pager_shell_command(self, filename):
- PAGER = os.environ.get('PAGER','less')
- pager = shlex.split(PAGER)
- pager.append(filename)
-
- return pager
-
-########################################
-# CIPHERS #
-########################################
-
-class VaultAES(object):
-
- # this version has been obsoleted by the VaultAES256 class
- # which uses encrypt-then-mac (fixing order) and also improving the KDF used
- # code remains for upgrade purposes only
- # http://stackoverflow.com/a/16761459
-
- def __init__(self):
- if not md5:
- raise errors.AnsibleError('md5 hash is unavailable (Could be due to FIPS mode). Legacy VaultAES format is unavailable.')
- if not HAS_AES:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):
-
- """ Create a key and an initialization vector """
-
- d = d_i = ''
- while len(d) < key_length + iv_length:
- d_i = md5(d_i + password + salt).digest()
- d += d_i
-
- key = d[:key_length]
- iv = d[key_length:key_length+iv_length]
-
- return key, iv
-
- def encrypt(self, data, password, key_length=32):
-
- """ Read plaintext data from in_file and write encrypted to out_file """
-
-
- # combine sha + data
- this_sha = sha256(data).hexdigest()
- tmp_data = this_sha + "\n" + data
-
- in_file = BytesIO(tmp_data)
- in_file.seek(0)
- out_file = BytesIO()
-
- bs = AES.block_size
-
- # Get a block of random data. EL does not have Crypto.Random.new()
- # so os.urandom is used for cross platform purposes
- salt = os.urandom(bs - len('Salted__'))
-
- key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
- cipher = AES.new(key, AES.MODE_CBC, iv)
- out_file.write('Salted__' + salt)
- finished = False
- while not finished:
- chunk = in_file.read(1024 * bs)
- if len(chunk) == 0 or len(chunk) % bs != 0:
- padding_length = (bs - len(chunk) % bs) or bs
- chunk += padding_length * chr(padding_length)
- finished = True
- out_file.write(cipher.encrypt(chunk))
-
- out_file.seek(0)
- enc_data = out_file.read()
- tmp_data = hexlify(enc_data)
-
- return tmp_data
-
-
- def decrypt(self, data, password, key_length=32):
-
- """ Read encrypted data from in_file and write decrypted to out_file """
-
- # http://stackoverflow.com/a/14989032
-
- data = ''.join(data.split('\n'))
- data = unhexlify(data)
-
- in_file = BytesIO(data)
- in_file.seek(0)
- out_file = BytesIO()
-
- bs = AES.block_size
- salt = in_file.read(bs)[len('Salted__'):]
- key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
- cipher = AES.new(key, AES.MODE_CBC, iv)
- next_chunk = ''
- finished = False
-
- while not finished:
- chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
- if len(next_chunk) == 0:
- padding_length = ord(chunk[-1])
- chunk = chunk[:-padding_length]
- finished = True
- out_file.write(chunk)
-
- # reset the stream pointer to the beginning
- out_file.seek(0)
- new_data = out_file.read()
-
- # split out sha and verify decryption
- split_data = new_data.split("\n")
- this_sha = split_data[0]
- this_data = '\n'.join(split_data[1:])
- test_sha = sha256(this_data).hexdigest()
-
- if this_sha != test_sha:
- raise errors.AnsibleError("Decryption failed")
-
- #return out_file.read()
- return this_data
-
-
-class VaultAES256(object):
-
- """
- Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
- Keys are derived using PBKDF2
- """
-
- # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
-
- def __init__(self):
-
- if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
- raise errors.AnsibleError(CRYPTO_UPGRADE)
-
- def gen_key_initctr(self, password, salt):
- # 16 for AES 128, 32 for AES256
- keylength = 32
-
- # match the size used for counter.new to avoid extra work
- ivlength = 16
-
- hash_function = SHA256
-
- # make two keys and one iv
- pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
-
-
- derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
- count=10000, prf=pbkdf2_prf)
-
- key1 = derivedkey[:keylength]
- key2 = derivedkey[keylength:(keylength * 2)]
- iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
-
- return key1, key2, hexlify(iv)
-
-
- def encrypt(self, data, password):
-
- salt = os.urandom(32)
- key1, key2, iv = self.gen_key_initctr(password, salt)
-
- # PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
- bs = AES.block_size
- padding_length = (bs - len(data) % bs) or bs
- data += padding_length * chr(padding_length)
-
- # COUNTER.new PARAMETERS
- # 1) nbits (integer) - Length of the counter, in bits.
- # 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
-
- ctr = Counter.new(128, initial_value=long(iv, 16))
-
- # AES.new PARAMETERS
- # 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
- # 2) MODE_CTR, is the recommended mode
- # 3) counter=
-
- cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
-
- # ENCRYPT PADDED DATA
- cryptedData = cipher.encrypt(data)
-
- # COMBINE SALT, DIGEST AND DATA
- hmac = HMAC.new(key2, cryptedData, SHA256)
- message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) )
- message = hexlify(message)
- return message
-
- def decrypt(self, data, password):
-
- # SPLIT SALT, DIGEST, AND DATA
- data = ''.join(data.split("\n"))
- data = unhexlify(data)
- salt, cryptedHmac, cryptedData = data.split("\n", 2)
- salt = unhexlify(salt)
- cryptedData = unhexlify(cryptedData)
-
- key1, key2, iv = self.gen_key_initctr(password, salt)
-
- # EXIT EARLY IF DIGEST DOESN'T MATCH
- hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
- if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
- return None
-
- # SET THE COUNTER AND THE CIPHER
- ctr = Counter.new(128, initial_value=long(iv, 16))
- cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
-
- # DECRYPT PADDED DATA
- decryptedData = cipher.decrypt(cryptedData)
-
- # UNPAD DATA
- padding_length = ord(decryptedData[-1])
- decryptedData = decryptedData[:-padding_length]
-
- return decryptedData
-
- def is_equal(self, a, b):
- # http://codahale.com/a-lesson-in-timing-attacks/
- if len(a) != len(b):
- return False
-
- result = 0
- for x, y in zip(a, b):
- result |= ord(x) ^ ord(y)
- return result == 0
-
+ # STDERR not captured to make it easier for users to prompt for input in their scripts
+ p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
+ except OSError as e:
+ raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
+ stdout, stderr = p.communicate()
+ vault_pass = stdout.strip('\r\n')
+ else:
+ try:
+ f = open(this_path, "rb")
+ vault_pass=f.read().strip()
+ f.close()
+ except (OSError, IOError) as e:
+ raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))
+
+ return vault_pass
diff --git a/v2/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
similarity index 100%
rename from v2/ansible/vars/__init__.py
rename to lib/ansible/vars/__init__.py
diff --git a/v2/ansible/vars/hostvars.py b/lib/ansible/vars/hostvars.py
similarity index 100%
rename from v2/ansible/vars/hostvars.py
rename to lib/ansible/vars/hostvars.py
diff --git a/v2/samples/README.md b/samples/README.md
similarity index 100%
rename from v2/samples/README.md
rename to samples/README.md
diff --git a/v2/samples/common_include.yml b/samples/common_include.yml
similarity index 100%
rename from v2/samples/common_include.yml
rename to samples/common_include.yml
diff --git a/v2/samples/hosts b/samples/hosts
similarity index 100%
rename from v2/samples/hosts
rename to samples/hosts
diff --git a/v2/samples/ignore_errors.yml b/samples/ignore_errors.yml
similarity index 100%
rename from v2/samples/ignore_errors.yml
rename to samples/ignore_errors.yml
diff --git a/v2/samples/include.yml b/samples/include.yml
similarity index 100%
rename from v2/samples/include.yml
rename to samples/include.yml
diff --git a/v2/samples/inv_lg b/samples/inv_lg
similarity index 100%
rename from v2/samples/inv_lg
rename to samples/inv_lg
diff --git a/v2/samples/inv_md b/samples/inv_md
similarity index 100%
rename from v2/samples/inv_md
rename to samples/inv_md
diff --git a/v2/samples/inv_sm b/samples/inv_sm
similarity index 100%
rename from v2/samples/inv_sm
rename to samples/inv_sm
diff --git a/v2/samples/l1_include.yml b/samples/l1_include.yml
similarity index 100%
rename from v2/samples/l1_include.yml
rename to samples/l1_include.yml
diff --git a/v2/samples/l2_include.yml b/samples/l2_include.yml
similarity index 100%
rename from v2/samples/l2_include.yml
rename to samples/l2_include.yml
diff --git a/v2/samples/l3_include.yml b/samples/l3_include.yml
similarity index 100%
rename from v2/samples/l3_include.yml
rename to samples/l3_include.yml
diff --git a/v2/samples/localhost_include.yml b/samples/localhost_include.yml
similarity index 100%
rename from v2/samples/localhost_include.yml
rename to samples/localhost_include.yml
diff --git a/v2/samples/localhosts b/samples/localhosts
similarity index 100%
rename from v2/samples/localhosts
rename to samples/localhosts
diff --git a/v2/samples/lookup_file.yml b/samples/lookup_file.yml
similarity index 100%
rename from v2/samples/lookup_file.yml
rename to samples/lookup_file.yml
diff --git a/v2/samples/lookup_password.yml b/samples/lookup_password.yml
similarity index 100%
rename from v2/samples/lookup_password.yml
rename to samples/lookup_password.yml
diff --git a/v2/samples/lookup_pipe.py b/samples/lookup_pipe.py
similarity index 100%
rename from v2/samples/lookup_pipe.py
rename to samples/lookup_pipe.py
diff --git a/v2/samples/lookup_template.yml b/samples/lookup_template.yml
similarity index 100%
rename from v2/samples/lookup_template.yml
rename to samples/lookup_template.yml
diff --git a/v2/samples/multi.py b/samples/multi.py
similarity index 100%
rename from v2/samples/multi.py
rename to samples/multi.py
diff --git a/v2/samples/multi_queues.py b/samples/multi_queues.py
similarity index 100%
rename from v2/samples/multi_queues.py
rename to samples/multi_queues.py
diff --git a/v2/samples/roles/common/meta/main.yml b/samples/roles/common/meta/main.yml
similarity index 100%
rename from v2/samples/roles/common/meta/main.yml
rename to samples/roles/common/meta/main.yml
diff --git a/v2/samples/roles/common/tasks/main.yml b/samples/roles/common/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/common/tasks/main.yml
rename to samples/roles/common/tasks/main.yml
diff --git a/v2/samples/roles/role_a/meta/main.yml b/samples/roles/role_a/meta/main.yml
similarity index 100%
rename from v2/samples/roles/role_a/meta/main.yml
rename to samples/roles/role_a/meta/main.yml
diff --git a/v2/samples/roles/role_a/tasks/main.yml b/samples/roles/role_a/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/role_a/tasks/main.yml
rename to samples/roles/role_a/tasks/main.yml
diff --git a/v2/samples/roles/role_b/meta/main.yml b/samples/roles/role_b/meta/main.yml
similarity index 100%
rename from v2/samples/roles/role_b/meta/main.yml
rename to samples/roles/role_b/meta/main.yml
diff --git a/v2/samples/roles/role_b/tasks/main.yml b/samples/roles/role_b/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/role_b/tasks/main.yml
rename to samples/roles/role_b/tasks/main.yml
diff --git a/v2/samples/roles/test_become_r1/meta/main.yml b/samples/roles/test_become_r1/meta/main.yml
similarity index 100%
rename from v2/samples/roles/test_become_r1/meta/main.yml
rename to samples/roles/test_become_r1/meta/main.yml
diff --git a/v2/samples/roles/test_become_r1/tasks/main.yml b/samples/roles/test_become_r1/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/test_become_r1/tasks/main.yml
rename to samples/roles/test_become_r1/tasks/main.yml
diff --git a/v2/samples/roles/test_become_r2/meta/main.yml b/samples/roles/test_become_r2/meta/main.yml
similarity index 100%
rename from v2/samples/roles/test_become_r2/meta/main.yml
rename to samples/roles/test_become_r2/meta/main.yml
diff --git a/v2/samples/roles/test_become_r2/tasks/main.yml b/samples/roles/test_become_r2/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/test_become_r2/tasks/main.yml
rename to samples/roles/test_become_r2/tasks/main.yml
diff --git a/v2/samples/roles/test_role/meta/main.yml b/samples/roles/test_role/meta/main.yml
similarity index 100%
rename from v2/samples/roles/test_role/meta/main.yml
rename to samples/roles/test_role/meta/main.yml
diff --git a/v2/samples/roles/test_role/tasks/main.yml b/samples/roles/test_role/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/test_role/tasks/main.yml
rename to samples/roles/test_role/tasks/main.yml
diff --git a/v2/samples/roles/test_role_dep/tasks/main.yml b/samples/roles/test_role_dep/tasks/main.yml
similarity index 100%
rename from v2/samples/roles/test_role_dep/tasks/main.yml
rename to samples/roles/test_role_dep/tasks/main.yml
diff --git a/v2/samples/src b/samples/src
similarity index 100%
rename from v2/samples/src
rename to samples/src
diff --git a/v2/samples/template.j2 b/samples/template.j2
similarity index 100%
rename from v2/samples/template.j2
rename to samples/template.j2
diff --git a/v2/samples/test_become.yml b/samples/test_become.yml
similarity index 100%
rename from v2/samples/test_become.yml
rename to samples/test_become.yml
diff --git a/v2/samples/test_big_debug.yml b/samples/test_big_debug.yml
similarity index 100%
rename from v2/samples/test_big_debug.yml
rename to samples/test_big_debug.yml
diff --git a/v2/samples/test_big_ping.yml b/samples/test_big_ping.yml
similarity index 100%
rename from v2/samples/test_big_ping.yml
rename to samples/test_big_ping.yml
diff --git a/v2/samples/test_block.yml b/samples/test_block.yml
similarity index 100%
rename from v2/samples/test_block.yml
rename to samples/test_block.yml
diff --git a/v2/samples/test_blocks_of_blocks.yml b/samples/test_blocks_of_blocks.yml
similarity index 100%
rename from v2/samples/test_blocks_of_blocks.yml
rename to samples/test_blocks_of_blocks.yml
diff --git a/v2/samples/test_fact_gather.yml b/samples/test_fact_gather.yml
similarity index 100%
rename from v2/samples/test_fact_gather.yml
rename to samples/test_fact_gather.yml
diff --git a/v2/samples/test_free.yml b/samples/test_free.yml
similarity index 100%
rename from v2/samples/test_free.yml
rename to samples/test_free.yml
diff --git a/v2/samples/test_include.yml b/samples/test_include.yml
similarity index 100%
rename from v2/samples/test_include.yml
rename to samples/test_include.yml
diff --git a/v2/samples/test_pb.yml b/samples/test_pb.yml
similarity index 100%
rename from v2/samples/test_pb.yml
rename to samples/test_pb.yml
diff --git a/v2/samples/test_role.yml b/samples/test_role.yml
similarity index 100%
rename from v2/samples/test_role.yml
rename to samples/test_role.yml
diff --git a/v2/samples/test_roles_complex.yml b/samples/test_roles_complex.yml
similarity index 100%
rename from v2/samples/test_roles_complex.yml
rename to samples/test_roles_complex.yml
diff --git a/v2/samples/test_run_once.yml b/samples/test_run_once.yml
similarity index 100%
rename from v2/samples/test_run_once.yml
rename to samples/test_run_once.yml
diff --git a/v2/samples/test_sudo.yml b/samples/test_sudo.yml
similarity index 100%
rename from v2/samples/test_sudo.yml
rename to samples/test_sudo.yml
diff --git a/v2/samples/test_tags.yml b/samples/test_tags.yml
similarity index 100%
rename from v2/samples/test_tags.yml
rename to samples/test_tags.yml
diff --git a/v2/samples/testing/extra_vars.yml b/samples/testing/extra_vars.yml
similarity index 100%
rename from v2/samples/testing/extra_vars.yml
rename to samples/testing/extra_vars.yml
diff --git a/v2/samples/testing/frag1 b/samples/testing/frag1
similarity index 100%
rename from v2/samples/testing/frag1
rename to samples/testing/frag1
diff --git a/v2/samples/testing/frag2 b/samples/testing/frag2
similarity index 100%
rename from v2/samples/testing/frag2
rename to samples/testing/frag2
diff --git a/v2/samples/testing/frag3 b/samples/testing/frag3
similarity index 100%
rename from v2/samples/testing/frag3
rename to samples/testing/frag3
diff --git a/v2/samples/testing/vars.yml b/samples/testing/vars.yml
similarity index 100%
rename from v2/samples/testing/vars.yml
rename to samples/testing/vars.yml
diff --git a/v2/samples/with_dict.yml b/samples/with_dict.yml
similarity index 100%
rename from v2/samples/with_dict.yml
rename to samples/with_dict.yml
diff --git a/v2/samples/with_env.yml b/samples/with_env.yml
similarity index 100%
rename from v2/samples/with_env.yml
rename to samples/with_env.yml
diff --git a/v2/samples/with_fileglob.yml b/samples/with_fileglob.yml
similarity index 100%
rename from v2/samples/with_fileglob.yml
rename to samples/with_fileglob.yml
diff --git a/v2/samples/with_first_found.yml b/samples/with_first_found.yml
similarity index 100%
rename from v2/samples/with_first_found.yml
rename to samples/with_first_found.yml
diff --git a/v2/samples/with_flattened.yml b/samples/with_flattened.yml
similarity index 100%
rename from v2/samples/with_flattened.yml
rename to samples/with_flattened.yml
diff --git a/v2/samples/with_indexed_items.yml b/samples/with_indexed_items.yml
similarity index 100%
rename from v2/samples/with_indexed_items.yml
rename to samples/with_indexed_items.yml
diff --git a/v2/samples/with_items.yml b/samples/with_items.yml
similarity index 100%
rename from v2/samples/with_items.yml
rename to samples/with_items.yml
diff --git a/v2/samples/with_lines.yml b/samples/with_lines.yml
similarity index 100%
rename from v2/samples/with_lines.yml
rename to samples/with_lines.yml
diff --git a/v2/samples/with_nested.yml b/samples/with_nested.yml
similarity index 100%
rename from v2/samples/with_nested.yml
rename to samples/with_nested.yml
diff --git a/v2/samples/with_random_choice.yml b/samples/with_random_choice.yml
similarity index 100%
rename from v2/samples/with_random_choice.yml
rename to samples/with_random_choice.yml
diff --git a/v2/samples/with_sequence.yml b/samples/with_sequence.yml
similarity index 100%
rename from v2/samples/with_sequence.yml
rename to samples/with_sequence.yml
diff --git a/v2/samples/with_subelements.yml b/samples/with_subelements.yml
similarity index 100%
rename from v2/samples/with_subelements.yml
rename to samples/with_subelements.yml
diff --git a/v2/samples/with_together.yml b/samples/with_together.yml
similarity index 100%
rename from v2/samples/with_together.yml
rename to samples/with_together.yml
diff --git a/v2/test/__init__.py b/test/units/__init__.py
similarity index 100%
rename from v2/test/__init__.py
rename to test/units/__init__.py
diff --git a/v2/test/errors/__init__.py b/test/units/errors/__init__.py
similarity index 100%
rename from v2/test/errors/__init__.py
rename to test/units/errors/__init__.py
diff --git a/v2/test/errors/test_errors.py b/test/units/errors/test_errors.py
similarity index 100%
rename from v2/test/errors/test_errors.py
rename to test/units/errors/test_errors.py
diff --git a/v2/test/executor/__init__.py b/test/units/executor/__init__.py
similarity index 100%
rename from v2/test/executor/__init__.py
rename to test/units/executor/__init__.py
diff --git a/v2/test/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py
similarity index 100%
rename from v2/test/executor/test_play_iterator.py
rename to test/units/executor/test_play_iterator.py
diff --git a/v2/ansible/modules/__init__.py b/test/units/mock/__init__.py
similarity index 100%
rename from v2/ansible/modules/__init__.py
rename to test/units/mock/__init__.py
diff --git a/v2/test/mock/loader.py b/test/units/mock/loader.py
similarity index 100%
rename from v2/test/mock/loader.py
rename to test/units/mock/loader.py
diff --git a/v2/test/parsing/__init__.py b/test/units/parsing/__init__.py
similarity index 100%
rename from v2/test/parsing/__init__.py
rename to test/units/parsing/__init__.py
diff --git a/v2/test/parsing/test_data_loader.py b/test/units/parsing/test_data_loader.py
similarity index 100%
rename from v2/test/parsing/test_data_loader.py
rename to test/units/parsing/test_data_loader.py
diff --git a/v2/test/parsing/test_mod_args.py b/test/units/parsing/test_mod_args.py
similarity index 100%
rename from v2/test/parsing/test_mod_args.py
rename to test/units/parsing/test_mod_args.py
diff --git a/v2/test/parsing/test_splitter.py b/test/units/parsing/test_splitter.py
similarity index 100%
rename from v2/test/parsing/test_splitter.py
rename to test/units/parsing/test_splitter.py
diff --git a/v2/test/parsing/vault/__init__.py b/test/units/parsing/vault/__init__.py
similarity index 100%
rename from v2/test/parsing/vault/__init__.py
rename to test/units/parsing/vault/__init__.py
diff --git a/v2/test/parsing/vault/test_vault.py b/test/units/parsing/vault/test_vault.py
similarity index 100%
rename from v2/test/parsing/vault/test_vault.py
rename to test/units/parsing/vault/test_vault.py
diff --git a/v2/test/parsing/vault/test_vault_editor.py b/test/units/parsing/vault/test_vault_editor.py
similarity index 100%
rename from v2/test/parsing/vault/test_vault_editor.py
rename to test/units/parsing/vault/test_vault_editor.py
diff --git a/lib/ansible/callback_plugins/__init__.py b/test/units/parsing/yaml/__init__.py
similarity index 100%
rename from lib/ansible/callback_plugins/__init__.py
rename to test/units/parsing/yaml/__init__.py
diff --git a/v2/test/parsing/yaml/test_loader.py b/test/units/parsing/yaml/test_loader.py
similarity index 100%
rename from v2/test/parsing/yaml/test_loader.py
rename to test/units/parsing/yaml/test_loader.py
diff --git a/v2/test/playbook/__init__.py b/test/units/playbook/__init__.py
similarity index 100%
rename from v2/test/playbook/__init__.py
rename to test/units/playbook/__init__.py
diff --git a/v2/test/playbook/test_block.py b/test/units/playbook/test_block.py
similarity index 100%
rename from v2/test/playbook/test_block.py
rename to test/units/playbook/test_block.py
diff --git a/v2/test/playbook/test_play.py b/test/units/playbook/test_play.py
similarity index 100%
rename from v2/test/playbook/test_play.py
rename to test/units/playbook/test_play.py
diff --git a/v2/test/playbook/test_playbook.py b/test/units/playbook/test_playbook.py
similarity index 100%
rename from v2/test/playbook/test_playbook.py
rename to test/units/playbook/test_playbook.py
diff --git a/v2/test/playbook/test_role.py b/test/units/playbook/test_role.py
similarity index 100%
rename from v2/test/playbook/test_role.py
rename to test/units/playbook/test_role.py
diff --git a/v2/test/playbook/test_task.py b/test/units/playbook/test_task.py
similarity index 100%
rename from v2/test/playbook/test_task.py
rename to test/units/playbook/test_task.py
diff --git a/v2/test/plugins/__init__.py b/test/units/plugins/__init__.py
similarity index 100%
rename from v2/test/plugins/__init__.py
rename to test/units/plugins/__init__.py
diff --git a/v2/test/plugins/test_cache.py b/test/units/plugins/test_cache.py
similarity index 100%
rename from v2/test/plugins/test_cache.py
rename to test/units/plugins/test_cache.py
diff --git a/v2/test/plugins/test_connection.py b/test/units/plugins/test_connection.py
similarity index 100%
rename from v2/test/plugins/test_connection.py
rename to test/units/plugins/test_connection.py
diff --git a/v2/test/plugins/test_plugins.py b/test/units/plugins/test_plugins.py
similarity index 100%
rename from v2/test/plugins/test_plugins.py
rename to test/units/plugins/test_plugins.py
diff --git a/v2/test/vars/__init__.py b/test/units/vars/__init__.py
similarity index 100%
rename from v2/test/vars/__init__.py
rename to test/units/vars/__init__.py
diff --git a/v2/test/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
similarity index 100%
rename from v2/test/vars/test_variable_manager.py
rename to test/units/vars/test_variable_manager.py
diff --git a/v2/ansible/utils/__init__.py b/v1/ansible/__init__.py
similarity index 85%
rename from v2/ansible/utils/__init__.py
rename to v1/ansible/__init__.py
index ae8ccff595..ba5ca83b72 100644
--- a/v2/ansible/utils/__init__.py
+++ b/v1/ansible/__init__.py
@@ -14,7 +14,5 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+__version__ = '2.0.0'
+__author__ = 'Michael DeHaan'
diff --git a/lib/ansible/cache/__init__.py b/v1/ansible/cache/__init__.py
similarity index 100%
rename from lib/ansible/cache/__init__.py
rename to v1/ansible/cache/__init__.py
diff --git a/lib/ansible/cache/base.py b/v1/ansible/cache/base.py
similarity index 100%
rename from lib/ansible/cache/base.py
rename to v1/ansible/cache/base.py
diff --git a/lib/ansible/cache/jsonfile.py b/v1/ansible/cache/jsonfile.py
similarity index 100%
rename from lib/ansible/cache/jsonfile.py
rename to v1/ansible/cache/jsonfile.py
diff --git a/lib/ansible/cache/memcached.py b/v1/ansible/cache/memcached.py
similarity index 100%
rename from lib/ansible/cache/memcached.py
rename to v1/ansible/cache/memcached.py
diff --git a/lib/ansible/cache/memory.py b/v1/ansible/cache/memory.py
similarity index 100%
rename from lib/ansible/cache/memory.py
rename to v1/ansible/cache/memory.py
diff --git a/lib/ansible/cache/redis.py b/v1/ansible/cache/redis.py
similarity index 100%
rename from lib/ansible/cache/redis.py
rename to v1/ansible/cache/redis.py
diff --git a/lib/ansible/runner/action_plugins/__init__.py b/v1/ansible/callback_plugins/__init__.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/__init__.py
rename to v1/ansible/callback_plugins/__init__.py
diff --git a/lib/ansible/callback_plugins/noop.py b/v1/ansible/callback_plugins/noop.py
similarity index 100%
rename from lib/ansible/callback_plugins/noop.py
rename to v1/ansible/callback_plugins/noop.py
diff --git a/lib/ansible/callbacks.py b/v1/ansible/callbacks.py
similarity index 100%
rename from lib/ansible/callbacks.py
rename to v1/ansible/callbacks.py
diff --git a/lib/ansible/color.py b/v1/ansible/color.py
similarity index 100%
rename from lib/ansible/color.py
rename to v1/ansible/color.py
diff --git a/v2/ansible/constants.py b/v1/ansible/constants.py
similarity index 89%
rename from v2/ansible/constants.py
rename to v1/ansible/constants.py
index 456beb8bbc..089de5b7c5 100644
--- a/v2/ansible/constants.py
+++ b/v1/ansible/constants.py
@@ -15,15 +15,10 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import os
import pwd
import sys
-
-from six.moves import configparser
+import ConfigParser
from string import ascii_letters, digits
# copied from utils, avoid circular reference fun :)
@@ -40,15 +35,13 @@ def get_config(p, section, key, env_var, default, boolean=False, integer=False,
''' return a configuration variable with casting '''
value = _get_config(p, section, key, env_var, default)
if boolean:
- value = mk_boolean(value)
- if value:
- if integer:
- value = int(value)
- elif floating:
- value = float(value)
- elif islist:
- if isinstance(value, basestring):
- value = [x.strip() for x in value.split(',')]
+ return mk_boolean(value)
+ if value and integer:
+ return int(value)
+ if value and floating:
+ return float(value)
+ if value and islist:
+ return [x.strip() for x in value.split(',')]
return value
def _get_config(p, section, key, env_var, default):
@@ -67,7 +60,7 @@ def _get_config(p, section, key, env_var, default):
def load_config_file():
''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
- p = configparser.ConfigParser()
+ p = ConfigParser.ConfigParser()
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
@@ -80,8 +73,8 @@ def load_config_file():
if path is not None and os.path.exists(path):
try:
p.read(path)
- except configparser.Error as e:
- print("Error reading config file: \n{0}".format(e))
+ except ConfigParser.Error as e:
+ print "Error reading config file: \n%s" % e
sys.exit(1)
return p
return None
@@ -105,8 +98,7 @@ YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml", ".json" ]
DEFAULTS='defaults'
# configurable things
-DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, boolean=True)
-DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', get_config(p, DEFAULTS,'inventory','ANSIBLE_INVENTORY', '/etc/ansible/hosts')))
+DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'inventory', 'ANSIBLE_INVENTORY', get_config(p, DEFAULTS,'hostfile','ANSIBLE_HOSTS', '/etc/ansible/hosts')))
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None)
DEFAULT_ROLES_PATH = shell_expand_path(get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles'))
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
@@ -120,7 +112,6 @@ DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', active_user)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, boolean=True)
DEFAULT_PRIVATE_KEY_FILE = shell_expand_path(get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None))
-DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False, boolean=True)
DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, integer=True)
DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, boolean=True)
@@ -131,6 +122,7 @@ DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None,
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, boolean=True)
DEFAULT_SUDO = get_config(p, DEFAULTS, 'sudo', 'ANSIBLE_SUDO', False, boolean=True)
+DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_SUDO_EXE = get_config(p, DEFAULTS, 'sudo_exe', 'ANSIBLE_SUDO_EXE', 'sudo')
DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_FLAGS', '-H')
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
@@ -149,7 +141,7 @@ BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
BECOME_ERROR_STRINGS = {'sudo': 'Sorry, try again.', 'su': 'Authentication failure', 'pbrun': '', 'pfexec': '', 'runas': ''}
DEFAULT_BECOME = get_config(p, 'privilege_escalation', 'become', 'ANSIBLE_BECOME',False, boolean=True)
DEFAULT_BECOME_METHOD = get_config(p, 'privilege_escalation', 'become_method', 'ANSIBLE_BECOME_METHOD','sudo' if DEFAULT_SUDO else 'su' if DEFAULT_SU else 'sudo' ).lower()
-DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER', 'root')
+DEFAULT_BECOME_USER = get_config(p, 'privilege_escalation', 'become_user', 'ANSIBLE_BECOME_USER',default=None)
DEFAULT_BECOME_ASK_PASS = get_config(p, 'privilege_escalation', 'become_ask_pass', 'ANSIBLE_BECOME_ASK_PASS', False, boolean=True)
# need to rethink impementing these 2
DEFAULT_BECOME_EXE = None
@@ -164,7 +156,6 @@ DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', '
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '~/.ansible/plugins/lookup_plugins:/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '~/.ansible/plugins/vars_plugins:/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '~/.ansible/plugins/filter_plugins:/usr/share/ansible_plugins/filter_plugins')
-DEFAULT_STDOUT_CALLBACK = get_config(p, DEFAULTS, 'stdout_callback', 'ANSIBLE_STDOUT_CALLBACK', 'default')
CACHE_PLUGIN = get_config(p, DEFAULTS, 'fact_caching', 'ANSIBLE_CACHE_PLUGIN', 'memory')
CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'fact_caching_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
@@ -182,8 +173,8 @@ DEPRECATION_WARNINGS = get_config(p, DEFAULTS, 'deprecation_warnings',
DEFAULT_CALLABLE_WHITELIST = get_config(p, DEFAULTS, 'callable_whitelist', 'ANSIBLE_CALLABLE_WHITELIST', [], islist=True)
COMMAND_WARNINGS = get_config(p, DEFAULTS, 'command_warnings', 'ANSIBLE_COMMAND_WARNINGS', False, boolean=True)
DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks', 'ANSIBLE_LOAD_CALLBACK_PLUGINS', False, boolean=True)
-RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
-RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
+DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
+
RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
@@ -205,16 +196,10 @@ ACCELERATE_KEYS_FILE_PERMS = get_config(p, 'accelerate', 'accelerate_keys_fi
ACCELERATE_MULTI_KEY = get_config(p, 'accelerate', 'accelerate_multi_key', 'ACCELERATE_MULTI_KEY', False, boolean=True)
PARAMIKO_PTY = get_config(p, 'paramiko_connection', 'pty', 'ANSIBLE_PARAMIKO_PTY', True, boolean=True)
-# galaxy related
-DEFAULT_GALAXY_URI = get_config(p, 'galaxy', 'server_uri', 'ANSIBLE_GALAXY_SERVER_URI', 'https://galaxy.ansible.com')
-# this can be configured to blacklist SCMS but cannot add new ones unless the code is also updated
-GALAXY_SCMS = get_config(p, 'galaxy', 'scms', 'ANSIBLE_GALAXY_SCMS', ['git','hg'], islist=True)
-
# characters included in auto-generated passwords
DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_"
# non-configurable things
-MODULE_REQUIRE_ARGS = ['command', 'shell', 'raw', 'script']
DEFAULT_BECOME_PASS = None
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
diff --git a/lib/ansible/errors.py b/v1/ansible/errors.py
similarity index 100%
rename from lib/ansible/errors.py
rename to v1/ansible/errors.py
diff --git a/v2/ansible/inventory/__init__.py b/v1/ansible/inventory/__init__.py
similarity index 88%
rename from v2/ansible/inventory/__init__.py
rename to v1/ansible/inventory/__init__.py
index 063398f17f..2048046d3c 100644
--- a/v2/ansible/inventory/__init__.py
+++ b/v1/ansible/inventory/__init__.py
@@ -16,44 +16,36 @@
# along with Ansible. If not, see .
#############################################
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import fnmatch
import os
import sys
import re
-import stat
import subprocess
-from ansible import constants as C
-from ansible.errors import *
-
+import ansible.constants as C
from ansible.inventory.ini import InventoryParser
from ansible.inventory.script import InventoryScript
from ansible.inventory.dir import InventoryDirectory
from ansible.inventory.group import Group
from ansible.inventory.host import Host
-from ansible.plugins import vars_loader
-from ansible.utils.path import is_executable
-from ansible.utils.vars import combine_vars
+from ansible import errors
+from ansible import utils
class Inventory(object):
"""
Host inventory for ansible.
"""
- #__slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
- # 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
- # '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
+ __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
+ 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
+ '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
- def __init__(self, loader, variable_manager, host_list=C.DEFAULT_HOST_LIST):
+ def __init__(self, host_list=C.DEFAULT_HOST_LIST, vault_password=None):
# the host file file, or script path, or list of hosts
# if a list, inventory data will NOT be loaded
self.host_list = host_list
- self._loader = loader
- self._variable_manager = variable_manager
+ self._vault_password=vault_password
# caching to avoid repeated calculations, particularly with
# external inventory scripts.
@@ -105,7 +97,7 @@ class Inventory(object):
if os.path.isdir(host_list):
# Ensure basedir is inside the directory
self.host_list = os.path.join(self.host_list, "")
- self.parser = InventoryDirectory(loader=self._loader, filename=host_list)
+ self.parser = InventoryDirectory(filename=host_list)
self.groups = self.parser.groups.values()
else:
# check to see if the specified file starts with a
@@ -121,9 +113,9 @@ class Inventory(object):
except:
pass
- if is_executable(host_list):
+ if utils.is_executable(host_list):
try:
- self.parser = InventoryScript(loader=self._loader, filename=host_list)
+ self.parser = InventoryScript(filename=host_list)
self.groups = self.parser.groups.values()
except:
if not shebang_present:
@@ -142,23 +134,19 @@ class Inventory(object):
else:
raise
- vars_loader.add_directory(self.basedir(), with_subdir=True)
+ utils.plugins.vars_loader.add_directory(self.basedir(), with_subdir=True)
else:
raise errors.AnsibleError("Unable to find an inventory file, specify one with -i ?")
- self._vars_plugins = [ x for x in vars_loader.all(self) ]
+ self._vars_plugins = [ x for x in utils.plugins.vars_loader.all(self) ]
- # FIXME: shouldn't be required, since the group/host vars file
- # management will be done in VariableManager
# get group vars from group_vars/ files and vars plugins
for group in self.groups:
- # FIXME: combine_vars
- group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
+ group.vars = utils.combine_vars(group.vars, self.get_group_variables(group.name, vault_password=self._vault_password))
# get host vars from host_vars/ files and vars plugins
for host in self.get_hosts():
- # FIXME: combine_vars
- host.vars = combine_vars(host.vars, self.get_host_variables(host.name))
+ host.vars = utils.combine_vars(host.vars, self.get_host_variables(host.name, vault_password=self._vault_password))
def _match(self, str, pattern_str):
@@ -204,9 +192,9 @@ class Inventory(object):
# exclude hosts mentioned in any restriction (ex: failed hosts)
if self._restriction is not None:
- hosts = [ h for h in hosts if h in self._restriction ]
+ hosts = [ h for h in hosts if h.name in self._restriction ]
if self._also_restriction is not None:
- hosts = [ h for h in hosts if h in self._also_restriction ]
+ hosts = [ h for h in hosts if h.name in self._also_restriction ]
return hosts
@@ -332,8 +320,6 @@ class Inventory(object):
new_host = Host(pattern)
new_host.set_variable("ansible_python_interpreter", sys.executable)
new_host.set_variable("ansible_connection", "local")
- new_host.ipv4_address = '127.0.0.1'
-
ungrouped = self.get_group("ungrouped")
if ungrouped is None:
self.add_group(Group('ungrouped'))
@@ -434,7 +420,7 @@ class Inventory(object):
group = self.get_group(groupname)
if group is None:
- raise Exception("group not found: %s" % groupname)
+ raise errors.AnsibleError("group not found: %s" % groupname)
vars = {}
@@ -442,21 +428,19 @@ class Inventory(object):
vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')]
for updated in vars_results:
if updated is not None:
- # FIXME: combine_vars
- vars = combine_vars(vars, updated)
+ vars = utils.combine_vars(vars, updated)
# Read group_vars/ files
- # FIXME: combine_vars
- vars = combine_vars(vars, self.get_group_vars(group))
+ vars = utils.combine_vars(vars, self.get_group_vars(group))
return vars
- def get_vars(self, hostname, update_cached=False, vault_password=None):
+ def get_variables(self, hostname, update_cached=False, vault_password=None):
host = self.get_host(hostname)
if not host:
- raise Exception("host not found: %s" % hostname)
- return host.get_vars()
+ raise errors.AnsibleError("host not found: %s" % hostname)
+ return host.get_variables()
def get_host_variables(self, hostname, update_cached=False, vault_password=None):
@@ -476,26 +460,22 @@ class Inventory(object):
vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')]
for updated in vars_results:
if updated is not None:
- # FIXME: combine_vars
- vars = combine_vars(vars, updated)
+ vars = utils.combine_vars(vars, updated)
# plugin.get_host_vars retrieves just vars for specific host
vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')]
for updated in vars_results:
if updated is not None:
- # FIXME: combine_vars
- vars = combine_vars(vars, updated)
+ vars = utils.combine_vars(vars, updated)
# still need to check InventoryParser per host vars
# which actually means InventoryScript per host,
# which is not performant
if self.parser is not None:
- # FIXME: combine_vars
- vars = combine_vars(vars, self.parser.get_host_variables(host))
+ vars = utils.combine_vars(vars, self.parser.get_host_variables(host))
# Read host_vars/ files
- # FIXME: combine_vars
- vars = combine_vars(vars, self.get_host_vars(host))
+ vars = utils.combine_vars(vars, self.get_host_vars(host))
return vars
@@ -510,7 +490,7 @@ class Inventory(object):
""" return a list of hostnames for a pattern """
- result = [ h for h in self.get_hosts(pattern) ]
+ result = [ h.name for h in self.get_hosts(pattern) ]
if len(result) == 0 and pattern in ["localhost", "127.0.0.1"]:
result = [pattern]
return result
@@ -518,7 +498,11 @@ class Inventory(object):
def list_groups(self):
return sorted([ g.name for g in self.groups ], key=lambda x: x)
- def restrict_to_hosts(self, restriction):
+ # TODO: remove this function
+ def get_restriction(self):
+ return self._restriction
+
+ def restrict_to(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
to exclude failed hosts in main playbook code, don't use this for other
@@ -560,7 +544,7 @@ class Inventory(object):
results.append(x)
self._subset = results
- def remove_restriction(self):
+ def lift_restriction(self):
""" Do not restrict list operations """
self._restriction = None
@@ -604,12 +588,10 @@ class Inventory(object):
self._playbook_basedir = dir
# get group vars from group_vars/ files
for group in self.groups:
- # FIXME: combine_vars
- group.vars = combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
+ group.vars = utils.combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
# get host vars from host_vars/ files
for host in self.get_hosts():
- # FIXME: combine_vars
- host.vars = combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True))
+ host.vars = utils.combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True))
# invalidate cache
self._vars_per_host = {}
self._vars_per_group = {}
@@ -657,15 +639,15 @@ class Inventory(object):
if _basedir == self._playbook_basedir and scan_pass != 1:
continue
- # FIXME: these should go to VariableManager
if group and host is None:
# load vars in dir/group_vars/name_of_group
base_path = os.path.join(basedir, "group_vars/%s" % group.name)
- self._variable_manager.add_group_vars_file(base_path, self._loader)
+ results = utils.load_vars(base_path, results, vault_password=self._vault_password)
+
elif host and group is None:
# same for hostvars in dir/host_vars/name_of_host
base_path = os.path.join(basedir, "host_vars/%s" % host.name)
- self._variable_manager.add_host_vars_file(base_path, self._loader)
+ results = utils.load_vars(base_path, results, vault_password=self._vault_password)
# all done, results is a dictionary of variables for this particular host.
return results
diff --git a/v2/ansible/inventory/dir.py b/v1/ansible/inventory/dir.py
similarity index 91%
rename from v2/ansible/inventory/dir.py
rename to v1/ansible/inventory/dir.py
index 735f32d62c..9ac23fff89 100644
--- a/v2/ansible/inventory/dir.py
+++ b/v1/ansible/inventory/dir.py
@@ -17,25 +17,20 @@
# along with Ansible. If not, see .
#############################################
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
import os
-
-from ansible import constants as C
-from ansible.errors import AnsibleError
-
+import ansible.constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.ini import InventoryParser
from ansible.inventory.script import InventoryScript
-from ansible.utils.path import is_executable
-from ansible.utils.vars import combine_vars
+from ansible import utils
+from ansible import errors
class InventoryDirectory(object):
''' Host inventory parser for ansible using a directory of inventories. '''
- def __init__(self, loader, filename=C.DEFAULT_HOST_LIST):
+ def __init__(self, filename=C.DEFAULT_HOST_LIST):
self.names = os.listdir(filename)
self.names.sort()
self.directory = filename
@@ -43,12 +38,10 @@ class InventoryDirectory(object):
self.hosts = {}
self.groups = {}
- self._loader = loader
-
for i in self.names:
# Skip files that end with certain extensions or characters
- if any(i.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".cfg", ".retry", ".pyc", ".pyo")):
+ if any(i.endswith(ext) for ext in ("~", ".orig", ".bak", ".ini", ".retry", ".pyc", ".pyo")):
continue
# Skip hidden files
if i.startswith('.') and not i.startswith('./'):
@@ -58,9 +51,9 @@ class InventoryDirectory(object):
continue
fullpath = os.path.join(self.directory, i)
if os.path.isdir(fullpath):
- parser = InventoryDirectory(loader=loader, filename=fullpath)
- elif is_executable(fullpath):
- parser = InventoryScript(loader=loader, filename=fullpath)
+ parser = InventoryDirectory(filename=fullpath)
+ elif utils.is_executable(fullpath):
+ parser = InventoryScript(filename=fullpath)
else:
parser = InventoryParser(filename=fullpath)
self.parsers.append(parser)
@@ -160,7 +153,7 @@ class InventoryDirectory(object):
# name
if group.name != newgroup.name:
- raise AnsibleError("Cannot merge group %s with %s" % (group.name, newgroup.name))
+ raise errors.AnsibleError("Cannot merge group %s with %s" % (group.name, newgroup.name))
# depth
group.depth = max([group.depth, newgroup.depth])
@@ -203,14 +196,14 @@ class InventoryDirectory(object):
self.groups[newparent.name].add_child_group(group)
# variables
- group.vars = combine_vars(group.vars, newgroup.vars)
+ group.vars = utils.combine_vars(group.vars, newgroup.vars)
def _merge_hosts(self,host, newhost):
""" Merge all of instance newhost into host """
# name
if host.name != newhost.name:
- raise AnsibleError("Cannot merge host %s with %s" % (host.name, newhost.name))
+ raise errors.AnsibleError("Cannot merge host %s with %s" % (host.name, newhost.name))
# group membership relation
for newgroup in newhost.groups:
@@ -225,7 +218,7 @@ class InventoryDirectory(object):
self.groups[newgroup.name].add_host(host)
# variables
- host.vars = combine_vars(host.vars, newhost.vars)
+ host.vars = utils.combine_vars(host.vars, newhost.vars)
def get_host_variables(self, host):
""" Gets additional host variables from all inventories """
diff --git a/v2/ansible/inventory/expand_hosts.py b/v1/ansible/inventory/expand_hosts.py
similarity index 97%
rename from v2/ansible/inventory/expand_hosts.py
rename to v1/ansible/inventory/expand_hosts.py
index b5a957c53f..f129740935 100644
--- a/v2/ansible/inventory/expand_hosts.py
+++ b/v1/ansible/inventory/expand_hosts.py
@@ -30,9 +30,6 @@ expanded into 001, 002 ...009, 010.
Note that when beg is specified with left zero padding, then the length of
end must be the same as that of beg, else an exception is raised.
'''
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
import string
from ansible import errors
diff --git a/v2/ansible/inventory/group.py b/v1/ansible/inventory/group.py
similarity index 69%
rename from v2/ansible/inventory/group.py
rename to v1/ansible/inventory/group.py
index 6525e69b46..262558e69c 100644
--- a/v2/ansible/inventory/group.py
+++ b/v1/ansible/inventory/group.py
@@ -14,15 +14,11 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-from ansible.utils.debug import debug
-
-class Group:
+class Group(object):
''' a group of ansible hosts '''
- #__slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
+ __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
def __init__(self, name=None):
@@ -33,49 +29,9 @@ class Group:
self.child_groups = []
self.parent_groups = []
self._hosts_cache = None
-
#self.clear_hosts_cache()
- #if self.name is None:
- # raise Exception("group name is required")
-
- def __repr__(self):
- return self.get_name()
-
- def __getstate__(self):
- return self.serialize()
-
- def __setstate__(self, data):
- return self.deserialize(data)
-
- def serialize(self):
- parent_groups = []
- for parent in self.parent_groups:
- parent_groups.append(parent.serialize())
-
- result = dict(
- name=self.name,
- vars=self.vars.copy(),
- parent_groups=parent_groups,
- depth=self.depth,
- )
-
- debug("serializing group, result is: %s" % result)
- return result
-
- def deserialize(self, data):
- debug("deserializing group, data is: %s" % data)
- self.__init__()
- self.name = data.get('name')
- self.vars = data.get('vars', dict())
-
- parent_groups = data.get('parent_groups', [])
- for parent_data in parent_groups:
- g = Group()
- g.deserialize(parent_data)
- self.parent_groups.append(g)
-
- def get_name(self):
- return self.name
+ if self.name is None:
+ raise Exception("group name is required")
def add_child_group(self, group):
@@ -144,7 +100,7 @@ class Group:
hosts.append(mine)
return hosts
- def get_vars(self):
+ def get_variables(self):
return self.vars.copy()
def _get_ancestors(self):
diff --git a/v1/ansible/inventory/host.py b/v1/ansible/inventory/host.py
new file mode 100644
index 0000000000..d4dc20fa46
--- /dev/null
+++ b/v1/ansible/inventory/host.py
@@ -0,0 +1,67 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+import ansible.constants as C
+from ansible import utils
+
+class Host(object):
+ ''' a single ansible host '''
+
+ __slots__ = [ 'name', 'vars', 'groups' ]
+
+ def __init__(self, name=None, port=None):
+
+ self.name = name
+ self.vars = {}
+ self.groups = []
+ if port and port != C.DEFAULT_REMOTE_PORT:
+ self.set_variable('ansible_ssh_port', int(port))
+
+ if self.name is None:
+ raise Exception("host name is required")
+
+ def add_group(self, group):
+
+ self.groups.append(group)
+
+ def set_variable(self, key, value):
+
+ self.vars[key]=value
+
+ def get_groups(self):
+
+ groups = {}
+ for g in self.groups:
+ groups[g.name] = g
+ ancestors = g.get_ancestors()
+ for a in ancestors:
+ groups[a.name] = a
+ return groups.values()
+
+ def get_variables(self):
+
+ results = {}
+ groups = self.get_groups()
+ for group in sorted(groups, key=lambda g: g.depth):
+ results = utils.combine_vars(results, group.get_variables())
+ results = utils.combine_vars(results, self.vars)
+ results['inventory_hostname'] = self.name
+ results['inventory_hostname_short'] = self.name.split('.')[0]
+ results['group_names'] = sorted([ g.name for g in groups if g.name != 'all'])
+ return results
+
+
diff --git a/v2/ansible/inventory/ini.py b/v1/ansible/inventory/ini.py
similarity index 82%
rename from v2/ansible/inventory/ini.py
rename to v1/ansible/inventory/ini.py
index e004ee8bb7..bd9a98e7f8 100644
--- a/v2/ansible/inventory/ini.py
+++ b/v1/ansible/inventory/ini.py
@@ -16,20 +16,17 @@
# along with Ansible. If not, see .
#############################################
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-import ast
-import shlex
-import re
-
-from ansible import constants as C
-from ansible.errors import *
+import ansible.constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.expand_hosts import detect_range
from ansible.inventory.expand_hosts import expand_hostname_range
-from ansible.utils.unicode import to_unicode
+from ansible import errors
+from ansible import utils
+import shlex
+import re
+import ast
class InventoryParser(object):
"""
@@ -37,8 +34,9 @@ class InventoryParser(object):
"""
def __init__(self, filename=C.DEFAULT_HOST_LIST):
- self.filename = filename
+
with open(filename) as fh:
+ self.filename = filename
self.lines = fh.readlines()
self.groups = {}
self.hosts = {}
@@ -56,7 +54,10 @@ class InventoryParser(object):
def _parse_value(v):
if "#" not in v:
try:
- v = ast.literal_eval(v)
+ ret = ast.literal_eval(v)
+ if not isinstance(ret, float):
+ # Do not trim floats. Eg: "1.20" to 1.2
+ return ret
# Using explicit exceptions.
# Likely a string that literal_eval does not like. We wil then just set it.
except ValueError:
@@ -65,7 +66,7 @@ class InventoryParser(object):
except SyntaxError:
# Is this a hash with an equals at the end?
pass
- return to_unicode(v, nonstring='passthru', errors='strict')
+ return v
# [webservers]
# alpha
@@ -90,8 +91,8 @@ class InventoryParser(object):
self.groups = dict(all=all, ungrouped=ungrouped)
active_group_name = 'ungrouped'
- for line in self.lines:
- line = self._before_comment(line).strip()
+ for lineno in range(len(self.lines)):
+ line = utils.before_comment(self.lines[lineno]).strip()
if line.startswith("[") and line.endswith("]"):
active_group_name = line.replace("[","").replace("]","")
if ":vars" in line or ":children" in line:
@@ -145,11 +146,8 @@ class InventoryParser(object):
try:
(k,v) = t.split("=", 1)
except ValueError, e:
- raise AnsibleError("Invalid ini entry in %s: %s - %s" % (self.filename, t, str(e)))
- if k == 'ansible_ssh_host':
- host.ipv4_address = self._parse_value(v)
- else:
- host.set_variable(k, self._parse_value(v))
+ raise errors.AnsibleError("%s:%s: Invalid ini entry: %s - %s" % (self.filename, lineno + 1, t, str(e)))
+ host.set_variable(k, self._parse_value(v))
self.groups[active_group_name].add_host(host)
# [southeast:children]
@@ -159,8 +157,8 @@ class InventoryParser(object):
def _parse_group_children(self):
group = None
- for line in self.lines:
- line = line.strip()
+ for lineno in range(len(self.lines)):
+ line = self.lines[lineno].strip()
if line is None or line == '':
continue
if line.startswith("[") and ":children]" in line:
@@ -175,7 +173,7 @@ class InventoryParser(object):
elif group:
kid_group = self.groups.get(line, None)
if kid_group is None:
- raise AnsibleError("child group is not defined: (%s)" % line)
+ raise errors.AnsibleError("%s:%d: child group is not defined: (%s)" % (self.filename, lineno + 1, line))
else:
group.add_child_group(kid_group)
@@ -186,13 +184,13 @@ class InventoryParser(object):
def _parse_group_variables(self):
group = None
- for line in self.lines:
- line = line.strip()
+ for lineno in range(len(self.lines)):
+ line = self.lines[lineno].strip()
if line.startswith("[") and ":vars]" in line:
line = line.replace("[","").replace(":vars]","")
group = self.groups.get(line, None)
if group is None:
- raise AnsibleError("can't add vars to undefined group: %s" % line)
+ raise errors.AnsibleError("%s:%d: can't add vars to undefined group: %s" % (self.filename, lineno + 1, line))
elif line.startswith("#") or line.startswith(";"):
pass
elif line.startswith("["):
@@ -201,18 +199,10 @@ class InventoryParser(object):
pass
elif group:
if "=" not in line:
- raise AnsibleError("variables assigned to group must be in key=value form")
+ raise errors.AnsibleError("%s:%d: variables assigned to group must be in key=value form" % (self.filename, lineno + 1))
else:
(k, v) = [e.strip() for e in line.split("=", 1)]
group.set_variable(k, self._parse_value(v))
def get_host_variables(self, host):
return {}
-
- def _before_comment(self, msg):
- ''' what's the part of a string before a comment? '''
- msg = msg.replace("\#","**NOT_A_COMMENT**")
- msg = msg.split("#")[0]
- msg = msg.replace("**NOT_A_COMMENT**","#")
- return msg
-
diff --git a/v2/ansible/inventory/script.py b/v1/ansible/inventory/script.py
similarity index 82%
rename from v2/ansible/inventory/script.py
rename to v1/ansible/inventory/script.py
index 9675d70f69..b83cb9bcc7 100644
--- a/v2/ansible/inventory/script.py
+++ b/v1/ansible/inventory/script.py
@@ -16,26 +16,22 @@
# along with Ansible. If not, see .
#############################################
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
import os
import subprocess
-import sys
-
-from ansible import constants as C
-from ansible.errors import *
+import ansible.constants as C
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.module_utils.basic import json_dict_bytes_to_unicode
+from ansible import utils
+from ansible import errors
+import sys
-class InventoryScript:
+class InventoryScript(object):
''' Host inventory parser for ansible using external inventory scripts. '''
- def __init__(self, loader, filename=C.DEFAULT_HOST_LIST):
-
- self._loader = loader
+ def __init__(self, filename=C.DEFAULT_HOST_LIST):
# Support inventory scripts that are not prefixed with some
# path information but happen to be in the current working
@@ -45,11 +41,11 @@ class InventoryScript:
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError, e:
- raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
+ raise errors.AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(stdout, stderr) = sp.communicate()
if sp.returncode != 0:
- raise AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))
+ raise errors.AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))
self.data = stdout
# see comment about _meta below
@@ -62,7 +58,7 @@ class InventoryScript:
all_hosts = {}
# not passing from_remote because data from CMDB is trusted
- self.raw = self._loader.load(self.data)
+ self.raw = utils.parse_json(self.data)
self.raw = json_dict_bytes_to_unicode(self.raw)
all = Group('all')
@@ -72,7 +68,7 @@ class InventoryScript:
if 'failed' in self.raw:
sys.stderr.write(err + "\n")
- raise AnsibleError("failed to parse executable inventory script results: %s" % self.raw)
+ raise errors.AnsibleError("failed to parse executable inventory script results: %s" % self.raw)
for (group_name, data) in self.raw.items():
@@ -96,12 +92,12 @@ class InventoryScript:
if not isinstance(data, dict):
data = {'hosts': data}
# is not those subkeys, then simplified syntax, host with vars
- elif not any(k in data for k in ('hosts','vars')):
+ elif not any(k in data for k in ('hosts','vars','children')):
data = {'hosts': [group_name], 'vars': data}
if 'hosts' in data:
if not isinstance(data['hosts'], list):
- raise AnsibleError("You defined a group \"%s\" with bad "
+ raise errors.AnsibleError("You defined a group \"%s\" with bad "
"data for the host list:\n %s" % (group_name, data))
for hostname in data['hosts']:
@@ -112,7 +108,7 @@ class InventoryScript:
if 'vars' in data:
if not isinstance(data['vars'], dict):
- raise AnsibleError("You defined a group \"%s\" with bad "
+ raise errors.AnsibleError("You defined a group \"%s\" with bad "
"data for variables:\n %s" % (group_name, data))
for k, v in data['vars'].iteritems():
@@ -147,12 +143,12 @@ class InventoryScript:
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError, e:
- raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
+ raise errors.AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(out, err) = sp.communicate()
if out.strip() == '':
return dict()
try:
- return json_dict_bytes_to_unicode(self._loader.load(out))
+ return json_dict_bytes_to_unicode(utils.parse_json(out))
except ValueError:
- raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
+ raise errors.AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
diff --git a/lib/ansible/runner/connection_plugins/__init__.py b/v1/ansible/inventory/vars_plugins/__init__.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/__init__.py
rename to v1/ansible/inventory/vars_plugins/__init__.py
diff --git a/v2/ansible/inventory/vars_plugins/noop.py b/v1/ansible/inventory/vars_plugins/noop.py
similarity index 94%
rename from v2/ansible/inventory/vars_plugins/noop.py
rename to v1/ansible/inventory/vars_plugins/noop.py
index 8f0c98cad5..5d4b4b6658 100644
--- a/v2/ansible/inventory/vars_plugins/noop.py
+++ b/v1/ansible/inventory/vars_plugins/noop.py
@@ -15,8 +15,6 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
class VarsModule(object):
diff --git a/lib/ansible/module_common.py b/v1/ansible/module_common.py
similarity index 100%
rename from lib/ansible/module_common.py
rename to v1/ansible/module_common.py
diff --git a/v2/ansible/module_utils/__init__.py b/v1/ansible/module_utils/__init__.py
similarity index 100%
rename from v2/ansible/module_utils/__init__.py
rename to v1/ansible/module_utils/__init__.py
diff --git a/v2/ansible/module_utils/a10.py b/v1/ansible/module_utils/a10.py
similarity index 100%
rename from v2/ansible/module_utils/a10.py
rename to v1/ansible/module_utils/a10.py
diff --git a/v2/ansible/module_utils/basic.py b/v1/ansible/module_utils/basic.py
similarity index 97%
rename from v2/ansible/module_utils/basic.py
rename to v1/ansible/module_utils/basic.py
index 8f9b03f882..54a1a9cfff 100644
--- a/v2/ansible/module_utils/basic.py
+++ b/v1/ansible/module_utils/basic.py
@@ -43,7 +43,7 @@ BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
# can be inserted in any module source automatically by including
# #<> on a blank line by itself inside
# of an ansible module. The source of this common code lives
-# in ansible/executor/module_common.py
+# in lib/ansible/module_common.py
import locale
import os
@@ -65,7 +65,6 @@ import pwd
import platform
import errno
import tempfile
-from itertools import imap, repeat
try:
import json
@@ -235,7 +234,7 @@ def load_platform_subclass(cls, *args, **kwargs):
return super(cls, subclass).__new__(subclass)
-def json_dict_unicode_to_bytes(d, encoding='utf-8'):
+def json_dict_unicode_to_bytes(d):
''' Recursively convert dict keys and values to byte str
Specialized for json return because this only handles, lists, tuples,
@@ -243,17 +242,17 @@ def json_dict_unicode_to_bytes(d, encoding='utf-8'):
'''
if isinstance(d, unicode):
- return d.encode(encoding)
+ return d.encode('utf-8')
elif isinstance(d, dict):
- return dict(imap(json_dict_unicode_to_bytes, d.iteritems(), repeat(encoding)))
+ return dict(map(json_dict_unicode_to_bytes, d.iteritems()))
elif isinstance(d, list):
- return list(imap(json_dict_unicode_to_bytes, d, repeat(encoding)))
+ return list(map(json_dict_unicode_to_bytes, d))
elif isinstance(d, tuple):
- return tuple(imap(json_dict_unicode_to_bytes, d, repeat(encoding)))
+ return tuple(map(json_dict_unicode_to_bytes, d))
else:
return d
-def json_dict_bytes_to_unicode(d, encoding='utf-8'):
+def json_dict_bytes_to_unicode(d):
''' Recursively convert dict keys and values to byte str
Specialized for json return because this only handles, lists, tuples,
@@ -261,13 +260,13 @@ def json_dict_bytes_to_unicode(d, encoding='utf-8'):
'''
if isinstance(d, str):
- return unicode(d, encoding)
+ return unicode(d, 'utf-8')
elif isinstance(d, dict):
- return dict(imap(json_dict_bytes_to_unicode, d.iteritems(), repeat(encoding)))
+ return dict(map(json_dict_bytes_to_unicode, d.iteritems()))
elif isinstance(d, list):
- return list(imap(json_dict_bytes_to_unicode, d, repeat(encoding)))
+ return list(map(json_dict_bytes_to_unicode, d))
elif isinstance(d, tuple):
- return tuple(imap(json_dict_bytes_to_unicode, d, repeat(encoding)))
+ return tuple(map(json_dict_bytes_to_unicode, d))
else:
return d
@@ -360,9 +359,9 @@ class AnsibleModule(object):
# reset to LANG=C if it's an invalid/unavailable locale
self._check_locale()
- self.params = self._load_params()
+ (self.params, self.args) = self._load_params()
- self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log']
+ self._legal_inputs = ['CHECKMODE', 'NO_LOG']
self.aliases = self._handle_aliases()
@@ -889,7 +888,7 @@ class AnsibleModule(object):
def _check_for_check_mode(self):
for (k,v) in self.params.iteritems():
- if k == '_ansible_check_mode':
+ if k == 'CHECKMODE':
if not self.supports_check_mode:
self.exit_json(skipped=True, msg="remote module does not support check mode")
if self.supports_check_mode:
@@ -897,13 +896,13 @@ class AnsibleModule(object):
def _check_for_no_log(self):
for (k,v) in self.params.iteritems():
- if k == '_ansible_no_log':
+ if k == 'NO_LOG':
self.no_log = self.boolean(v)
def _check_invalid_arguments(self):
for (k,v) in self.params.iteritems():
# these should be in legal inputs already
- #if k in ('_ansible_check_mode', '_ansible_no_log'):
+ #if k in ('CHECKMODE', 'NO_LOG'):
# continue
if k not in self._legal_inputs:
self.fail_json(msg="unsupported parameter for module: %s" % k)
@@ -1076,11 +1075,20 @@ class AnsibleModule(object):
def _load_params(self):
''' read the input and return a dictionary and the arguments string '''
- params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
- if params is None:
- params = dict()
- return params
-
+ args = MODULE_ARGS
+ items = shlex.split(args)
+ params = {}
+ for x in items:
+ try:
+ (k, v) = x.split("=",1)
+ except Exception, e:
+ self.fail_json(msg="this module requires key=value arguments (%s)" % (items))
+ if k in params:
+ self.fail_json(msg="duplicate parameter: %s (value=%s)" % (k, v))
+ params[k] = v
+ params2 = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
+ params2.update(params)
+ return (params2, args)
def _log_invocation(self):
''' log that ansible ran the module '''
@@ -1201,17 +1209,13 @@ class AnsibleModule(object):
self.fail_json(msg='Boolean %s not in either boolean list' % arg)
def jsonify(self, data):
- for encoding in ("utf-8", "latin-1"):
+ for encoding in ("utf-8", "latin-1", "unicode_escape"):
try:
return json.dumps(data, encoding=encoding)
- # Old systems using old simplejson module does not support encoding keyword.
- except TypeError:
- try:
- new_data = json_dict_bytes_to_unicode(data, encoding=encoding)
- except UnicodeDecodeError:
- continue
- return json.dumps(new_data)
- except UnicodeDecodeError:
+ # Old systems using simplejson module does not support encoding keyword.
+ except TypeError, e:
+ return json.dumps(data)
+ except UnicodeDecodeError, e:
continue
self.fail_json(msg='Invalid unicode encoding encountered')
@@ -1448,7 +1452,7 @@ class AnsibleModule(object):
msg = None
st_in = None
- # Set a temporary env path if a prefix is passed
+ # Set a temporart env path if a prefix is passed
env=os.environ
if path_prefix:
env['PATH']="%s:%s" % (path_prefix, env['PATH'])
diff --git a/v2/ansible/module_utils/cloudstack.py b/v1/ansible/module_utils/cloudstack.py
similarity index 100%
rename from v2/ansible/module_utils/cloudstack.py
rename to v1/ansible/module_utils/cloudstack.py
diff --git a/v2/ansible/module_utils/database.py b/v1/ansible/module_utils/database.py
similarity index 100%
rename from v2/ansible/module_utils/database.py
rename to v1/ansible/module_utils/database.py
diff --git a/v2/ansible/module_utils/ec2.py b/v1/ansible/module_utils/ec2.py
similarity index 100%
rename from v2/ansible/module_utils/ec2.py
rename to v1/ansible/module_utils/ec2.py
diff --git a/v2/ansible/module_utils/facts.py b/v1/ansible/module_utils/facts.py
similarity index 100%
rename from v2/ansible/module_utils/facts.py
rename to v1/ansible/module_utils/facts.py
diff --git a/v2/ansible/module_utils/gce.py b/v1/ansible/module_utils/gce.py
similarity index 100%
rename from v2/ansible/module_utils/gce.py
rename to v1/ansible/module_utils/gce.py
diff --git a/v2/ansible/module_utils/known_hosts.py b/v1/ansible/module_utils/known_hosts.py
similarity index 100%
rename from v2/ansible/module_utils/known_hosts.py
rename to v1/ansible/module_utils/known_hosts.py
diff --git a/v2/ansible/module_utils/openstack.py b/v1/ansible/module_utils/openstack.py
similarity index 100%
rename from v2/ansible/module_utils/openstack.py
rename to v1/ansible/module_utils/openstack.py
diff --git a/v2/ansible/module_utils/powershell.ps1 b/v1/ansible/module_utils/powershell.ps1
similarity index 97%
rename from v2/ansible/module_utils/powershell.ps1
rename to v1/ansible/module_utils/powershell.ps1
index 57d2c1b101..ee7d3ddeca 100644
--- a/v2/ansible/module_utils/powershell.ps1
+++ b/v1/ansible/module_utils/powershell.ps1
@@ -142,14 +142,14 @@ Function ConvertTo-Bool
return
}
-# Helper function to calculate md5 of a file in a way which powershell 3
+# Helper function to calculate a hash of a file in a way which powershell 3
# and above can handle:
-Function Get-FileMd5($path)
+Function Get-FileChecksum($path)
{
$hash = ""
If (Test-Path -PathType Leaf $path)
{
- $sp = new-object -TypeName System.Security.Cryptography.MD5CryptoServiceProvider;
+ $sp = new-object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider;
$fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read);
[System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
$fp.Dispose();
diff --git a/v2/ansible/module_utils/rax.py b/v1/ansible/module_utils/rax.py
similarity index 100%
rename from v2/ansible/module_utils/rax.py
rename to v1/ansible/module_utils/rax.py
diff --git a/v2/ansible/module_utils/redhat.py b/v1/ansible/module_utils/redhat.py
similarity index 100%
rename from v2/ansible/module_utils/redhat.py
rename to v1/ansible/module_utils/redhat.py
diff --git a/v2/ansible/module_utils/splitter.py b/v1/ansible/module_utils/splitter.py
similarity index 100%
rename from v2/ansible/module_utils/splitter.py
rename to v1/ansible/module_utils/splitter.py
diff --git a/v2/ansible/module_utils/urls.py b/v1/ansible/module_utils/urls.py
similarity index 100%
rename from v2/ansible/module_utils/urls.py
rename to v1/ansible/module_utils/urls.py
diff --git a/lib/ansible/module_utils/vmware.py b/v1/ansible/module_utils/vmware.py
similarity index 100%
rename from lib/ansible/module_utils/vmware.py
rename to v1/ansible/module_utils/vmware.py
diff --git a/lib/ansible/runner/filter_plugins/__init__.py b/v1/ansible/modules/__init__.py
similarity index 100%
rename from lib/ansible/runner/filter_plugins/__init__.py
rename to v1/ansible/modules/__init__.py
diff --git a/v1/ansible/playbook/__init__.py b/v1/ansible/playbook/__init__.py
new file mode 100644
index 0000000000..24ba2d3c6e
--- /dev/null
+++ b/v1/ansible/playbook/__init__.py
@@ -0,0 +1,874 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+import ansible.inventory
+import ansible.constants as C
+import ansible.runner
+from ansible.utils.template import template
+from ansible import utils
+from ansible import errors
+from ansible.module_utils.splitter import split_args, unquote
+import ansible.callbacks
+import ansible.cache
+import os
+import shlex
+import collections
+from play import Play
+import StringIO
+import pipes
+
+# the setup cache stores all variables about a host
+# gathered during the setup step, while the vars cache
+# holds all other variables about a host
+SETUP_CACHE = ansible.cache.FactCache()
+VARS_CACHE = collections.defaultdict(dict)
+RESERVED_TAGS = ['all','tagged','untagged','always']
+
+
+class PlayBook(object):
+ '''
+ runs an ansible playbook, given as a datastructure or YAML filename.
+ A playbook is a deployment, config management, or automation based
+ set of commands to run in series.
+
+ multiple plays/tasks do not execute simultaneously, but tasks in each
+ pattern do execute in parallel (according to the number of forks
+ requested) among the hosts they address
+ '''
+
+ # *****************************************************
+
+ def __init__(self,
+ playbook = None,
+ host_list = C.DEFAULT_HOST_LIST,
+ module_path = None,
+ forks = C.DEFAULT_FORKS,
+ timeout = C.DEFAULT_TIMEOUT,
+ remote_user = C.DEFAULT_REMOTE_USER,
+ remote_pass = C.DEFAULT_REMOTE_PASS,
+ remote_port = None,
+ transport = C.DEFAULT_TRANSPORT,
+ private_key_file = C.DEFAULT_PRIVATE_KEY_FILE,
+ callbacks = None,
+ runner_callbacks = None,
+ stats = None,
+ extra_vars = None,
+ only_tags = None,
+ skip_tags = None,
+ subset = C.DEFAULT_SUBSET,
+ inventory = None,
+ check = False,
+ diff = False,
+ any_errors_fatal = False,
+ vault_password = False,
+ force_handlers = False,
+ # privilege escalation
+ become = C.DEFAULT_BECOME,
+ become_method = C.DEFAULT_BECOME_METHOD,
+ become_user = C.DEFAULT_BECOME_USER,
+ become_pass = None,
+ ):
+
+ """
+ playbook: path to a playbook file
+ host_list: path to a file like /etc/ansible/hosts
+ module_path: path to ansible modules, like /usr/share/ansible/
+ forks: desired level of parallelism
+ timeout: connection timeout
+ remote_user: run as this user if not specified in a particular play
+ remote_pass: use this remote password (for all plays) vs using SSH keys
+ remote_port: default remote port to use if not specified with the host or play
+ transport: how to connect to hosts that don't specify a transport (local, paramiko, etc)
+ callbacks output callbacks for the playbook
+ runner_callbacks: more callbacks, this time for the runner API
+ stats: holds aggregrate data about events occurring to each host
+ inventory: can be specified instead of host_list to use a pre-existing inventory object
+ check: don't change anything, just try to detect some potential changes
+ any_errors_fatal: terminate the entire execution immediately when one of the hosts has failed
+ force_handlers: continue to notify and run handlers even if a task fails
+ """
+
+ self.SETUP_CACHE = SETUP_CACHE
+ self.VARS_CACHE = VARS_CACHE
+
+ arguments = []
+ if playbook is None:
+ arguments.append('playbook')
+ if callbacks is None:
+ arguments.append('callbacks')
+ if runner_callbacks is None:
+ arguments.append('runner_callbacks')
+ if stats is None:
+ arguments.append('stats')
+ if arguments:
+ raise Exception('PlayBook missing required arguments: %s' % ', '.join(arguments))
+
+ if extra_vars is None:
+ extra_vars = {}
+ if only_tags is None:
+ only_tags = [ 'all' ]
+ if skip_tags is None:
+ skip_tags = []
+
+ self.check = check
+ self.diff = diff
+ self.module_path = module_path
+ self.forks = forks
+ self.timeout = timeout
+ self.remote_user = remote_user
+ self.remote_pass = remote_pass
+ self.remote_port = remote_port
+ self.transport = transport
+ self.callbacks = callbacks
+ self.runner_callbacks = runner_callbacks
+ self.stats = stats
+ self.extra_vars = extra_vars
+ self.global_vars = {}
+ self.private_key_file = private_key_file
+ self.only_tags = only_tags
+ self.skip_tags = skip_tags
+ self.any_errors_fatal = any_errors_fatal
+ self.vault_password = vault_password
+ self.force_handlers = force_handlers
+
+ self.become = become
+ self.become_method = become_method
+ self.become_user = become_user
+ self.become_pass = become_pass
+
+ self.callbacks.playbook = self
+ self.runner_callbacks.playbook = self
+
+ if inventory is None:
+ self.inventory = ansible.inventory.Inventory(host_list)
+ self.inventory.subset(subset)
+ else:
+ self.inventory = inventory
+
+ if self.module_path is not None:
+ utils.plugins.module_finder.add_directory(self.module_path)
+
+ self.basedir = os.path.dirname(playbook) or '.'
+ utils.plugins.push_basedir(self.basedir)
+
+ # let inventory know the playbook basedir so it can load more vars
+ self.inventory.set_playbook_basedir(self.basedir)
+
+ vars = extra_vars.copy()
+ vars['playbook_dir'] = os.path.abspath(self.basedir)
+ if self.inventory.basedir() is not None:
+ vars['inventory_dir'] = self.inventory.basedir()
+
+ if self.inventory.src() is not None:
+ vars['inventory_file'] = self.inventory.src()
+
+ self.filename = playbook
+ (self.playbook, self.play_basedirs) = self._load_playbook_from_file(playbook, vars)
+ ansible.callbacks.load_callback_plugins()
+ ansible.callbacks.set_playbook(self.callbacks, self)
+
+ self._ansible_version = utils.version_info(gitinfo=True)
+
+ # *****************************************************
+
+ def _get_playbook_vars(self, play_ds, existing_vars):
+ '''
+ Gets the vars specified with the play and blends them
+ with any existing vars that have already been read in
+ '''
+ new_vars = existing_vars.copy()
+ if 'vars' in play_ds:
+ if isinstance(play_ds['vars'], dict):
+ new_vars.update(play_ds['vars'])
+ elif isinstance(play_ds['vars'], list):
+ for v in play_ds['vars']:
+ new_vars.update(v)
+ return new_vars
+
+ # *****************************************************
+
+ def _get_include_info(self, play_ds, basedir, existing_vars={}):
+ '''
+ Gets any key=value pairs specified with the included file
+ name and returns the merged vars along with the path
+ '''
+ new_vars = existing_vars.copy()
+ tokens = split_args(play_ds.get('include', ''))
+ for t in tokens[1:]:
+ try:
+ (k,v) = unquote(t).split("=", 1)
+ new_vars[k] = template(basedir, v, new_vars)
+ except ValueError, e:
+ raise errors.AnsibleError('included playbook variables must be in the form k=v, got: %s' % t)
+
+ return (new_vars, unquote(tokens[0]))
+
+ # *****************************************************
+
+ def _get_playbook_vars_files(self, play_ds, existing_vars_files):
+ new_vars_files = list(existing_vars_files)
+ if 'vars_files' in play_ds:
+ new_vars_files = utils.list_union(new_vars_files, play_ds['vars_files'])
+ return new_vars_files
+
+ # *****************************************************
+
+ def _extend_play_vars(self, play, vars={}):
+ '''
+ Extends the given play's variables with the additional specified vars.
+ '''
+
+ if 'vars' not in play or not play['vars']:
+ # someone left out or put an empty "vars:" entry in their playbook
+ return vars.copy()
+
+ play_vars = None
+ if isinstance(play['vars'], dict):
+ play_vars = play['vars'].copy()
+ play_vars.update(vars)
+ elif isinstance(play['vars'], list):
+ # nobody should really do this, but handle vars: a=1 b=2
+ play_vars = play['vars'][:]
+ play_vars.extend([{k:v} for k,v in vars.iteritems()])
+
+ return play_vars
+
+ # *****************************************************
+
    def _load_playbook_from_file(self, path, vars={}, vars_files=[]):
        '''
        run top level error checking on playbooks and allow them to include other playbooks.

        Parses the YAML at *path* and flattens any 'include:' entries by
        recursively loading the referenced playbook files.  Returns a tuple
        (accumulated_plays, play_basedirs) of two parallel lists: each play
        dict paired with the base directory it was loaded from.

        NOTE(review): the mutable defaults ({} / []) are never mutated here,
        but callers should not rely on them being fresh objects per call.
        '''

        playbook_data = utils.parse_yaml_from_file(path, vault_password=self.vault_password)
        accumulated_plays = []
        play_basedirs = []

        # a playbook file must be a YAML list of plays
        if type(playbook_data) != list:
            raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list, got %s" % type(playbook_data))

        # relative paths inside the playbook resolve against its directory
        basedir = os.path.dirname(path) or '.'
        utils.plugins.push_basedir(basedir)
        for play in playbook_data:
            if type(play) != dict:
                raise errors.AnsibleError("parse error: each play in a playbook must be a YAML dictionary (hash), received: %s" % play)

            if 'include' in play:
                # a playbook (list of plays) decided to include some other list of plays
                # from another file. The result is a flat list of plays in the end.

                play_vars = self._get_playbook_vars(play, vars)
                play_vars_files = self._get_playbook_vars_files(play, vars_files)
                inc_vars, inc_path = self._get_include_info(play, basedir, play_vars)
                play_vars.update(inc_vars)

                # the include path itself may be templated with the play vars
                included_path = utils.path_dwim(basedir, template(basedir, inc_path, play_vars))
                (plays, basedirs) = self._load_playbook_from_file(included_path, vars=play_vars, vars_files=play_vars_files)
                for p in plays:
                    # support for parameterized play includes works by passing
                    # those variables along to the subservient play
                    p['vars'] = self._extend_play_vars(p, play_vars)
                    # now add in the vars_files
                    p['vars_files'] = utils.list_union(p.get('vars_files', []), play_vars_files)

                accumulated_plays.extend(plays)
                play_basedirs.extend(basedirs)

            else:

                # this is a normal (non-included play)
                accumulated_plays.append(play)
                play_basedirs.append(basedir)

        return (accumulated_plays, play_basedirs)
+
+ # *****************************************************
+
    def run(self):
        '''
        run all patterns in the playbook

        Builds a Play per play datastructure, selects plays by the requested
        tags, validates --tags/--skip-tags arguments, runs the selected plays
        in order, and returns a dict mapping each processed host name to its
        summarized stats.
        '''
        plays = []
        matched_tags_all = set()
        unmatched_tags_all = set()

        # loop through all patterns and run them
        self.callbacks.on_start()
        for (play_ds, play_basedir) in zip(self.playbook, self.play_basedirs):
            play = Play(self, play_ds, play_basedir, vault_password=self.vault_password)
            assert play is not None

            matched_tags, unmatched_tags = play.compare_tags(self.only_tags)

            # track tag coverage across all plays for the validation below
            matched_tags_all = matched_tags_all | matched_tags
            unmatched_tags_all = unmatched_tags_all | unmatched_tags

            # Remove tasks we wish to skip
            matched_tags = matched_tags - set(self.skip_tags)

            # if we have matched_tags, the play must be run.
            # if the play contains no tasks, assume we just want to gather facts
            # in this case there are actually 3 meta tasks (handler flushes) not 0
            # tasks, so that's why there's a check against 3
            if (len(matched_tags) > 0 or len(play.tasks()) == 3):
                plays.append(play)

        # if the playbook is invoked with --tags or --skip-tags that don't
        # exist at all in the playbooks then we need to raise an error so that
        # the user can correct the arguments.
        unknown_tags = ((set(self.only_tags) | set(self.skip_tags)) -
                        (matched_tags_all | unmatched_tags_all))

        # reserved (internal) tags are never reported as unknown
        for t in RESERVED_TAGS:
            unknown_tags.discard(t)

        if len(unknown_tags) > 0:
            for t in RESERVED_TAGS:
                unmatched_tags_all.discard(t)
            msg = 'tag(s) not found in playbook: %s. possible values: %s'
            unknown = ','.join(sorted(unknown_tags))
            unmatched = ','.join(sorted(unmatched_tags_all))
            raise errors.AnsibleError(msg % (unknown, unmatched))

        # run each selected play; a False return from _run_play means no
        # hosts remained, so stop processing further plays
        for play in plays:
            ansible.callbacks.set_play(self.callbacks, play)
            ansible.callbacks.set_play(self.runner_callbacks, play)
            if not self._run_play(play):
                break

        ansible.callbacks.set_play(self.callbacks, None)
        ansible.callbacks.set_play(self.runner_callbacks, None)

        # summarize the results
        results = {}
        for host in self.stats.processed.keys():
            results[host] = self.stats.summarize(host)
        return results
+
+ # *****************************************************
+
    def _async_poll(self, poller, async_seconds, async_poll_interval):
        '''
        launch an async job, if poll_interval is set, wait for completion

        Returns the poller's results dict with any host that is still pending
        after the wait marked as failed ('timed out') under 'contacted'.
        '''

        results = poller.wait(async_seconds, async_poll_interval)

        # mark any hosts that are still listed as started as failed
        # since these likely got killed by async_wrapper
        for host in poller.hosts_to_poll:
            reason = { 'failed' : 1, 'rc' : None, 'msg' : 'timed out' }
            self.runner_callbacks.on_async_failed(host, reason, poller.runner.vars_cache[host]['ansible_job_id'])
            results['contacted'][host] = reason

        return results
+
+ # *****************************************************
+
+ def _trim_unavailable_hosts(self, hostlist=[], keep_failed=False):
+ ''' returns a list of hosts that haven't failed and aren't dark '''
+
+ return [ h for h in hostlist if (keep_failed or h not in self.stats.failures) and (h not in self.stats.dark)]
+
+ # *****************************************************
+
    def _run_task_internal(self, task, include_failed=False):
        '''
        run a particular module step in a playbook

        Restricts the inventory to the currently-available hosts, builds and
        executes a Runner for the task (sync or async), then lifts the
        restriction.  Returns the runner results dict, or None when no hosts
        were contacted and none were dark (i.e. nothing matched).
        '''

        # include_failed keeps failed hosts in scope (used for forced handlers)
        hosts = self._trim_unavailable_hosts(self.inventory.list_hosts(task.play._play_hosts), keep_failed=include_failed)
        self.inventory.restrict_to(hosts)

        runner = ansible.runner.Runner(
            pattern=task.play.hosts,
            inventory=self.inventory,
            module_name=task.module_name,
            module_args=task.module_args,
            forks=self.forks,
            remote_pass=self.remote_pass,
            module_path=self.module_path,
            timeout=self.timeout,
            remote_user=task.remote_user,
            remote_port=task.play.remote_port,
            module_vars=task.module_vars,
            play_vars=task.play_vars,
            play_file_vars=task.play_file_vars,
            role_vars=task.role_vars,
            role_params=task.role_params,
            default_vars=task.default_vars,
            extra_vars=self.extra_vars,
            private_key_file=self.private_key_file,
            setup_cache=self.SETUP_CACHE,
            vars_cache=self.VARS_CACHE,
            basedir=task.play.basedir,
            conditional=task.when,
            callbacks=self.runner_callbacks,
            transport=task.transport,
            is_playbook=True,
            check=self.check,
            diff=self.diff,
            environment=task.environment,
            complex_args=task.args,
            accelerate=task.play.accelerate,
            accelerate_port=task.play.accelerate_port,
            accelerate_ipv6=task.play.accelerate_ipv6,
            error_on_undefined_vars=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR,
            vault_pass = self.vault_password,
            run_hosts=hosts,
            no_log=task.no_log,
            run_once=task.run_once,
            become=task.become,
            become_method=task.become_method,
            become_user=task.become_user,
            become_pass=task.become_pass,
        )

        # expose the effective host list and ansible version to modules
        runner.module_vars.update({'play_hosts': hosts})
        runner.module_vars.update({'ansible_version': self._ansible_version})

        if task.async_seconds == 0:
            # synchronous execution
            results = runner.run()
        else:
            results, poller = runner.run_async(task.async_seconds)
            self.stats.compute(results)
            if task.async_poll_interval > 0:
                # if not polling, playbook requested fire and forget, so don't poll
                results = self._async_poll(poller, task.async_seconds, task.async_poll_interval)
            else:
                # fire-and-forget: report the launch of each async job
                for (host, res) in results.get('contacted', {}).iteritems():
                    self.runner_callbacks.on_async_ok(host, res, poller.runner.vars_cache[host]['ansible_job_id'])

        contacted = results.get('contacted',{})
        dark = results.get('dark', {})

        self.inventory.lift_restriction()

        # None signals to the caller that nothing at all was matched
        if len(contacted.keys()) == 0 and len(dark.keys()) == 0:
            return None

        return results
+
+ # *****************************************************
+
    def _run_task(self, play, task, is_handler):
        '''
        run a single task in the playbook and recursively run any subtasks.

        Returns True when hosts remain to continue the play, False when the
        task matched no hosts at all.  Side effects: updates stats, the
        SETUP/VARS caches (facts and registered variables) and flags any
        notified handlers.
        '''

        ansible.callbacks.set_task(self.callbacks, task)
        ansible.callbacks.set_task(self.runner_callbacks, task)

        # prefix the task name with its role, if it came from one
        if task.role_name:
            name = '%s | %s' % (task.role_name, task.name)
        else:
            name = task.name

        try:
            # v1 HACK: we don't have enough information to template many names
            # at this point. Rather than making this work for all cases in
            # v1, just make this degrade gracefully. Will fix in v2
            name = template(play.basedir, name, task.module_vars, lookup_fatal=False, filter_fatal=False)
        except:
            # deliberate best-effort: fall back to the untemplated name
            pass

        self.callbacks.on_task_start(name, is_handler)
        if hasattr(self.callbacks, 'skip_task') and self.callbacks.skip_task:
            # a callback (e.g. --list-tasks/--step style) asked us to skip
            ansible.callbacks.set_task(self.callbacks, None)
            ansible.callbacks.set_task(self.runner_callbacks, None)
            return True

        # template ignore_errors
        # TODO: Is this needed here? cond is templated again in
        # check_conditional after some more manipulations.
        # TODO: we don't have enough information here to template cond either
        # (see note on templating name above)
        cond = template(play.basedir, task.ignore_errors, task.module_vars, expand_lists=False)
        task.ignore_errors = utils.check_conditional(cond, play.basedir, task.module_vars, fail_on_undefined=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR)

        # load up an appropriate ansible runner to run the task in parallel
        include_failed = is_handler and play.force_handlers
        results = self._run_task_internal(task, include_failed=include_failed)

        # if no hosts are matched, carry on
        hosts_remaining = True
        if results is None:
            hosts_remaining = False
            results = {}

        contacted = results.get('contacted', {})
        self.stats.compute(results, ignore_errors=task.ignore_errors)

        def _register_play_vars(host, result):
            # when 'register' is used, persist the result in the vars cache
            # rather than the setup cache - vars should be transient between
            # playbook executions
            if 'stdout' in result and 'stdout_lines' not in result:
                result['stdout_lines'] = result['stdout'].splitlines()
            utils.update_hash(self.VARS_CACHE, host, {task.register: result})

        def _save_play_facts(host, facts):
            # saves play facts in SETUP_CACHE, unless the module executed was
            # set_fact, in which case we add them to the VARS_CACHE
            if task.module_name in ('set_fact', 'include_vars'):
                utils.update_hash(self.VARS_CACHE, host, facts)
            else:
                utils.update_hash(self.SETUP_CACHE, host, facts)

        # add facts to the global setup cache
        for host, result in contacted.iteritems():
            if 'results' in result:
                # task ran with_ lookup plugin, so facts are encapsulated in
                # multiple list items in the results key
                for res in result['results']:
                    if type(res) == dict:
                        facts = res.get('ansible_facts', {})
                        _save_play_facts(host, facts)
            else:
                # when facts are returned, persist them in the setup cache
                facts = result.get('ansible_facts', {})
                _save_play_facts(host, facts)

            # if requested, save the result into the registered variable name
            if task.register:
                _register_play_vars(host, result)

        # also have to register some failed, but ignored, tasks
        if task.ignore_errors and task.register:
            failed = results.get('failed', {})
            for host, result in failed.iteritems():
                _register_play_vars(host, result)

        # flag which notify handlers need to be run
        if len(task.notify) > 0:
            for host, results in results.get('contacted',{}).iteritems():
                if results.get('changed', False):
                    for handler_name in task.notify:
                        self._flag_handler(play, template(play.basedir, handler_name, task.module_vars), host)

        ansible.callbacks.set_task(self.callbacks, None)
        ansible.callbacks.set_task(self.runner_callbacks, None)
        return hosts_remaining
+
+ # *****************************************************
+
+ def _flag_handler(self, play, handler_name, host):
+ '''
+ if a task has any notify elements, flag handlers for run
+ at end of execution cycle for hosts that have indicated
+ changes have been made
+ '''
+
+ found = False
+ for x in play.handlers():
+ if handler_name == template(play.basedir, x.name, x.module_vars):
+ found = True
+ self.callbacks.on_notify(host, x.name)
+ x.notified_by.append(host)
+ if not found:
+ raise errors.AnsibleError("change handler (%s) is not defined" % handler_name)
+
+ # *****************************************************
+
    def _do_setup_step(self, play):
        '''
        get facts from the remote system

        Honors the play's gather_facts setting and the configured gathering
        policy: 'smart' skips hosts whose facts are already cached,
        'explicit' (or gather_facts: False) skips fact gathering entirely.
        Returns the raw setup results dict ({} when gathering was skipped).
        '''

        host_list = self._trim_unavailable_hosts(play._play_hosts)

        if play.gather_facts is None and C.DEFAULT_GATHERING == 'smart':
            # only gather on hosts that have no cached 'module_setup' marker
            host_list = [h for h in host_list if h not in self.SETUP_CACHE or 'module_setup' not in self.SETUP_CACHE[h]]
            if len(host_list) == 0:
                return {}
        elif play.gather_facts is False or (play.gather_facts is None and C.DEFAULT_GATHERING == 'explicit'):
            return {}

        self.callbacks.on_setup()
        self.inventory.restrict_to(host_list)

        # setup is not a user task; clear any current-task state in callbacks
        ansible.callbacks.set_task(self.callbacks, None)
        ansible.callbacks.set_task(self.runner_callbacks, None)

        # push any variables down to the system
        setup_results = ansible.runner.Runner(
            basedir=self.basedir,
            pattern=play.hosts,
            module_name='setup',
            module_args={},
            inventory=self.inventory,
            forks=self.forks,
            module_path=self.module_path,
            timeout=self.timeout,
            remote_user=play.remote_user,
            remote_pass=self.remote_pass,
            remote_port=play.remote_port,
            private_key_file=self.private_key_file,
            setup_cache=self.SETUP_CACHE,
            vars_cache=self.VARS_CACHE,
            callbacks=self.runner_callbacks,
            become=play.become,
            become_method=play.become_method,
            become_user=play.become_user,
            become_pass=self.become_pass,
            vault_pass=self.vault_password,
            transport=play.transport,
            is_playbook=True,
            module_vars=play.vars,
            play_vars=play.vars,
            play_file_vars=play.vars_file_vars,
            role_vars=play.role_vars,
            default_vars=play.default_vars,
            check=self.check,
            diff=self.diff,
            accelerate=play.accelerate,
            accelerate_port=play.accelerate_port,
            ).run()
        self.stats.compute(setup_results, setup=True)

        self.inventory.lift_restriction()

        # now for each result, load into the setup cache so we can
        # let runner template out future commands
        setup_ok = setup_results.get('contacted', {})
        for (host, result) in setup_ok.iteritems():
            # 'module_setup' marks the host as gathered for 'smart' mode
            utils.update_hash(self.SETUP_CACHE, host, {'module_setup': True})
            utils.update_hash(self.SETUP_CACHE, host, result.get('ansible_facts', {}))
        return setup_results
+
+ # *****************************************************
+
+
+ def generate_retry_inventory(self, replay_hosts):
+ '''
+ called by /usr/bin/ansible when a playbook run fails. It generates an inventory
+ that allows re-running on ONLY the failed hosts. This may duplicate some
+ variable information in group_vars/host_vars but that is ok, and expected.
+ '''
+
+ buf = StringIO.StringIO()
+ for x in replay_hosts:
+ buf.write("%s\n" % x)
+ basedir = C.shell_expand_path(C.RETRY_FILES_SAVE_PATH)
+ filename = "%s.retry" % os.path.basename(self.filename)
+ filename = filename.replace(".yml","")
+ filename = os.path.join(basedir, filename)
+
+ try:
+ if not os.path.exists(basedir):
+ os.makedirs(basedir)
+
+ fd = open(filename, 'w')
+ fd.write(buf.getvalue())
+ fd.close()
+ except:
+ ansible.callbacks.display(
+ "\nERROR: could not create retry file. Check the value of \n"
+ + "the configuration variable 'retry_files_save_path' or set \n"
+ + "'retry_files_enabled' to False to avoid this message.\n",
+ color='red'
+ )
+ return None
+
+ return filename
+
+ # *****************************************************
+ def tasks_to_run_in_play(self, play):
+
+ tasks = []
+
+ for task in play.tasks():
+ # only run the task if the requested tags match or has 'always' tag
+ u = set(['untagged'])
+ task_set = set(task.tags)
+
+ if 'always' in task.tags:
+ should_run = True
+ else:
+ if 'all' in self.only_tags:
+ should_run = True
+ else:
+ should_run = False
+ if 'tagged' in self.only_tags:
+ if task_set != u:
+ should_run = True
+ elif 'untagged' in self.only_tags:
+ if task_set == u:
+ should_run = True
+ else:
+ if task_set.intersection(self.only_tags):
+ should_run = True
+
+ # Check for tags that we need to skip
+ if 'all' in self.skip_tags:
+ should_run = False
+ else:
+ if 'tagged' in self.skip_tags:
+ if task_set != u:
+ should_run = False
+ elif 'untagged' in self.skip_tags:
+ if task_set == u:
+ should_run = False
+ else:
+ if should_run:
+ if task_set.intersection(self.skip_tags):
+ should_run = False
+
+ if should_run:
+ tasks.append(task)
+
+ return tasks
+
+ # *****************************************************
    def _run_play(self, play):
        '''
        run a list of tasks for a given pattern, in order

        Gathers facts, splits the hosts into serial batches, runs the
        selected tasks per batch (flushing handlers on meta tasks) and
        enforces max_fail_percentage / any_errors_fatal.  Returns True when
        the play completed (or matched no hosts), False when execution must
        stop because no hosts remain or tasks failed without force_handlers.
        '''

        self.callbacks.on_play_start(play.name)
        # Get the hosts for this play
        play._play_hosts = self.inventory.list_hosts(play.hosts)
        # if no hosts matches this play, drop out
        if not play._play_hosts:
            self.callbacks.on_no_hosts_matched()
            return True

        # get facts from system
        self._do_setup_step(play)

        # now with that data, handle conditional variable file imports!
        all_hosts = self._trim_unavailable_hosts(play._play_hosts)
        play.update_vars_files(all_hosts, vault_password=self.vault_password)
        hosts_count = len(all_hosts)

        if play.serial.endswith("%"):

            # This is a percentage, so calculate it based on the
            # number of hosts
            serial_pct = int(play.serial.replace("%",""))
            serial = int((serial_pct/100.0) * len(all_hosts))

            # Ensure that no matter how small the percentage, serial
            # can never fall below 1, so that things actually happen
            serial = max(serial, 1)
        else:
            serial = int(play.serial)

        serialized_batch = []
        if serial <= 0:
            serialized_batch = [all_hosts]
        else:
            # do N forks all the way through before moving to next
            while len(all_hosts) > 0:
                play_hosts = []
                for x in range(serial):
                    if len(all_hosts) > 0:
                        play_hosts.append(all_hosts.pop(0))
                serialized_batch.append(play_hosts)

        task_errors = False
        for on_hosts in serialized_batch:

            # restrict the play to just the hosts we have in our on_hosts block that are
            # available.
            play._play_hosts = self._trim_unavailable_hosts(on_hosts)
            self.inventory.also_restrict_to(on_hosts)

            for task in self.tasks_to_run_in_play(play):

                if task.meta is not None:
                    # meta tasks can force handlers to run mid-play
                    if task.meta == 'flush_handlers':
                        self.run_handlers(play)

                    # skip calling the handler till the play is finished
                    continue

                if not self._run_task(play, task, False):
                    # whether no hosts matched is fatal or not depends if it was on the initial step.
                    # if we got exactly no hosts on the first step (setup!) then the host group
                    # just didn't match anything and that's ok
                    return False

                # Get a new list of what hosts are left as available, the ones that
                # did not go fail/dark during the task
                host_list = self._trim_unavailable_hosts(play._play_hosts)

                # Set max_fail_pct to 0, So if any hosts fails, bail out
                if task.any_errors_fatal and len(host_list) < hosts_count:
                    play.max_fail_pct = 0

                # If threshold for max nodes failed is exceeded, bail out.
                # NOTE(review): play.serial is a string here (it is compared
                # with .endswith above); on python 2 'str > int' is always
                # True, so this branch is effectively always taken -- confirm
                if play.serial > 0:
                    # if serial is set, we need to shorten the size of host_count
                    play_count = len(play._play_hosts)
                    if (play_count - len(host_list)) > int((play.max_fail_pct)/100.0 * play_count):
                        host_list = None
                else:
                    if (hosts_count - len(host_list)) > int((play.max_fail_pct)/100.0 * hosts_count):
                        host_list = None

                # if no hosts remain, drop out
                if not host_list:
                    if play.force_handlers:
                        # remember the failure but still run handlers below
                        task_errors = True
                        break
                    else:
                        self.callbacks.on_no_hosts_remaining()
                        return False

            # lift restrictions after each play finishes
            self.inventory.lift_also_restriction()

            if task_errors and not play.force_handlers:
                # if there were failed tasks and handler execution
                # is not forced, quit the play with an error
                return False
            else:
                # no errors, go ahead and execute all handlers
                if not self.run_handlers(play):
                    return False

        return True
+
+
    def run_handlers(self, play):
        '''
        Run all notified handlers of *play* at each meta task boundary.

        For every handler with pending notifications, the inventory is
        restricted to the notifying hosts, the handler runs once per unique
        templated name, and successfully-serviced hosts are removed from the
        handler's notified_by list.  Returns False when no hosts remain and
        handlers are not forced, True otherwise.
        '''
        on_hosts = play._play_hosts
        hosts_count = len(on_hosts)
        for task in play.tasks():
            if task.meta is not None:

                # maps templated handler name -> already fired marker
                fired_names = {}
                for handler in play.handlers():
                    if len(handler.notified_by) > 0:
                        self.inventory.restrict_to(handler.notified_by)

                        # Resolve the variables first
                        handler_name = template(play.basedir, handler.name, handler.module_vars)
                        if handler_name not in fired_names:
                            self._run_task(play, handler, True)
                        # prevent duplicate handler includes from running more than once
                        fired_names[handler_name] = 1

                        host_list = self._trim_unavailable_hosts(play._play_hosts)
                        # any_errors_fatal turns a single failure into a bail-out
                        if handler.any_errors_fatal and len(host_list) < hosts_count:
                            play.max_fail_pct = 0
                        if (hosts_count - len(host_list)) > int((play.max_fail_pct)/100.0 * hosts_count):
                            host_list = None
                        if not host_list and not play.force_handlers:
                            self.callbacks.on_no_hosts_remaining()
                            return False

                        self.inventory.lift_restriction()
                        # drop the hosts we just serviced from the pending list
                        new_list = handler.notified_by[:]
                        for host in handler.notified_by:
                            if host in on_hosts:
                                while host in new_list:
                                    new_list.remove(host)
                        handler.notified_by = new_list

                continue

        return True
diff --git a/v1/ansible/playbook/play.py b/v1/ansible/playbook/play.py
new file mode 100644
index 0000000000..6ee85e0bf4
--- /dev/null
+++ b/v1/ansible/playbook/play.py
@@ -0,0 +1,949 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+#############################################
+
+from ansible.utils.template import template
+from ansible import utils
+from ansible import errors
+from ansible.playbook.task import Task
+from ansible.module_utils.splitter import split_args, unquote
+import ansible.constants as C
+import pipes
+import shlex
+import os
+import sys
+import uuid
+
+
+class Play(object):
+
    # keywords that are legal at both playbook and play level; shared by
    # __slots__ (attribute storage) and VALID_KEYS (user input validation)
    _pb_common = [
        'accelerate', 'accelerate_ipv6', 'accelerate_port', 'any_errors_fatal', 'become',
        'become_method', 'become_user', 'environment', 'force_handlers', 'gather_facts',
        'handlers', 'hosts', 'name', 'no_log', 'remote_user', 'roles', 'serial', 'su',
        'su_user', 'sudo', 'sudo_user', 'tags', 'vars', 'vars_files', 'vars_prompt',
        'vault_password',
    ]

    # restrict instances to exactly these attributes (no per-instance __dict__)
    __slots__ = _pb_common + [
        '_ds', '_handlers', '_play_hosts', '_tasks', 'any_errors_fatal', 'basedir',
        'default_vars', 'included_roles', 'max_fail_pct', 'playbook', 'remote_port',
        'role_vars', 'transport', 'vars_file_vars',
    ]

    # to catch typos and so forth -- these are userland names
    # and don't line up 1:1 with how they are stored
    VALID_KEYS = frozenset(_pb_common + [
        'connection', 'include', 'max_fail_percentage', 'port', 'post_tasks',
        'pre_tasks', 'role_names', 'tasks', 'user',
    ])
+
+ # *************************************************
+
+ def __init__(self, playbook, ds, basedir, vault_password=None):
+ ''' constructor loads from a play datastructure '''
+
+ for x in ds.keys():
+ if not x in Play.VALID_KEYS:
+ raise errors.AnsibleError("%s is not a legal parameter of an Ansible Play" % x)
+
+ # allow all playbook keys to be set by --extra-vars
+ self.vars = ds.get('vars', {})
+ self.vars_prompt = ds.get('vars_prompt', {})
+ self.playbook = playbook
+ self.vars = self._get_vars()
+ self.vars_file_vars = dict() # these are vars read in from vars_files:
+ self.role_vars = dict() # these are vars read in from vars/main.yml files in roles
+ self.basedir = basedir
+ self.roles = ds.get('roles', None)
+ self.tags = ds.get('tags', None)
+ self.vault_password = vault_password
+ self.environment = ds.get('environment', {})
+
+ if self.tags is None:
+ self.tags = []
+ elif type(self.tags) in [ str, unicode ]:
+ self.tags = self.tags.split(",")
+ elif type(self.tags) != list:
+ self.tags = []
+
+ # make sure we have some special internal variables set, which
+ # we use later when loading tasks and handlers
+ load_vars = dict()
+ load_vars['playbook_dir'] = os.path.abspath(self.basedir)
+ if self.playbook.inventory.basedir() is not None:
+ load_vars['inventory_dir'] = self.playbook.inventory.basedir()
+ if self.playbook.inventory.src() is not None:
+ load_vars['inventory_file'] = self.playbook.inventory.src()
+
+ # We first load the vars files from the datastructure
+ # so we have the default variables to pass into the roles
+ self.vars_files = ds.get('vars_files', [])
+ if not isinstance(self.vars_files, list):
+ raise errors.AnsibleError('vars_files must be a list')
+ processed_vars_files = self._update_vars_files_for_host(None)
+
+ # now we load the roles into the datastructure
+ self.included_roles = []
+ ds = self._load_roles(self.roles, ds)
+
+ # and finally re-process the vars files as they may have been updated
+ # by the included roles, but exclude any which have been processed
+ self.vars_files = utils.list_difference(ds.get('vars_files', []), processed_vars_files)
+ if not isinstance(self.vars_files, list):
+ raise errors.AnsibleError('vars_files must be a list')
+
+ self._update_vars_files_for_host(None)
+
+ # template everything to be efficient, but do not pre-mature template
+ # tasks/handlers as they may have inventory scope overrides. We also
+ # create a set of temporary variables for templating, so we don't
+ # trample on the existing vars structures
+ _tasks = ds.pop('tasks', [])
+ _handlers = ds.pop('handlers', [])
+
+ temp_vars = utils.combine_vars(self.vars, self.vars_file_vars)
+ temp_vars = utils.combine_vars(temp_vars, self.playbook.extra_vars)
+
+ try:
+ ds = template(basedir, ds, temp_vars)
+ except errors.AnsibleError, e:
+ utils.warning("non fatal error while trying to template play variables: %s" % (str(e)))
+
+ ds['tasks'] = _tasks
+ ds['handlers'] = _handlers
+
+ self._ds = ds
+
+ hosts = ds.get('hosts')
+ if hosts is None:
+ raise errors.AnsibleError('hosts declaration is required')
+ elif isinstance(hosts, list):
+ try:
+ hosts = ';'.join(hosts)
+ except TypeError,e:
+ raise errors.AnsibleError('improper host declaration: %s' % str(e))
+
+ self.serial = str(ds.get('serial', 0))
+ self.hosts = hosts
+ self.name = ds.get('name', self.hosts)
+ self._tasks = ds.get('tasks', [])
+ self._handlers = ds.get('handlers', [])
+ self.remote_user = ds.get('remote_user', ds.get('user', self.playbook.remote_user))
+ self.remote_port = ds.get('port', self.playbook.remote_port)
+ self.transport = ds.get('connection', self.playbook.transport)
+ self.remote_port = self.remote_port
+ self.any_errors_fatal = utils.boolean(ds.get('any_errors_fatal', 'false'))
+ self.accelerate = utils.boolean(ds.get('accelerate', 'false'))
+ self.accelerate_port = ds.get('accelerate_port', None)
+ self.accelerate_ipv6 = ds.get('accelerate_ipv6', False)
+ self.max_fail_pct = int(ds.get('max_fail_percentage', 100))
+ self.no_log = utils.boolean(ds.get('no_log', 'false'))
+ self.force_handlers = utils.boolean(ds.get('force_handlers', self.playbook.force_handlers))
+
+ # Fail out if user specifies conflicting privilege escalations
+ if (ds.get('become') or ds.get('become_user')) and (ds.get('sudo') or ds.get('sudo_user')):
+ raise errors.AnsibleError('sudo params ("become", "become_user") and su params ("sudo", "sudo_user") cannot be used together')
+ if (ds.get('become') or ds.get('become_user')) and (ds.get('su') or ds.get('su_user')):
+ raise errors.AnsibleError('sudo params ("become", "become_user") and su params ("su", "su_user") cannot be used together')
+ if (ds.get('sudo') or ds.get('sudo_user')) and (ds.get('su') or ds.get('su_user')):
+ raise errors.AnsibleError('sudo params ("sudo", "sudo_user") and su params ("su", "su_user") cannot be used together')
+
+ # become settings are inherited and updated normally
+ self.become = ds.get('become', self.playbook.become)
+ self.become_method = ds.get('become_method', self.playbook.become_method)
+ self.become_user = ds.get('become_user', self.playbook.become_user)
+
+ # Make sure current play settings are reflected in become fields
+ if 'sudo' in ds:
+ self.become=ds['sudo']
+ self.become_method='sudo'
+ if 'sudo_user' in ds:
+ self.become_user=ds['sudo_user']
+ elif 'su' in ds:
+ self.become=True
+ self.become=ds['su']
+ self.become_method='su'
+ if 'su_user' in ds:
+ self.become_user=ds['su_user']
+
+ # gather_facts is not a simple boolean, as None means that a 'smart'
+ # fact gathering mode will be used, so we need to be careful here as
+ # calling utils.boolean(None) returns False
+ self.gather_facts = ds.get('gather_facts', None)
+ if self.gather_facts is not None:
+ self.gather_facts = utils.boolean(self.gather_facts)
+
+ load_vars['role_names'] = ds.get('role_names', [])
+
+ self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars)
+ self._handlers = self._load_tasks(self._ds.get('handlers', []), load_vars)
+
+ # apply any missing tags to role tasks
+ self._late_merge_role_tags()
+
+ # place holder for the discovered hosts to be used in this play
+ self._play_hosts = None
+
+ # *************************************************
+
    def _get_role_path(self, role):
        """
        Returns the path on disk to the directory containing
        the role directories like tasks, templates, etc. Also
        returns any variables that were included with the role

        Search precedence: <basedir>/roles/<name>, then <basedir>/<name>,
        then each entry of the configured roles path.  Raises
        errors.AnsibleError when no candidate directory exists.
        """
        # the role entry itself may be templated
        orig_path = template(self.basedir,role,self.vars)

        role_vars = {}
        if type(orig_path) == dict:
            # what, not a path?
            # dict form: {role: name, var1: ..., var2: ...} -- the extra keys
            # are parameterized role variables
            role_name = orig_path.get('role', None)
            if role_name is None:
                raise errors.AnsibleError("expected a role name in dictionary: %s" % orig_path)
            role_vars = orig_path
        else:
            role_name = utils.role_spec_parse(orig_path)["name"]

        role_path = None

        possible_paths = [
            utils.path_dwim(self.basedir, os.path.join('roles', role_name)),
            utils.path_dwim(self.basedir, role_name)
        ]

        # configured roles search path entries come after the local candidates
        if C.DEFAULT_ROLES_PATH:
            search_locations = C.DEFAULT_ROLES_PATH.split(os.pathsep)
            for loc in search_locations:
                loc = os.path.expanduser(loc)
                possible_paths.append(utils.path_dwim(loc, role_name))

        # first existing directory wins
        for path_option in possible_paths:
            if os.path.isdir(path_option):
                role_path = path_option
                break

        if role_path is None:
            raise errors.AnsibleError("cannot find role in %s" % " or ".join(possible_paths))

        return (role_path, role_vars)
+
+ def _build_role_dependencies(self, roles, dep_stack, passed_vars={}, level=0):
+ # this number is arbitrary, but it seems sane
+ if level > 20:
+ raise errors.AnsibleError("too many levels of recursion while resolving role dependencies")
+ for role in roles:
+ role_path,role_vars = self._get_role_path(role)
+
+ # save just the role params for this role, which exclude the special
+ # keywords 'role', 'tags', and 'when'.
+ role_params = role_vars.copy()
+ for item in ('role', 'tags', 'when'):
+ if item in role_params:
+ del role_params[item]
+
+ role_vars = utils.combine_vars(passed_vars, role_vars)
+
+ vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'vars')))
+ vars_data = {}
+ if os.path.isfile(vars):
+ vars_data = utils.parse_yaml_from_file(vars, vault_password=self.vault_password)
+ if vars_data:
+ if not isinstance(vars_data, dict):
+ raise errors.AnsibleError("vars from '%s' are not a dict" % vars)
+ role_vars = utils.combine_vars(vars_data, role_vars)
+
+ defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults')))
+ defaults_data = {}
+ if os.path.isfile(defaults):
+ defaults_data = utils.parse_yaml_from_file(defaults, vault_password=self.vault_password)
+
+ # the meta directory contains the yaml that should
+ # hold the list of dependencies (if any)
+ meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(role_path, 'meta')))
+ if os.path.isfile(meta):
+ data = utils.parse_yaml_from_file(meta, vault_password=self.vault_password)
+ if data:
+ dependencies = data.get('dependencies',[])
+ if dependencies is None:
+ dependencies = []
+ for dep in dependencies:
+ allow_dupes = False
+ (dep_path,dep_vars) = self._get_role_path(dep)
+
+ # save the dep params, just as we did above
+ dep_params = dep_vars.copy()
+ for item in ('role', 'tags', 'when'):
+ if item in dep_params:
+ del dep_params[item]
+
+ meta = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'meta')))
+ if os.path.isfile(meta):
+ meta_data = utils.parse_yaml_from_file(meta, vault_password=self.vault_password)
+ if meta_data:
+ allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))
+
+ # if any tags were specified as role/dep variables, merge
+ # them into the current dep_vars so they're passed on to any
+ # further dependencies too, and so we only have one place
+ # (dep_vars) to look for tags going forward
+ def __merge_tags(var_obj):
+ old_tags = dep_vars.get('tags', [])
+ if isinstance(old_tags, basestring):
+ old_tags = [old_tags, ]
+ if isinstance(var_obj, dict):
+ new_tags = var_obj.get('tags', [])
+ if isinstance(new_tags, basestring):
+ new_tags = [new_tags, ]
+ else:
+ new_tags = []
+ return list(set(old_tags).union(set(new_tags)))
+
+ dep_vars['tags'] = __merge_tags(role_vars)
+ dep_vars['tags'] = __merge_tags(passed_vars)
+
+ # if tags are set from this role, merge them
+ # into the tags list for the dependent role
+ if "tags" in passed_vars:
+ for included_role_dep in dep_stack:
+ included_dep_name = included_role_dep[0]
+ included_dep_vars = included_role_dep[2]
+ if included_dep_name == dep:
+ if "tags" in included_dep_vars:
+ included_dep_vars["tags"] = list(set(included_dep_vars["tags"]).union(set(passed_vars["tags"])))
+ else:
+ included_dep_vars["tags"] = passed_vars["tags"][:]
+
+ dep_vars = utils.combine_vars(passed_vars, dep_vars)
+ dep_vars = utils.combine_vars(role_vars, dep_vars)
+
+ vars = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'vars')))
+ vars_data = {}
+ if os.path.isfile(vars):
+ vars_data = utils.parse_yaml_from_file(vars, vault_password=self.vault_password)
+ if vars_data:
+ dep_vars = utils.combine_vars(dep_vars, vars_data)
+ pass
+
+ defaults = self._resolve_main(utils.path_dwim(self.basedir, os.path.join(dep_path, 'defaults')))
+ dep_defaults_data = {}
+ if os.path.isfile(defaults):
+ dep_defaults_data = utils.parse_yaml_from_file(defaults, vault_password=self.vault_password)
+ if 'role' in dep_vars:
+ del dep_vars['role']
+
+ if not allow_dupes:
+ if dep in self.included_roles:
+ # skip back to the top, since we don't want to
+ # do anything else with this role
+ continue
+ else:
+ self.included_roles.append(dep)
+
+ def _merge_conditional(cur_conditionals, new_conditionals):
+ if isinstance(new_conditionals, (basestring, bool)):
+ cur_conditionals.append(new_conditionals)
+ elif isinstance(new_conditionals, list):
+ cur_conditionals.extend(new_conditionals)
+
+ # pass along conditionals from roles to dep roles
+ passed_when = passed_vars.get('when')
+ role_when = role_vars.get('when')
+ dep_when = dep_vars.get('when')
+
+ tmpcond = []
+ _merge_conditional(tmpcond, passed_when)
+ _merge_conditional(tmpcond, role_when)
+ _merge_conditional(tmpcond, dep_when)
+
+ if len(tmpcond) > 0:
+ dep_vars['when'] = tmpcond
+
+ self._build_role_dependencies([dep], dep_stack, passed_vars=dep_vars, level=level+1)
+ dep_stack.append([dep, dep_path, dep_vars, dep_params, dep_defaults_data])
+
+ # only add the current role when we're at the top level,
+ # otherwise we'll end up in a recursive loop
+ if level == 0:
+ self.included_roles.append(role)
+ dep_stack.append([role, role_path, role_vars, role_params, defaults_data])
+ return dep_stack
+
+ def _load_role_vars_files(self, vars_files):
+ # process variables stored in vars/main.yml files
+ role_vars = {}
+ for filename in vars_files:
+ if os.path.exists(filename):
+ new_vars = utils.parse_yaml_from_file(filename, vault_password=self.vault_password)
+ if new_vars:
+ if type(new_vars) != dict:
+ raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename, type(new_vars)))
+ role_vars = utils.combine_vars(role_vars, new_vars)
+
+ return role_vars
+
+ def _load_role_defaults(self, defaults_files):
+ # process default variables
+ default_vars = {}
+ for filename in defaults_files:
+ if os.path.exists(filename):
+ new_default_vars = utils.parse_yaml_from_file(filename, vault_password=self.vault_password)
+ if new_default_vars:
+ if type(new_default_vars) != dict:
+ raise errors.AnsibleError("%s must be stored as dictionary/hash: %s" % (filename, type(new_default_vars)))
+ default_vars = utils.combine_vars(default_vars, new_default_vars)
+
+ return default_vars
+
def _load_roles(self, roles, ds):
    """Expand the play's 'roles:' list into the ds datastructure in place.

    Resolves role dependencies, then for each role injects its tasks,
    handlers, vars files, defaults files and library directory.  Returns
    the mutated ``ds`` with 'tasks', 'handlers' and 'role_names' rebuilt
    as: pre_tasks, flush, role tasks, play tasks, flush, post_tasks, flush.
    """
    # a role is a name that auto-includes the following if they exist
    #    <rolename>/tasks/main.yml
    #    <rolename>/handlers/main.yml
    #    <rolename>/vars/main.yml
    #    <rolename>/library
    # and it auto-extends tasks/handlers/vars_files/module paths as appropriate if found

    if roles is None:
        roles = []
    if type(roles) != list:
        raise errors.AnsibleError("value of 'roles:' must be a list")

    new_tasks = []
    new_handlers = []
    role_vars_files = []
    defaults_files = []

    # pre_tasks run before any role content; non-list values are ignored
    pre_tasks = ds.get('pre_tasks', None)
    if type(pre_tasks) != list:
        pre_tasks = []
    for x in pre_tasks:
        new_tasks.append(x)

    # flush handlers after pre_tasks
    new_tasks.append(dict(meta='flush_handlers'))

    # expand the role list to include all dependencies, in execution order
    roles = self._build_role_dependencies(roles, [], {})

    # give each role an uuid and
    # make role_path available as variable to the task
    # (each roles entry is [role, role_path, role_vars, role_params, defaults],
    # so index -3 is the role_vars dict)
    for idx, val in enumerate(roles):
        this_uuid = str(uuid.uuid4())
        roles[idx][-3]['role_uuid'] = this_uuid
        roles[idx][-3]['role_path'] = roles[idx][1]

    role_names = []

    for (role, role_path, role_vars, role_params, default_vars) in roles:
        # special vars must be extracted from the dict to the included tasks
        special_keys = [ "sudo", "sudo_user", "when", "with_items", "su", "su_user", "become", "become_user" ]
        special_vars = {}
        for k in special_keys:
            if k in role_vars:
                special_vars[k] = role_vars[k]

        task_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'tasks'))
        handler_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'handlers'))
        vars_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'vars'))
        meta_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'meta'))
        defaults_basepath = utils.path_dwim(self.basedir, os.path.join(role_path, 'defaults'))

        # _resolve_main picks main/main.yml/main.yaml/main.json per directory
        task = self._resolve_main(task_basepath)
        handler = self._resolve_main(handler_basepath)
        vars_file = self._resolve_main(vars_basepath)
        meta_file = self._resolve_main(meta_basepath)
        defaults_file = self._resolve_main(defaults_basepath)

        library = utils.path_dwim(self.basedir, os.path.join(role_path, 'library'))

        # a role directory with none of the recognized contents is an error
        missing = lambda f: not os.path.isfile(f)
        if missing(task) and missing(handler) and missing(vars_file) and missing(defaults_file) and missing(meta_file) and not os.path.isdir(library):
            raise errors.AnsibleError("found role at %s, but cannot find %s or %s or %s or %s or %s or %s" % (role_path, task, handler, vars_file, defaults_file, meta_file, library))

        if isinstance(role, dict):
            role_name = role['role']
        else:
            role_name = utils.role_spec_parse(role)["name"]

        role_names.append(role_name)
        if os.path.isfile(task):
            # the role's tasks become an include task; pipes.quote guards
            # against whitespace/shell metacharacters in the path
            nt = dict(include=pipes.quote(task), vars=role_vars, role_params=role_params, default_vars=default_vars, role_name=role_name)
            for k in special_keys:
                if k in special_vars:
                    nt[k] = special_vars[k]
            new_tasks.append(nt)
        if os.path.isfile(handler):
            nt = dict(include=pipes.quote(handler), vars=role_vars, role_params=role_params, role_name=role_name)
            for k in special_keys:
                if k in special_vars:
                    nt[k] = special_vars[k]
            new_handlers.append(nt)
        if os.path.isfile(vars_file):
            role_vars_files.append(vars_file)
        if os.path.isfile(defaults_file):
            defaults_files.append(defaults_file)
        if os.path.isdir(library):
            utils.plugins.module_finder.add_directory(library)

    tasks = ds.get('tasks', None)
    post_tasks = ds.get('post_tasks', None)
    handlers = ds.get('handlers', None)
    vars_files = ds.get('vars_files', None)

    if type(tasks) != list:
        tasks = []
    if type(handlers) != list:
        handlers = []
    if type(vars_files) != list:
        vars_files = []
    if type(post_tasks) != list:
        post_tasks = []

    new_tasks.extend(tasks)
    # flush handlers after tasks + role tasks
    new_tasks.append(dict(meta='flush_handlers'))
    new_tasks.extend(post_tasks)
    # flush handlers after post tasks
    new_tasks.append(dict(meta='flush_handlers'))

    new_handlers.extend(handlers)

    ds['tasks'] = new_tasks
    ds['handlers'] = new_handlers
    ds['role_names'] = role_names

    # stash merged role vars/defaults on the play for later task loading
    self.role_vars = self._load_role_vars_files(role_vars_files)
    self.default_vars = self._load_role_defaults(defaults_files)

    return ds
+
+ # *************************************************
+
+ def _resolve_main(self, basepath):
+ ''' flexibly handle variations in main filenames '''
+ # these filenames are acceptable:
+ mains = (
+ os.path.join(basepath, 'main'),
+ os.path.join(basepath, 'main.yml'),
+ os.path.join(basepath, 'main.yaml'),
+ os.path.join(basepath, 'main.json'),
+ )
+ if sum([os.path.isfile(x) for x in mains]) > 1:
+ raise errors.AnsibleError("found multiple main files at %s, only one allowed" % (basepath))
+ else:
+ for m in mains:
+ if os.path.isfile(m):
+ return m # exactly one main file
+ return mains[0] # zero mains (we still need to return something)
+
+ # *************************************************
+
def _load_tasks(self, tasks, vars=None, role_params=None, default_vars=None, become_vars=None,
    additional_conditions=None, original_file=None, role_name=None):
    ''' handle task and handler include statements '''
    # Recursively expands a raw list of task dicts into Task objects,
    # following 'include:' statements, propagating privilege-escalation
    # settings, conditionals, and role identity down into included files.
    # Returns the flat list of Task objects.

    results = []
    if tasks is None:
        # support empty handler files, and the like.
        tasks = []
    if additional_conditions is None:
        additional_conditions = []
    if vars is None:
        vars = {}
    if role_params is None:
        role_params = {}
    if default_vars is None:
        default_vars = {}
    if become_vars is None:
        become_vars = {}

    old_conditions = list(additional_conditions)

    for x in tasks:

        # prevent assigning the same conditions to each task on an include
        included_additional_conditions = list(old_conditions)

        if not isinstance(x, dict):
            raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))

        # evaluate privilege escalation vars for current and child tasks
        # (the task's own setting wins; otherwise inherit and write the
        # inherited value back onto the task dict)
        included_become_vars = {}
        for k in ["become", "become_user", "become_method", "become_exe", "sudo", "su", "sudo_user", "su_user"]:
            if k in x:
                included_become_vars[k] = x[k]
            elif k in become_vars:
                included_become_vars[k] = become_vars[k]
                x[k] = become_vars[k]

        task_vars = vars.copy()
        if original_file:
            task_vars['_original_file'] = original_file

        # meta tasks (e.g. flush_handlers) are wrapped as Task objects
        # but short-circuit the rest of the processing
        if 'meta' in x:
            if x['meta'] == 'flush_handlers':
                if role_name and 'role_name' not in x:
                    x['role_name'] = role_name
                results.append(Task(self, x, module_vars=task_vars, role_name=role_name))
                continue

        if 'include' in x:
            tokens = split_args(str(x['include']))
            included_additional_conditions = list(additional_conditions)
            include_vars = {}
            # classify each key on the include statement: removed features,
            # conditionals, reserved keys, or plain vars to pass down
            for k in x:
                if k.startswith("with_"):
                    if original_file:
                        offender = " (in %s)" % original_file
                    else:
                        offender = ""
                    utils.deprecated("include + with_items is a removed deprecated feature" + offender, "1.5", removed=True)
                elif k.startswith("when_"):
                    utils.deprecated("\"when_:\" is a removed deprecated feature, use the simplified 'when:' conditional directly", None, removed=True)
                elif k == 'when':
                    if isinstance(x[k], (basestring, bool)):
                        included_additional_conditions.append(x[k])
                    elif type(x[k]) is list:
                        included_additional_conditions.extend(x[k])
                elif k in ("include", "vars", "role_params", "default_vars", "sudo", "sudo_user", "role_name", "no_log", "become", "become_user", "su", "su_user"):
                    continue
                else:
                    include_vars[k] = x[k]

            # get any role parameters specified
            role_params = x.get('role_params', {})

            # get any role default variables specified
            default_vars = x.get('default_vars', {})
            if not default_vars:
                default_vars = self.default_vars
            else:
                default_vars = utils.combine_vars(self.default_vars, default_vars)

            # append the vars defined with the include (from above)
            # as well as the old-style 'vars' element. The old-style
            # vars are given higher precedence here (just in case)
            task_vars = utils.combine_vars(task_vars, include_vars)
            if 'vars' in x:
                task_vars = utils.combine_vars(task_vars, x['vars'])

            new_role = None
            if 'role_name' in x:
                new_role = x['role_name']

            # parse k=v parameters on the include line, templating each value
            mv = task_vars.copy()
            for t in tokens[1:]:
                (k,v) = t.split("=", 1)
                v = unquote(v)
                mv[k] = template(self.basedir, v, mv)
            # included file paths are relative to the including file
            dirname = self.basedir
            if original_file:
                dirname = os.path.dirname(original_file)

            # temp vars are used here to avoid trampling on the existing vars structures
            temp_vars = utils.combine_vars(self.vars, self.vars_file_vars)
            temp_vars = utils.combine_vars(temp_vars, mv)
            temp_vars = utils.combine_vars(temp_vars, self.playbook.extra_vars)
            include_file = template(dirname, tokens[0], temp_vars)
            include_filename = utils.path_dwim(dirname, include_file)

            data = utils.parse_yaml_from_file(include_filename, vault_password=self.vault_password)
            # propagate the role identity onto nested includes in the file
            if 'role_name' in x and data is not None:
                for y in data:
                    if isinstance(y, dict) and 'include' in y:
                        y['role_name'] = new_role
            loaded = self._load_tasks(data, mv, role_params, default_vars, included_become_vars, list(included_additional_conditions), original_file=include_filename, role_name=new_role)
            results += loaded
        elif type(x) == dict:
            # a plain task: build the Task object with full var context
            task = Task(
                self, x,
                module_vars=task_vars,
                play_vars=self.vars,
                play_file_vars=self.vars_file_vars,
                role_vars=self.role_vars,
                role_params=role_params,
                default_vars=default_vars,
                additional_conditions=list(additional_conditions),
                role_name=role_name
            )
            results.append(task)
        else:
            raise Exception("unexpected task type")

    # apply play-level tags to every resulting task
    for x in results:
        if self.tags is not None:
            x.tags.extend(self.tags)

    return results
+
+ # *************************************************
+
def tasks(self):
    """Return the compiled Task objects for this play (the live list, not a copy)."""
    compiled = self._tasks
    return compiled
+
def handlers(self):
    """Return the compiled handler objects for this play (the live list, not a copy)."""
    compiled = self._handlers
    return compiled
+
+ # *************************************************
+
def _get_vars(self):
    ''' load the vars section from a play, accounting for all sorts of variable features
    including loading from yaml files, prompting, and conditional includes of the first
    file found in a list. '''

    if self.vars is None:
        self.vars = {}

    if type(self.vars) not in [dict, list]:
        raise errors.AnsibleError("'vars' section must contain only key/value pairs")

    vars = {}

    # translate a list of vars into a dict
    # (each list item must be a one-entry mapping; only its first pair is used)
    if type(self.vars) == list:
        for item in self.vars:
            if getattr(item, 'items', None) is None:
                raise errors.AnsibleError("expecting a key-value pair in 'vars' section")
            k, v = item.items()[0]
            vars[k] = v
    else:
        vars.update(self.vars)

    # vars_prompt: list form carries the full prompt options per variable
    if type(self.vars_prompt) == list:
        for var in self.vars_prompt:
            if not 'name' in var:
                raise errors.AnsibleError("'vars_prompt' item is missing 'name:'")

            vname = var['name']
            prompt = var.get("prompt", vname)
            default = var.get("default", None)
            private = var.get("private", True)

            confirm = var.get("confirm", False)
            encrypt = var.get("encrypt", None)
            salt_size = var.get("salt_size", None)
            salt = var.get("salt", None)

            # a value supplied via extra_vars suppresses the prompt entirely
            if vname not in self.playbook.extra_vars:
                vars[vname] = self.playbook.callbacks.on_vars_prompt(
                    vname, private, prompt, encrypt, confirm, salt_size, salt, default
                )

    # dict form is the legacy shorthand: {varname: prompt text}
    elif type(self.vars_prompt) == dict:
        for (vname, prompt) in self.vars_prompt.iteritems():
            prompt_msg = "%s: " % prompt
            if vname not in self.playbook.extra_vars:
                vars[vname] = self.playbook.callbacks.on_vars_prompt(
                    varname=vname, private=False, prompt=prompt_msg, default=None
                )

    else:
        raise errors.AnsibleError("'vars_prompt' section is malformed, see docs")

    # extra_vars (command line -e) always win over play vars
    if type(self.playbook.extra_vars) == dict:
        vars = utils.combine_vars(vars, self.playbook.extra_vars)

    return vars
+
+ # *************************************************
+
def update_vars_files(self, hosts, vault_password=None):
    """Recalculate vars_files for every host in *hosts*.

    Setup (fact gathering) must have run first so ansible facts can be
    mixed in; the per-host work is delegated to _update_vars_files_for_host.
    """
    for host in hosts:
        self._update_vars_files_for_host(host, vault_password=vault_password)
+
+ # *************************************************
+
def compare_tags(self, tags):
    """Split this play's tags against the user-supplied *tags* list.

    Returns (matched_tags, unmatched_tags): tags present in the play's
    tasks/handlers that the user asked for, and tags present in the play
    but not requested.  The special tags 'always', 'all', 'tagged' and
    'untagged' receive their usual special handling.
    """
    # collect every tag attached to real (non-meta) tasks and to handlers
    play_tags = []
    for t in self._tasks:
        if not t.meta:
            play_tags.extend(t.tags)
    for h in self._handlers:
        play_tags.extend(h.tags)

    play_tag_set = set(play_tags)
    wanted = set(tags)

    matched_tags = play_tag_set & wanted
    unmatched_tags = play_tag_set - wanted

    always = set(['always'])
    untagged = set(['untagged'])

    # 'always'-tagged content matches regardless of what was requested
    if 'always' in play_tag_set:
        matched_tags = matched_tags | always
        unmatched_tags = play_tag_set - always

    # requesting 'all' matches everything in the play
    if 'all' in wanted:
        matched_tags = matched_tags | play_tag_set
        unmatched_tags = set()

    # 'tagged' selects everything except untagged content
    if 'tagged' in wanted:
        matched_tags = play_tag_set - untagged
        unmatched_tags = untagged

    # 'untagged' pulls untagged content back into the matched set
    if 'untagged' in wanted and 'untagged' in play_tag_set:
        matched_tags = matched_tags | untagged
        unmatched_tags = unmatched_tags - untagged

    return matched_tags, unmatched_tags
+
+ # *************************************************
+
+ def _late_merge_role_tags(self):
+ # build a local dict of tags for roles
+ role_tags = {}
+ for task in self._ds['tasks']:
+ if 'role_name' in task:
+ this_role = task['role_name'] + "-" + task['vars']['role_uuid']
+
+ if this_role not in role_tags:
+ role_tags[this_role] = []
+
+ if 'tags' in task['vars']:
+ if isinstance(task['vars']['tags'], basestring):
+ role_tags[this_role] += shlex.split(task['vars']['tags'])
+ else:
+ role_tags[this_role] += task['vars']['tags']
+
+ # apply each role's tags to its tasks
+ for idx, val in enumerate(self._tasks):
+ if getattr(val, 'role_name', None) is not None:
+ this_role = val.role_name + "-" + val.module_vars['role_uuid']
+ if this_role in role_tags:
+ self._tasks[idx].tags = sorted(set(self._tasks[idx].tags + role_tags[this_role]))
+
+ # *************************************************
+
def _update_vars_files_for_host(self, host, vault_password=None):
    """Load this play's vars_files for one host (or for the play itself).

    When ``host`` is None the loaded vars go into the play-scoped
    ``self.vars_file_vars``; otherwise they go into the playbook's
    per-host VARS_CACHE.  Returns the list of vars_files entries that
    were actually processed.
    """

    def generate_filenames(host, inject, filename):

        """ Render the raw filename into 3 forms """

        # filename2 is the templated version of the filename, which will
        # be fully rendered if any variables contained within it are
        # non-inventory related
        filename2 = template(self.basedir, filename, self.vars)

        # filename3 is the same as filename2, but when the host object is
        # available, inventory variables will be expanded as well since the
        # name is templated with the injected variables
        filename3 = filename2
        if host is not None:
            filename3 = template(self.basedir, filename2, inject)

        # filename4 is the dwim'd path, but may also be mixed-scope, so we use
        # both play scoped vars and host scoped vars to template the filepath
        if utils.contains_vars(filename3) and host is not None:
            inject.update(self.vars)
            filename4 = template(self.basedir, filename3, inject)
            filename4 = utils.path_dwim(self.basedir, filename4)
        else:
            filename4 = utils.path_dwim(self.basedir, filename3)

        return filename2, filename3, filename4


    def update_vars_cache(host, data, target_filename=None):

        """ update a host's varscache with new var data """

        self.playbook.VARS_CACHE[host] = utils.combine_vars(self.playbook.VARS_CACHE.get(host, {}), data)
        if target_filename:
            self.playbook.callbacks.on_import_for_host(host, target_filename)

    def process_files(filename, filename2, filename3, filename4, host=None):

        """ pseudo-algorithm for deciding where new vars should go """

        data = utils.parse_yaml_from_file(filename4, vault_password=self.vault_password)
        if data:
            if type(data) != dict:
                raise errors.AnsibleError("%s must be stored as a dictionary/hash" % filename4)
            if host is not None:
                # report the least-resolved filename that is fully rendered,
                # so callbacks show a stable name across hosts when possible
                target_filename = None
                if utils.contains_vars(filename2):
                    if not utils.contains_vars(filename3):
                        target_filename = filename3
                    else:
                        target_filename = filename4
                update_vars_cache(host, data, target_filename=target_filename)
            else:
                self.vars_file_vars = utils.combine_vars(self.vars_file_vars, data)
            # we did process this file
            return True
        # we did not process this file
        return False

    # Enforce that vars_files is always a list
    if type(self.vars_files) != list:
        self.vars_files = [ self.vars_files ]

    # Build an inject if this is a host run started by self.update_vars_files
    if host is not None:
        inject = {}
        inject.update(self.playbook.inventory.get_variables(host, vault_password=vault_password))
        inject.update(self.playbook.SETUP_CACHE.get(host, {}))
        inject.update(self.playbook.VARS_CACHE.get(host, {}))
    else:
        inject = None

    processed = []
    for filename in self.vars_files:
        if type(filename) == list:
            # loop over all filenames, loading the first one, and failing if none found
            found = False
            sequence = []
            for real_filename in filename:
                filename2, filename3, filename4 = generate_filenames(host, inject, real_filename)
                sequence.append(filename4)
                if os.path.exists(filename4):
                    found = True
                    if process_files(filename, filename2, filename3, filename4, host=host):
                        processed.append(filename)
                elif host is not None:
                    self.playbook.callbacks.on_not_import_for_host(host, filename4)
                if found:
                    break
            if not found and host is not None:
                raise errors.AnsibleError(
                    "%s: FATAL, no files matched for vars_files import sequence: %s" % (host, sequence)
                )
        else:
            # just one filename supplied, load it!
            # (paths that still contain unresolved vars are skipped here)
            filename2, filename3, filename4 = generate_filenames(host, inject, filename)
            if utils.contains_vars(filename4):
                continue
            if process_files(filename, filename2, filename3, filename4, host=host):
                processed.append(filename)

    return processed
diff --git a/v1/ansible/playbook/task.py b/v1/ansible/playbook/task.py
new file mode 100644
index 0000000000..70c1bc8df6
--- /dev/null
+++ b/v1/ansible/playbook/task.py
@@ -0,0 +1,346 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+from ansible import errors
+from ansible import utils
+from ansible.module_utils.splitter import split_args
+import os
+import ansible.utils.template as template
+import sys
+
+class Task(object):
+
    # keys accepted both as Task attributes and as raw task-datastructure keys
    _t_common = [
        'action', 'always_run', 'any_errors_fatal', 'args', 'become', 'become_method', 'become_pass',
        'become_user', 'changed_when', 'delay', 'delegate_to', 'environment', 'failed_when',
        'first_available_file', 'ignore_errors', 'local_action', 'meta', 'name', 'no_log',
        'notify', 'register', 'remote_user', 'retries', 'run_once', 'su', 'su_pass', 'su_user',
        'sudo', 'sudo_pass', 'sudo_user', 'tags', 'transport', 'until', 'when',
    ]

    # restrict instances to this fixed attribute set (saves per-instance
    # memory and turns attribute typos into errors)
    __slots__ = [
        'async_poll_interval', 'async_seconds', 'default_vars', 'first_available_file',
        'items_lookup_plugin', 'items_lookup_terms', 'module_args', 'module_name', 'module_vars',
        'notified_by', 'play', 'play_file_vars', 'play_vars', 'role_name', 'role_params', 'role_vars',
    ] + _t_common

    # to prevent typos and such
    VALID_KEYS = frozenset([
        'async', 'connection', 'include', 'poll',
    ] + _t_common)
+
+ def __init__(self, play, ds, module_vars=None, play_vars=None, play_file_vars=None, role_vars=None, role_params=None, default_vars=None, additional_conditions=None, role_name=None):
+ ''' constructor loads from a task or handler datastructure '''
+
+ # meta directives are used to tell things like ansible/playbook to run
+ # operations like handler execution. Meta tasks are not executed
+ # normally.
+ if 'meta' in ds:
+ self.meta = ds['meta']
+ self.tags = []
+ self.module_vars = module_vars
+ self.role_name = role_name
+ return
+ else:
+ self.meta = None
+
+
+ library = os.path.join(play.basedir, 'library')
+ if os.path.exists(library):
+ utils.plugins.module_finder.add_directory(library)
+
+ for x in ds.keys():
+
+ # code to allow for saying "modulename: args" versus "action: modulename args"
+ if x in utils.plugins.module_finder:
+
+ if 'action' in ds:
+ raise errors.AnsibleError("multiple actions specified in task: '%s' and '%s'" % (x, ds.get('name', ds['action'])))
+ if isinstance(ds[x], dict):
+ if 'args' in ds:
+ raise errors.AnsibleError("can't combine args: and a dict for %s: in task %s" % (x, ds.get('name', "%s: %s" % (x, ds[x]))))
+ ds['args'] = ds[x]
+ ds[x] = ''
+ elif ds[x] is None:
+ ds[x] = ''
+ if not isinstance(ds[x], basestring):
+ raise errors.AnsibleError("action specified for task %s has invalid type %s" % (ds.get('name', "%s: %s" % (x, ds[x])), type(ds[x])))
+ ds['action'] = x + " " + ds[x]
+ ds.pop(x)
+
+ # code to allow "with_glob" and to reference a lookup plugin named glob
+ elif x.startswith("with_"):
+ if isinstance(ds[x], basestring):
+ param = ds[x].strip()
+
+ plugin_name = x.replace("with_","")
+ if plugin_name in utils.plugins.lookup_loader:
+ ds['items_lookup_plugin'] = plugin_name
+ ds['items_lookup_terms'] = ds[x]
+ ds.pop(x)
+ else:
+ raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
+
+ elif x in [ 'changed_when', 'failed_when', 'when']:
+ if isinstance(ds[x], basestring):
+ param = ds[x].strip()
+ # Only a variable, no logic
+ if (param.startswith('{{') and
+ param.find('}}') == len(ds[x]) - 2 and
+ param.find('|') == -1):
+ utils.warning("It is unnecessary to use '{{' in conditionals, leave variables in loop expressions bare.")
+ elif x.startswith("when_"):
+ utils.deprecated("The 'when_' conditional has been removed. Switch to using the regular unified 'when' statements as described on docs.ansible.com.","1.5", removed=True)
+
+ if 'when' in ds:
+ raise errors.AnsibleError("multiple when_* statements specified in task %s" % (ds.get('name', ds['action'])))
+ when_name = x.replace("when_","")
+ ds['when'] = "%s %s" % (when_name, ds[x])
+ ds.pop(x)
+ elif not x in Task.VALID_KEYS:
+ raise errors.AnsibleError("%s is not a legal parameter in an Ansible task or handler" % x)
+
+ self.module_vars = module_vars
+ self.play_vars = play_vars
+ self.play_file_vars = play_file_vars
+ self.role_vars = role_vars
+ self.role_params = role_params
+ self.default_vars = default_vars
+ self.play = play
+
+ # load various attributes
+ self.name = ds.get('name', None)
+ self.tags = [ 'untagged' ]
+ self.register = ds.get('register', None)
+ self.environment = ds.get('environment', play.environment)
+ self.role_name = role_name
+ self.no_log = utils.boolean(ds.get('no_log', "false")) or self.play.no_log
+ self.run_once = utils.boolean(ds.get('run_once', 'false'))
+
+ #Code to allow do until feature in a Task
+ if 'until' in ds:
+ if not ds.get('register'):
+ raise errors.AnsibleError("register keyword is mandatory when using do until feature")
+ self.module_vars['delay'] = ds.get('delay', 5)
+ self.module_vars['retries'] = ds.get('retries', 3)
+ self.module_vars['register'] = ds.get('register', None)
+ self.until = ds.get('until')
+ self.module_vars['until'] = self.until
+
+ # rather than simple key=value args on the options line, these represent structured data and the values
+ # can be hashes and lists, not just scalars
+ self.args = ds.get('args', {})
+
+ # get remote_user for task, then play, then playbook
+ if ds.get('remote_user') is not None:
+ self.remote_user = ds.get('remote_user')
+ elif ds.get('remote_user', play.remote_user) is not None:
+ self.remote_user = ds.get('remote_user', play.remote_user)
+ else:
+ self.remote_user = ds.get('remote_user', play.playbook.remote_user)
+
+ # Fail out if user specifies privilege escalation params in conflict
+ if (ds.get('become') or ds.get('become_user') or ds.get('become_pass')) and (ds.get('sudo') or ds.get('sudo_user') or ds.get('sudo_pass')):
+ raise errors.AnsibleError('incompatible parameters ("become", "become_user", "become_pass") and sudo params "sudo", "sudo_user", "sudo_pass" in task: %s' % self.name)
+
+ if (ds.get('become') or ds.get('become_user') or ds.get('become_pass')) and (ds.get('su') or ds.get('su_user') or ds.get('su_pass')):
+ raise errors.AnsibleError('incompatible parameters ("become", "become_user", "become_pass") and su params "su", "su_user", "sudo_pass" in task: %s' % self.name)
+
+ if (ds.get('sudo') or ds.get('sudo_user') or ds.get('sudo_pass')) and (ds.get('su') or ds.get('su_user') or ds.get('su_pass')):
+ raise errors.AnsibleError('incompatible parameters ("su", "su_user", "su_pass") and sudo params "sudo", "sudo_user", "sudo_pass" in task: %s' % self.name)
+
+ self.become = utils.boolean(ds.get('become', play.become))
+ self.become_method = ds.get('become_method', play.become_method)
+ self.become_user = ds.get('become_user', play.become_user)
+ self.become_pass = ds.get('become_pass', play.playbook.become_pass)
+
+ # set only if passed in current task data
+ if 'sudo' in ds or 'sudo_user' in ds:
+ self.become_method='sudo'
+
+ if 'sudo' in ds:
+ self.become=ds['sudo']
+ del ds['sudo']
+ else:
+ self.become=True
+ if 'sudo_user' in ds:
+ self.become_user = ds['sudo_user']
+ del ds['sudo_user']
+ if 'sudo_pass' in ds:
+ self.become_pass = ds['sudo_pass']
+ del ds['sudo_pass']
+
+ elif 'su' in ds or 'su_user' in ds:
+ self.become_method='su'
+
+ if 'su' in ds:
+ self.become=ds['su']
+ else:
+ self.become=True
+ del ds['su']
+ if 'su_user' in ds:
+ self.become_user = ds['su_user']
+ del ds['su_user']
+ if 'su_pass' in ds:
+ self.become_pass = ds['su_pass']
+ del ds['su_pass']
+
+ # Both are defined
+ if ('action' in ds) and ('local_action' in ds):
+ raise errors.AnsibleError("the 'action' and 'local_action' attributes can not be used together")
+ # Both are NOT defined
+ elif (not 'action' in ds) and (not 'local_action' in ds):
+ raise errors.AnsibleError("'action' or 'local_action' attribute missing in task \"%s\"" % ds.get('name', ''))
+ # Only one of them is defined
+ elif 'local_action' in ds:
+ self.action = ds.get('local_action', '')
+ self.delegate_to = '127.0.0.1'
+ else:
+ self.action = ds.get('action', '')
+ self.delegate_to = ds.get('delegate_to', None)
+ self.transport = ds.get('connection', ds.get('transport', play.transport))
+
+ if isinstance(self.action, dict):
+ if 'module' not in self.action:
+ raise errors.AnsibleError("'module' attribute missing from action in task \"%s\"" % ds.get('name', '%s' % self.action))
+ if self.args:
+ raise errors.AnsibleError("'args' cannot be combined with dict 'action' in task \"%s\"" % ds.get('name', '%s' % self.action))
+ self.args = self.action
+ self.action = self.args.pop('module')
+
+ # delegate_to can use variables
+ if not (self.delegate_to is None):
+ # delegate_to: localhost should use local transport
+ if self.delegate_to in ['127.0.0.1', 'localhost']:
+ self.transport = 'local'
+
+ # notified by is used by Playbook code to flag which hosts
+ # need to run a notifier
+ self.notified_by = []
+
+ # if no name is specified, use the action line as the name
+ if self.name is None:
+ self.name = self.action
+
+ # load various attributes
+ self.when = ds.get('when', None)
+ self.changed_when = ds.get('changed_when', None)
+ self.failed_when = ds.get('failed_when', None)
+
+ # combine the default and module vars here for use in templating
+ all_vars = self.default_vars.copy()
+ all_vars = utils.combine_vars(all_vars, self.play_vars)
+ all_vars = utils.combine_vars(all_vars, self.play_file_vars)
+ all_vars = utils.combine_vars(all_vars, self.role_vars)
+ all_vars = utils.combine_vars(all_vars, self.module_vars)
+ all_vars = utils.combine_vars(all_vars, self.role_params)
+
+ self.async_seconds = ds.get('async', 0) # not async by default
+ self.async_seconds = template.template_from_string(play.basedir, self.async_seconds, all_vars)
+ self.async_seconds = int(self.async_seconds)
+ self.async_poll_interval = ds.get('poll', 10) # default poll = 10 seconds
+ self.async_poll_interval = template.template_from_string(play.basedir, self.async_poll_interval, all_vars)
+ self.async_poll_interval = int(self.async_poll_interval)
+ self.notify = ds.get('notify', [])
+ self.first_available_file = ds.get('first_available_file', None)
+
+ self.items_lookup_plugin = ds.get('items_lookup_plugin', None)
+ self.items_lookup_terms = ds.get('items_lookup_terms', None)
+
+
+ self.ignore_errors = ds.get('ignore_errors', False)
+ self.any_errors_fatal = ds.get('any_errors_fatal', play.any_errors_fatal)
+
+ self.always_run = ds.get('always_run', False)
+
+ # action should be a string
+ if not isinstance(self.action, basestring):
+ raise errors.AnsibleError("action is of type '%s' and not a string in task. name: %s" % (type(self.action).__name__, self.name))
+
+ # notify can be a string or a list, store as a list
+ if isinstance(self.notify, basestring):
+ self.notify = [ self.notify ]
+
+ # split the action line into a module name + arguments
+ try:
+ tokens = split_args(self.action)
+ except Exception, e:
+ if "unbalanced" in str(e):
+ raise errors.AnsibleError("There was an error while parsing the task %s.\n" % repr(self.action) + \
+ "Make sure quotes are matched or escaped properly")
+ else:
+ raise
+ if len(tokens) < 1:
+ raise errors.AnsibleError("invalid/missing action in task. name: %s" % self.name)
+ self.module_name = tokens[0]
+ self.module_args = ''
+ if len(tokens) > 1:
+ self.module_args = " ".join(tokens[1:])
+
+ import_tags = self.module_vars.get('tags',[])
+ if type(import_tags) in [int,float]:
+ import_tags = str(import_tags)
+ elif type(import_tags) in [str,unicode]:
+ # allow the user to list comma delimited tags
+ import_tags = import_tags.split(",")
+
+ # handle mutually incompatible options
+ incompatibles = [ x for x in [ self.first_available_file, self.items_lookup_plugin ] if x is not None ]
+ if len(incompatibles) > 1:
+ raise errors.AnsibleError("with_(plugin), and first_available_file are mutually incompatible in a single task")
+
+ # make first_available_file accessible to Runner code
+ if self.first_available_file:
+ self.module_vars['first_available_file'] = self.first_available_file
+ # make sure that the 'item' variable is set when using
+ # first_available_file (issue #8220)
+ if 'item' not in self.module_vars:
+ self.module_vars['item'] = ''
+
+ if self.items_lookup_plugin is not None:
+ self.module_vars['items_lookup_plugin'] = self.items_lookup_plugin
+ self.module_vars['items_lookup_terms'] = self.items_lookup_terms
+
+ # allow runner to see delegate_to option
+ self.module_vars['delegate_to'] = self.delegate_to
+
+ # make some task attributes accessible to Runner code
+ self.module_vars['ignore_errors'] = self.ignore_errors
+ self.module_vars['register'] = self.register
+ self.module_vars['changed_when'] = self.changed_when
+ self.module_vars['failed_when'] = self.failed_when
+ self.module_vars['always_run'] = self.always_run
+
+ # tags allow certain parts of a playbook to be run without running the whole playbook
+ apply_tags = ds.get('tags', None)
+ if apply_tags is not None:
+ if type(apply_tags) in [ str, unicode ]:
+ self.tags.append(apply_tags)
+ elif type(apply_tags) in [ int, float ]:
+ self.tags.append(str(apply_tags))
+ elif type(apply_tags) == list:
+ self.tags.extend(apply_tags)
+ self.tags.extend(import_tags)
+
+ if len(self.tags) > 1:
+ self.tags.remove('untagged')
+
+ if additional_conditions:
+ new_conditions = additional_conditions[:]
+ if self.when:
+ new_conditions.append(self.when)
+ self.when = new_conditions
diff --git a/lib/ansible/runner/__init__.py b/v1/ansible/runner/__init__.py
similarity index 100%
rename from lib/ansible/runner/__init__.py
rename to v1/ansible/runner/__init__.py
diff --git a/lib/ansible/runner/lookup_plugins/__init__.py b/v1/ansible/runner/action_plugins/__init__.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/__init__.py
rename to v1/ansible/runner/action_plugins/__init__.py
diff --git a/lib/ansible/runner/action_plugins/add_host.py b/v1/ansible/runner/action_plugins/add_host.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/add_host.py
rename to v1/ansible/runner/action_plugins/add_host.py
diff --git a/lib/ansible/runner/action_plugins/assemble.py b/v1/ansible/runner/action_plugins/assemble.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/assemble.py
rename to v1/ansible/runner/action_plugins/assemble.py
diff --git a/lib/ansible/runner/action_plugins/assert.py b/v1/ansible/runner/action_plugins/assert.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/assert.py
rename to v1/ansible/runner/action_plugins/assert.py
diff --git a/lib/ansible/runner/action_plugins/async.py b/v1/ansible/runner/action_plugins/async.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/async.py
rename to v1/ansible/runner/action_plugins/async.py
diff --git a/lib/ansible/runner/action_plugins/copy.py b/v1/ansible/runner/action_plugins/copy.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/copy.py
rename to v1/ansible/runner/action_plugins/copy.py
diff --git a/lib/ansible/runner/action_plugins/debug.py b/v1/ansible/runner/action_plugins/debug.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/debug.py
rename to v1/ansible/runner/action_plugins/debug.py
diff --git a/lib/ansible/runner/action_plugins/fail.py b/v1/ansible/runner/action_plugins/fail.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/fail.py
rename to v1/ansible/runner/action_plugins/fail.py
diff --git a/lib/ansible/runner/action_plugins/fetch.py b/v1/ansible/runner/action_plugins/fetch.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/fetch.py
rename to v1/ansible/runner/action_plugins/fetch.py
diff --git a/lib/ansible/runner/action_plugins/group_by.py b/v1/ansible/runner/action_plugins/group_by.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/group_by.py
rename to v1/ansible/runner/action_plugins/group_by.py
diff --git a/lib/ansible/runner/action_plugins/include_vars.py b/v1/ansible/runner/action_plugins/include_vars.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/include_vars.py
rename to v1/ansible/runner/action_plugins/include_vars.py
diff --git a/lib/ansible/runner/action_plugins/normal.py b/v1/ansible/runner/action_plugins/normal.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/normal.py
rename to v1/ansible/runner/action_plugins/normal.py
diff --git a/lib/ansible/runner/action_plugins/patch.py b/v1/ansible/runner/action_plugins/patch.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/patch.py
rename to v1/ansible/runner/action_plugins/patch.py
diff --git a/lib/ansible/runner/action_plugins/pause.py b/v1/ansible/runner/action_plugins/pause.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/pause.py
rename to v1/ansible/runner/action_plugins/pause.py
diff --git a/lib/ansible/runner/action_plugins/raw.py b/v1/ansible/runner/action_plugins/raw.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/raw.py
rename to v1/ansible/runner/action_plugins/raw.py
diff --git a/lib/ansible/runner/action_plugins/script.py b/v1/ansible/runner/action_plugins/script.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/script.py
rename to v1/ansible/runner/action_plugins/script.py
diff --git a/lib/ansible/runner/action_plugins/set_fact.py b/v1/ansible/runner/action_plugins/set_fact.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/set_fact.py
rename to v1/ansible/runner/action_plugins/set_fact.py
diff --git a/lib/ansible/runner/action_plugins/synchronize.py b/v1/ansible/runner/action_plugins/synchronize.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/synchronize.py
rename to v1/ansible/runner/action_plugins/synchronize.py
diff --git a/lib/ansible/runner/action_plugins/template.py b/v1/ansible/runner/action_plugins/template.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/template.py
rename to v1/ansible/runner/action_plugins/template.py
diff --git a/lib/ansible/runner/action_plugins/unarchive.py b/v1/ansible/runner/action_plugins/unarchive.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/unarchive.py
rename to v1/ansible/runner/action_plugins/unarchive.py
diff --git a/lib/ansible/runner/action_plugins/win_copy.py b/v1/ansible/runner/action_plugins/win_copy.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/win_copy.py
rename to v1/ansible/runner/action_plugins/win_copy.py
diff --git a/lib/ansible/runner/action_plugins/win_template.py b/v1/ansible/runner/action_plugins/win_template.py
similarity index 100%
rename from lib/ansible/runner/action_plugins/win_template.py
rename to v1/ansible/runner/action_plugins/win_template.py
diff --git a/lib/ansible/runner/connection.py b/v1/ansible/runner/connection.py
similarity index 100%
rename from lib/ansible/runner/connection.py
rename to v1/ansible/runner/connection.py
diff --git a/lib/ansible/runner/shell_plugins/__init__.py b/v1/ansible/runner/connection_plugins/__init__.py
similarity index 100%
rename from lib/ansible/runner/shell_plugins/__init__.py
rename to v1/ansible/runner/connection_plugins/__init__.py
diff --git a/lib/ansible/runner/connection_plugins/accelerate.py b/v1/ansible/runner/connection_plugins/accelerate.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/accelerate.py
rename to v1/ansible/runner/connection_plugins/accelerate.py
diff --git a/lib/ansible/runner/connection_plugins/chroot.py b/v1/ansible/runner/connection_plugins/chroot.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/chroot.py
rename to v1/ansible/runner/connection_plugins/chroot.py
diff --git a/lib/ansible/runner/connection_plugins/fireball.py b/v1/ansible/runner/connection_plugins/fireball.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/fireball.py
rename to v1/ansible/runner/connection_plugins/fireball.py
diff --git a/lib/ansible/runner/connection_plugins/funcd.py b/v1/ansible/runner/connection_plugins/funcd.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/funcd.py
rename to v1/ansible/runner/connection_plugins/funcd.py
diff --git a/lib/ansible/runner/connection_plugins/jail.py b/v1/ansible/runner/connection_plugins/jail.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/jail.py
rename to v1/ansible/runner/connection_plugins/jail.py
diff --git a/lib/ansible/runner/connection_plugins/libvirt_lxc.py b/v1/ansible/runner/connection_plugins/libvirt_lxc.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/libvirt_lxc.py
rename to v1/ansible/runner/connection_plugins/libvirt_lxc.py
diff --git a/lib/ansible/runner/connection_plugins/local.py b/v1/ansible/runner/connection_plugins/local.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/local.py
rename to v1/ansible/runner/connection_plugins/local.py
diff --git a/lib/ansible/runner/connection_plugins/paramiko_ssh.py b/v1/ansible/runner/connection_plugins/paramiko_ssh.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/paramiko_ssh.py
rename to v1/ansible/runner/connection_plugins/paramiko_ssh.py
diff --git a/lib/ansible/runner/connection_plugins/ssh.py b/v1/ansible/runner/connection_plugins/ssh.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/ssh.py
rename to v1/ansible/runner/connection_plugins/ssh.py
diff --git a/lib/ansible/runner/connection_plugins/winrm.py b/v1/ansible/runner/connection_plugins/winrm.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/winrm.py
rename to v1/ansible/runner/connection_plugins/winrm.py
diff --git a/lib/ansible/runner/connection_plugins/zone.py b/v1/ansible/runner/connection_plugins/zone.py
similarity index 100%
rename from lib/ansible/runner/connection_plugins/zone.py
rename to v1/ansible/runner/connection_plugins/zone.py
diff --git a/lib/ansible/utils/module_docs_fragments/__init__.py b/v1/ansible/runner/filter_plugins/__init__.py
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments/__init__.py
rename to v1/ansible/runner/filter_plugins/__init__.py
diff --git a/lib/ansible/runner/filter_plugins/core.py b/v1/ansible/runner/filter_plugins/core.py
similarity index 100%
rename from lib/ansible/runner/filter_plugins/core.py
rename to v1/ansible/runner/filter_plugins/core.py
diff --git a/lib/ansible/runner/filter_plugins/ipaddr.py b/v1/ansible/runner/filter_plugins/ipaddr.py
similarity index 100%
rename from lib/ansible/runner/filter_plugins/ipaddr.py
rename to v1/ansible/runner/filter_plugins/ipaddr.py
diff --git a/lib/ansible/runner/filter_plugins/mathstuff.py b/v1/ansible/runner/filter_plugins/mathstuff.py
similarity index 100%
rename from lib/ansible/runner/filter_plugins/mathstuff.py
rename to v1/ansible/runner/filter_plugins/mathstuff.py
diff --git a/v2/ansible/inventory/vars_plugins/__init__.py b/v1/ansible/runner/lookup_plugins/__init__.py
similarity index 100%
rename from v2/ansible/inventory/vars_plugins/__init__.py
rename to v1/ansible/runner/lookup_plugins/__init__.py
diff --git a/lib/ansible/runner/lookup_plugins/cartesian.py b/v1/ansible/runner/lookup_plugins/cartesian.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/cartesian.py
rename to v1/ansible/runner/lookup_plugins/cartesian.py
diff --git a/lib/ansible/runner/lookup_plugins/consul_kv.py b/v1/ansible/runner/lookup_plugins/consul_kv.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/consul_kv.py
rename to v1/ansible/runner/lookup_plugins/consul_kv.py
diff --git a/lib/ansible/runner/lookup_plugins/csvfile.py b/v1/ansible/runner/lookup_plugins/csvfile.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/csvfile.py
rename to v1/ansible/runner/lookup_plugins/csvfile.py
diff --git a/lib/ansible/runner/lookup_plugins/dict.py b/v1/ansible/runner/lookup_plugins/dict.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/dict.py
rename to v1/ansible/runner/lookup_plugins/dict.py
diff --git a/lib/ansible/runner/lookup_plugins/dig.py b/v1/ansible/runner/lookup_plugins/dig.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/dig.py
rename to v1/ansible/runner/lookup_plugins/dig.py
diff --git a/lib/ansible/runner/lookup_plugins/dnstxt.py b/v1/ansible/runner/lookup_plugins/dnstxt.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/dnstxt.py
rename to v1/ansible/runner/lookup_plugins/dnstxt.py
diff --git a/lib/ansible/runner/lookup_plugins/env.py b/v1/ansible/runner/lookup_plugins/env.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/env.py
rename to v1/ansible/runner/lookup_plugins/env.py
diff --git a/lib/ansible/runner/lookup_plugins/etcd.py b/v1/ansible/runner/lookup_plugins/etcd.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/etcd.py
rename to v1/ansible/runner/lookup_plugins/etcd.py
diff --git a/lib/ansible/runner/lookup_plugins/file.py b/v1/ansible/runner/lookup_plugins/file.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/file.py
rename to v1/ansible/runner/lookup_plugins/file.py
diff --git a/lib/ansible/runner/lookup_plugins/fileglob.py b/v1/ansible/runner/lookup_plugins/fileglob.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/fileglob.py
rename to v1/ansible/runner/lookup_plugins/fileglob.py
diff --git a/lib/ansible/runner/lookup_plugins/first_found.py b/v1/ansible/runner/lookup_plugins/first_found.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/first_found.py
rename to v1/ansible/runner/lookup_plugins/first_found.py
diff --git a/lib/ansible/runner/lookup_plugins/flattened.py b/v1/ansible/runner/lookup_plugins/flattened.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/flattened.py
rename to v1/ansible/runner/lookup_plugins/flattened.py
diff --git a/lib/ansible/runner/lookup_plugins/indexed_items.py b/v1/ansible/runner/lookup_plugins/indexed_items.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/indexed_items.py
rename to v1/ansible/runner/lookup_plugins/indexed_items.py
diff --git a/lib/ansible/runner/lookup_plugins/inventory_hostnames.py b/v1/ansible/runner/lookup_plugins/inventory_hostnames.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/inventory_hostnames.py
rename to v1/ansible/runner/lookup_plugins/inventory_hostnames.py
diff --git a/lib/ansible/runner/lookup_plugins/items.py b/v1/ansible/runner/lookup_plugins/items.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/items.py
rename to v1/ansible/runner/lookup_plugins/items.py
diff --git a/lib/ansible/runner/lookup_plugins/lines.py b/v1/ansible/runner/lookup_plugins/lines.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/lines.py
rename to v1/ansible/runner/lookup_plugins/lines.py
diff --git a/lib/ansible/runner/lookup_plugins/nested.py b/v1/ansible/runner/lookup_plugins/nested.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/nested.py
rename to v1/ansible/runner/lookup_plugins/nested.py
diff --git a/lib/ansible/runner/lookup_plugins/password.py b/v1/ansible/runner/lookup_plugins/password.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/password.py
rename to v1/ansible/runner/lookup_plugins/password.py
diff --git a/lib/ansible/runner/lookup_plugins/pipe.py b/v1/ansible/runner/lookup_plugins/pipe.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/pipe.py
rename to v1/ansible/runner/lookup_plugins/pipe.py
diff --git a/lib/ansible/runner/lookup_plugins/random_choice.py b/v1/ansible/runner/lookup_plugins/random_choice.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/random_choice.py
rename to v1/ansible/runner/lookup_plugins/random_choice.py
diff --git a/lib/ansible/runner/lookup_plugins/redis_kv.py b/v1/ansible/runner/lookup_plugins/redis_kv.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/redis_kv.py
rename to v1/ansible/runner/lookup_plugins/redis_kv.py
diff --git a/lib/ansible/runner/lookup_plugins/sequence.py b/v1/ansible/runner/lookup_plugins/sequence.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/sequence.py
rename to v1/ansible/runner/lookup_plugins/sequence.py
diff --git a/lib/ansible/runner/lookup_plugins/subelements.py b/v1/ansible/runner/lookup_plugins/subelements.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/subelements.py
rename to v1/ansible/runner/lookup_plugins/subelements.py
diff --git a/lib/ansible/runner/lookup_plugins/template.py b/v1/ansible/runner/lookup_plugins/template.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/template.py
rename to v1/ansible/runner/lookup_plugins/template.py
diff --git a/lib/ansible/runner/lookup_plugins/together.py b/v1/ansible/runner/lookup_plugins/together.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/together.py
rename to v1/ansible/runner/lookup_plugins/together.py
diff --git a/lib/ansible/runner/lookup_plugins/url.py b/v1/ansible/runner/lookup_plugins/url.py
similarity index 100%
rename from lib/ansible/runner/lookup_plugins/url.py
rename to v1/ansible/runner/lookup_plugins/url.py
diff --git a/lib/ansible/runner/poller.py b/v1/ansible/runner/poller.py
similarity index 100%
rename from lib/ansible/runner/poller.py
rename to v1/ansible/runner/poller.py
diff --git a/lib/ansible/runner/return_data.py b/v1/ansible/runner/return_data.py
similarity index 100%
rename from lib/ansible/runner/return_data.py
rename to v1/ansible/runner/return_data.py
diff --git a/v2/test/parsing/yaml/__init__.py b/v1/ansible/runner/shell_plugins/__init__.py
similarity index 100%
rename from v2/test/parsing/yaml/__init__.py
rename to v1/ansible/runner/shell_plugins/__init__.py
diff --git a/lib/ansible/runner/shell_plugins/csh.py b/v1/ansible/runner/shell_plugins/csh.py
similarity index 100%
rename from lib/ansible/runner/shell_plugins/csh.py
rename to v1/ansible/runner/shell_plugins/csh.py
diff --git a/lib/ansible/runner/shell_plugins/fish.py b/v1/ansible/runner/shell_plugins/fish.py
similarity index 100%
rename from lib/ansible/runner/shell_plugins/fish.py
rename to v1/ansible/runner/shell_plugins/fish.py
diff --git a/lib/ansible/runner/shell_plugins/powershell.py b/v1/ansible/runner/shell_plugins/powershell.py
similarity index 100%
rename from lib/ansible/runner/shell_plugins/powershell.py
rename to v1/ansible/runner/shell_plugins/powershell.py
diff --git a/lib/ansible/runner/shell_plugins/sh.py b/v1/ansible/runner/shell_plugins/sh.py
similarity index 100%
rename from lib/ansible/runner/shell_plugins/sh.py
rename to v1/ansible/runner/shell_plugins/sh.py
diff --git a/v1/ansible/utils/__init__.py b/v1/ansible/utils/__init__.py
new file mode 100644
index 0000000000..7ed07a54c8
--- /dev/null
+++ b/v1/ansible/utils/__init__.py
@@ -0,0 +1,1660 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+import errno
+import sys
+import re
+import os
+import shlex
+import yaml
+import copy
+import optparse
+import operator
+from ansible import errors
+from ansible import __version__
+from ansible.utils.display_functions import *
+from ansible.utils.plugins import *
+from ansible.utils.su_prompts import *
+from ansible.utils.hashing import secure_hash, secure_hash_s, checksum, checksum_s, md5, md5s
+from ansible.callbacks import display
+from ansible.module_utils.splitter import split_args, unquote
+from ansible.module_utils.basic import heuristic_log_sanitize
+from ansible.utils.unicode import to_bytes, to_unicode
+import ansible.constants as C
+import ast
+import time
+import StringIO
+import stat
+import termios
+import tty
+import pipes
+import random
+import difflib
+import warnings
+import traceback
+import getpass
+import sys
+import subprocess
+import contextlib
+
+from vault import VaultLib
+
# default verbosity level for this module (0 = quiet)
VERBOSITY=0

# 1 MiB — upper bound on the size of files considered for diff output (per the name)
MAX_FILE_SIZE_FOR_DIFF=1*1024*1024

# caching the compilation of the regex used
# to check for lookup calls within data
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
# matches any jinja2 delimiter: '{{', '{%', '%}' or '}}'
PRINT_CODE_REGEX = re.compile(r'(?:{[{%]|[%}]})')
# matches jinja2 statement (block) delimiters only: '{%' or '%}'
CODE_REGEX = re.compile(r'(?:{%|%})')
+
+
+try:
+ # simplejson can be much faster if it's available
+ import simplejson as json
+except ImportError:
+ import json
+
+try:
+ from yaml import CSafeLoader as Loader
+except ImportError:
+ from yaml import SafeLoader as Loader
+
# passlib is optional: only password-hashing helpers need it, so failure
# to import merely leaves PASSLIB_AVAILABLE False.
PASSLIB_AVAILABLE = False
try:
    import passlib.hash
    PASSLIB_AVAILABLE = True
except ImportError:
    # FIX: was a bare 'except:' which also swallowed KeyboardInterrupt and
    # unrelated errors raised while importing passlib.
    pass

# Expose the builtin namespace under the name 'builtin' on both Python 2
# and Python 3.  FIX: the original tried 'import builtin' first — a module
# that has never existed — so the Python 3 fallback chain could never work.
try:
    import __builtin__ as builtin
except ImportError:
    import builtins as builtin
+
# keyczar is optional; accelerated ("fireball") mode checks KEYCZAR_AVAILABLE
# before using it (see key_for_hostname below).
KEYCZAR_AVAILABLE=False
try:
    try:
        # some versions of pycrypto may not have this?
        from Crypto.pct_warnings import PowmInsecureWarning
    except ImportError:
        # fall back to a warning class that will simply never fire
        PowmInsecureWarning = RuntimeWarning

    # Import keyczar with PowmInsecureWarning promoted to an error so a
    # vulnerable gmp/pycrypto combination can be detected and reported.
    with warnings.catch_warnings(record=True) as warning_handler:
        warnings.simplefilter("error", PowmInsecureWarning)
        try:
            import keyczar.errors as key_errors
            from keyczar.keys import AesKey
        except PowmInsecureWarning:
            system_warning(
                "The version of gmp you have installed has a known issue regarding " + \
                "timing vulnerabilities when used with pycrypto. " + \
                "If possible, you should update it (i.e. yum update gmp)."
            )
            warnings.resetwarnings()
            warnings.simplefilter("ignore")
            # retry the import with the warning silenced
            import keyczar.errors as key_errors
            from keyczar.keys import AesKey
    KEYCZAR_AVAILABLE=True
except ImportError:
    pass
+
+
+###############################################################
+# Abstractions around keyczar
+###############################################################
+
def key_for_hostname(hostname):
    """Return the keyczar AES key used to talk to *hostname* in accelerated mode.

    Keys live as one file per host under C.ACCELERATE_KEYS_DIR and are
    regenerated after 2 hours.  Raises AnsibleError if python-keyczar is
    missing or if the key directory/file permissions do not match the
    configured modes.
    """
    # fireball mode is an implementation of ansible firing up zeromq via SSH
    # to use no persistent daemons or key management

    if not KEYCZAR_AVAILABLE:
        raise errors.AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")

    # ~-expanded base directory that holds one key file per host
    key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
    if not os.path.exists(key_path):
        # create locked down, then apply the configured mode (an octal string)
        os.makedirs(key_path, mode=0700)
        os.chmod(key_path, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
    elif not os.path.isdir(key_path):
        raise errors.AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')

    # refuse to use a key directory whose mode differs from the configured one
    if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
        raise errors.AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR, int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))

    key_path = os.path.join(key_path, hostname)

    # use new AES keys every 2 hours, which means fireball must not allow running for longer either
    if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60*60*2):
        key = AesKey.Generate()
        # O_CREAT with the configured file mode so the key file is never created world-readable
        fd = os.open(key_path, os.O_WRONLY | os.O_CREAT, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
        fh = os.fdopen(fd, 'w')
        fh.write(str(key))
        fh.close()
        return key
    else:
        # reuse the existing key, but only if its permissions are still strict
        if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
            raise errors.AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
        fh = open(key_path)
        key = AesKey.Read(fh.read())
        fh.close()
        return key
+
def encrypt(key, msg):
    ''' encrypt msg with the given keyczar AES key object '''
    ciphertext = key.Encrypt(msg)
    return ciphertext
+
def decrypt(key, msg):
    ''' decrypt msg with the given keyczar AES key; AnsibleError on a bad signature '''
    try:
        plaintext = key.Decrypt(msg)
    except key_errors.InvalidSignatureError:
        raise errors.AnsibleError("decryption failed")
    return plaintext
+
+###############################################################
+# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
+###############################################################
+
+def read_vault_file(vault_password_file):
+ """Read a vault password from a file or if executable, execute the script and
+ retrieve password from STDOUT
+ """
+ if vault_password_file:
+ this_path = os.path.realpath(os.path.expanduser(vault_password_file))
+ if is_executable(this_path):
+ try:
+ # STDERR not captured to make it easier for users to prompt for input in their scripts
+ p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
+ except OSError, e:
+ raise errors.AnsibleError("problem running %s (%s)" % (' '.join(this_path), e))
+ stdout, stderr = p.communicate()
+ vault_pass = stdout.strip('\r\n')
+ else:
+ try:
+ f = open(this_path, "rb")
+ vault_pass=f.read().strip()
+ f.close()
+ except (OSError, IOError), e:
+ raise errors.AnsibleError("Could not read %s: %s" % (this_path, e))
+
+ return vault_pass
+ else:
+ return None
+
def err(msg):
    ''' print an error message to stderr '''

    # equivalent to the py2 statement `print >> sys.stderr, msg`
    sys.stderr.write("%s\n" % (msg,))
+
def exit(msg, rc=1):
    ''' print msg to stderr (via err) and quit with failure code rc '''

    err(msg)
    sys.exit(rc)
+
def jsonify(result, format=False):
    ''' serialize a result dict as JSON (pretty-printed with 4-space indent when format is true) '''

    if result is None:
        return "{}"

    # work on a shallow copy; decode byte strings so json can emit them
    data = result.copy()
    for k, v in data.items():
        if type(v) is str:
            data[k] = v.decode('utf-8', 'ignore')

    indent = 4 if format else None

    try:
        return json.dumps(data, sort_keys=True, indent=indent, ensure_ascii=False)
    except UnicodeDecodeError:
        # fall back to ASCII-escaped output when the data cannot be decoded
        return json.dumps(data, sort_keys=True, indent=indent)
+
def write_tree_file(tree, hostname, buf):
    ''' dump buf into the file treedir/hostname '''

    # TODO: appending playbook runs per host would need "a" mode instead
    dest = os.path.join(tree, hostname)
    out = open(dest, "w+")
    out.write(buf)
    out.close()
+
def is_failed(result):
    ''' does this JSON result represent a failure? '''

    # a non-zero return code fails outright
    if result.get('rc', 0) != 0:
        return True
    # otherwise honor an explicit 'failed' flag (bool or string form)
    return result.get('failed', False) in (True, 'True', 'true')
+
def is_changed(result):
    ''' does this JSON result represent a change on the host? '''

    # accept the bool True as well as its string spellings
    return result.get('changed', False) in (True, 'True', 'true')
+
def check_conditional(conditional, basedir, inject, fail_on_undefined=False):
    """Evaluate a task conditional ('when' expression) against the inject vars.

    None/'' count as True; a list is AND-ed element-wise; a non-string value
    is returned unchanged (it is presumably already a bool — confirm with
    callers).  Raises AnsibleError when the expression cannot be evaluated.
    """
    from ansible.utils import template

    # no conditional at all means "run"
    if conditional is None or conditional == '':
        return True

    # list of conditionals: every one of them must pass
    if isinstance(conditional, list):
        for x in conditional:
            if not check_conditional(x, basedir, inject, fail_on_undefined=fail_on_undefined):
                return False
        return True

    # anything non-string has already been evaluated — hand it back
    if not isinstance(conditional, basestring):
        return conditional

    # strip the internal marker prepended by earlier parsing stages
    conditional = conditional.replace("jinja2_compare ","")
    # allow variable names
    # (a bare variable name is replaced by its value, unless that value
    #  contains '-', which could change the meaning of the expression)
    if conditional in inject and '-' not in to_unicode(inject[conditional], nonstring='simplerepr'):
        conditional = to_unicode(inject[conditional], nonstring='simplerepr')
    conditional = template.template(basedir, conditional, inject, fail_on_undefined=fail_on_undefined)
    # keep the pre-wrap text for use in error messages below
    original = to_unicode(conditional, nonstring='simplerepr').replace("jinja2_compare ","")
    # a Jinja2 evaluation that results in something Python can eval!
    presented = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % conditional
    conditional = template.template(basedir, presented, inject)
    val = conditional.strip()
    if val == presented:
        # the templating failed, meaning most likely a
        # variable was undefined. If we happened to be
        # looking for an undefined variable, return True,
        # otherwise fail
        if "is undefined" in conditional:
            return True
        elif "is defined" in conditional:
            return False
        else:
            raise errors.AnsibleError("error while evaluating conditional: %s" % original)
    elif val == "True":
        return True
    elif val == "False":
        return False
    else:
        raise errors.AnsibleError("unable to evaluate conditional: %s" % original)
+
def is_executable(path):
    '''is any execute bit (user, group or other) set on the given path?'''
    # stat once instead of once per permission bit; result is truthy, not
    # necessarily the bool True (callers use it in boolean context)
    mode = os.stat(path)[stat.ST_MODE]
    return (stat.S_IXUSR & mode) or (stat.S_IXGRP & mode) or (stat.S_IXOTH & mode)
+
def unfrackpath(path):
    '''
    returns a path that is free of symlinks, environment
    variables, relative path traversals and symbols (~)
    example:
    '$HOME/../../var/mail' becomes '/var/spool/mail'
    '''
    expanded = os.path.expanduser(path)
    expanded = os.path.expandvars(expanded)
    return os.path.normpath(os.path.realpath(expanded))
+
def prepare_writeable_dir(tree,mode=0777):
    ''' make sure a directory exists and is writeable; returns the normalized path '''

    # modify the mode to ensure the owner at least
    # has read/write access to this directory
    mode |= 0700

    # make sure the tree path is always expanded
    # and normalized and free of symlinks
    tree = unfrackpath(tree)

    if not os.path.exists(tree):
        try:
            os.makedirs(tree, mode)
        except (IOError, OSError), e:
            raise errors.AnsibleError("Could not make dir %s: %s" % (tree, e))
    # double-check writability even when the directory already existed
    # (makedirs honors umask, so the effective mode may be narrower)
    if not os.access(tree, os.W_OK):
        raise errors.AnsibleError("Cannot write to path %s" % tree)
    return tree
+
def path_dwim(basedir, given):
    '''
    make relative paths work like folks expect.
    '''

    # strip surrounding single quotes, if any
    if given.startswith("'"):
        given = given[1:-1]

    if given.startswith("/"):
        # already absolute
        return os.path.abspath(given)
    if given.startswith("~"):
        # home-relative
        return os.path.abspath(os.path.expanduser(given))
    # otherwise relative to basedir (or the cwd when no basedir supplied)
    root = basedir if basedir is not None else "."
    return os.path.abspath(os.path.join(root, given))
+
def path_dwim_relative(original, dirname, source, playbook_base, check=True):
    ''' find one file in a directory one level up in a dir named dirname relative to current '''
    # (used by roles code)
    #
    # original: path of the file the search is relative to
    # dirname: sibling directory to look in (e.g. 'templates')
    # source: file name being searched for
    # playbook_base: fallback base directory for a plain relative lookup
    # check: when True, raise AnsibleError instead of returning a
    #        non-existent candidate path
    #
    # NOTE: the unused 'from ansible.utils import template' import was
    # removed; nothing in this function referenced it.

    basedir = os.path.dirname(original)
    if os.path.islink(basedir):
        # follow the symlink so the sibling dir is searched next to the
        # real location, not next to the link
        basedir = unfrackpath(basedir)
        template2 = os.path.join(basedir, dirname, source)
    else:
        template2 = os.path.join(basedir, '..', dirname, source)
    source2 = path_dwim(basedir, template2)
    if os.path.exists(source2):
        return source2
    # fall back to a plain lookup relative to the playbook base
    obvious_local_path = path_dwim(playbook_base, source)
    if os.path.exists(obvious_local_path):
        return obvious_local_path
    if check:
        raise errors.AnsibleError("input file not found at %s or %s" % (source2, obvious_local_path))
    return source2 # which does not exist
+
def repo_url_to_role_name(repo_url):
    # gets the role name out of a repo like
    # http://git.example.com/repos/repo.git" => "repo"

    # bare names (no scheme and no user@host) are already role names
    if '://' not in repo_url and '@' not in repo_url:
        return repo_url
    name = repo_url.split('/')[-1]
    # strip well-known archive/scm suffixes (in this order, matching the
    # original behaviour), then drop any ",version" tail
    for suffix in ('.git', '.tar.gz'):
        if name.endswith(suffix):
            name = name[:-len(suffix)]
    return name.split(',')[0]
+
+
def role_spec_parse(role_spec):
    # takes a repo and a version like
    # git+http://git.example.com/repos/repo.git,v1.0
    # and returns a list of properties such as:
    # {
    #   'scm': 'git',
    #   'src': 'http://git.example.com/repos/repo.git',
    #   'version': 'v1.0',
    #   'name': 'repo'
    # }

    spec = role_spec.strip()
    if spec == "" or spec.startswith("#"):
        # blank line or comment: nothing to parse
        return (None, None, None, None)

    tokens = [token.strip() for token in spec.split(',')]

    # assume https://github.com URLs are git+https:// URLs and not
    # tarballs unless they end in '.zip'
    first = tokens[0]
    if 'github.com/' in first and not first.startswith("git+") and not first.endswith('.tar.gz'):
        first = 'git+' + first
        tokens[0] = first

    if '+' in first:
        (scm, role_url) = first.split('+')
    else:
        scm = None
        role_url = first

    role_version = tokens[1] if len(tokens) >= 2 else ''
    role_name = tokens[2] if len(tokens) == 3 else repo_url_to_role_name(first)

    # scm checkouts default to their natural tip when no version was given
    if scm and not role_version:
        role_version = dict(git='master', hg='tip').get(scm, '')
    return dict(scm=scm, src=role_url, version=role_version, name=role_name)
+
+
def role_yaml_parse(role):
    if 'role' in role:
        # Old style: {role: "galaxy.role,version,name", other_vars: "here" }
        parsed = role_spec_parse(role['role'])
        if isinstance(parsed, dict):
            # Warning: Slight change in behaviour here. name may be being
            # overloaded. Previously, name was only a parameter to the role.
            # Now it is both a parameter to the role and the name that
            # ansible-galaxy will install under on the local system.
            if 'name' in role and 'name' in parsed:
                del parsed['name']
            role.update(parsed)
    else:
        # New style: { src: 'galaxy.role,version,name', other_vars: "here" }
        src = role["src"]
        if 'github.com' in src and 'http' in src and '+' not in src and not src.endswith('.tar.gz'):
            src = "git+" + src
            role["src"] = src

        if '+' in src:
            (role["scm"], role["src"]) = src.split('+')

        # fill in any fields the caller left out
        role.setdefault("name", repo_url_to_role_name(role["src"]))
        role.setdefault("version", '')
        role.setdefault("scm", None)

    return role
+
+
def json_loads(data):
    ''' parse a JSON string and return a data structure

    Raises AnsibleError (with a readable message) instead of the raw
    ValueError on malformed input.
    '''
    try:
        loaded = json.loads(data)
    except ValueError as e:
        # 'except X as e' replaces the Python-2-only 'except X, e' spelling;
        # valid on Python 2.6+ and Python 3
        raise errors.AnsibleError("Unable to read provided data as JSON: %s" % str(e))

    return loaded
+
def _clean_data(orig_data, from_remote=False, from_inventory=False):
    ''' remove jinja2 template tags from a string

    Matched pairs of jinja2 delimiters are rewritten in place into jinja2
    comment delimiters ({#..#}) so the text can no longer be templated.
    Non-string input is returned untouched.
    '''

    if not isinstance(orig_data, basestring):
        return orig_data

    # when the data is marked as having come from a remote, we always
    # replace any print blocks (ie. {{var}}), however when marked as coming
    # from inventory we only replace print blocks that contain a call to
    # a lookup plugin (ie. {{lookup('foo','bar'))}})
    replace_prints = from_remote or (from_inventory and '{{' in orig_data and LOOKUP_REGEX.search(orig_data) is not None)

    regex = PRINT_CODE_REGEX if replace_prints else CODE_REGEX

    with contextlib.closing(StringIO.StringIO(orig_data)) as data:
        # these variables keep track of opening block locations, as we only
        # want to replace matched pairs of print/block tags
        print_openings = []
        block_openings = []
        for mo in regex.finditer(orig_data):
            token = mo.group(0)
            token_start = mo.start(0)

            if token[0] == '{':
                # opening delimiter: remember where it started
                if token == '{%':
                    block_openings.append(token_start)
                elif token == '{{':
                    print_openings.append(token_start)

            elif token[1] == '}':
                # closing delimiter: pair it with the most recent unmatched
                # opening of the same kind (LIFO handles nesting)
                prev_idx = None
                if token == '%}' and block_openings:
                    prev_idx = block_openings.pop()
                elif token == '}}' and print_openings:
                    prev_idx = print_openings.pop()

                if prev_idx is not None:
                    # all delimiters are two characters wide, so overwriting
                    # in place keeps every other recorded offset valid
                    # replace the opening
                    data.seek(prev_idx, os.SEEK_SET)
                    data.write('{#')
                    # replace the closing
                    data.seek(token_start, os.SEEK_SET)
                    data.write('#}')

            else:
                assert False, 'Unhandled regex match'

        return data.getvalue()
+
def _clean_data_struct(orig_data, from_remote=False, from_inventory=False):
    '''
    walk a complex data structure, and use _clean_data() to
    remove any template tags that may exist

    Returns a cleaned copy; the input structure is never mutated.
    '''
    if not from_remote and not from_inventory:
        # FIX: was errors.AnsibleErrors (nonexistent attribute), which would
        # have raised AttributeError instead of the intended AnsibleError
        raise errors.AnsibleError("when cleaning data, you must specify either from_remote or from_inventory")
    if isinstance(orig_data, dict):
        data = orig_data.copy()
        # iterate over a snapshot of the keys: cleaning may rename a key,
        # and mutating a dict while iterating it directly is unsafe (it can
        # skip or revisit entries, or raise RuntimeError)
        for key in list(data.keys()):
            new_key = _clean_data_struct(key, from_remote, from_inventory)
            new_val = _clean_data_struct(data[key], from_remote, from_inventory)
            if key != new_key:
                del data[key]
            data[new_key] = new_val
    elif isinstance(orig_data, list):
        data = orig_data[:]
        for i in range(0, len(data)):
            data[i] = _clean_data_struct(data[i], from_remote, from_inventory)
    elif isinstance(orig_data, basestring):
        data = _clean_data(orig_data, from_remote, from_inventory)
    else:
        data = orig_data
    return data
+
def parse_json(raw_data, from_remote=False, from_inventory=False, no_exceptions=False):
    ''' this version for module return data only

    Strips leading non-JSON lines (ssh/MOTD noise) before parsing.  When
    *no_exceptions* is set, malformed input yields a failed-result dict
    instead of raising.
    '''

    # ignore stuff like tcgetattr spewage or other warnings
    data = filter_leading_non_json_lines(raw_data)

    try:
        results = json.loads(data)
    except ValueError:
        # json.loads signals malformed input with ValueError (JSONDecodeError
        # is a subclass of it); the original bare 'except:' would also have
        # swallowed KeyboardInterrupt/SystemExit
        if no_exceptions:
            return dict(failed=True, parsed=False, msg=raw_data)
        else:
            raise

    if from_remote:
        # scrub template tags from data that came back from a remote host
        results = _clean_data_struct(results, from_remote, from_inventory)

    return results
+
def serialize_args(args):
    '''
    Flattens a dictionary args to a k=v string
    '''
    # only string and boolean values are serialized; values of any other
    # type are silently skipped (matching the original behaviour)
    parts = []
    for key, value in args.iteritems():
        if isinstance(value, basestring):
            parts.append("%s=%s" % (key, pipes.quote(value)))
        elif isinstance(value, bool):
            parts.append("%s=%s" % (key, str(value)))
    # the original prepended each pair, so emit in reverse iteration order
    parts.reverse()
    return " ".join(parts)
+
def merge_module_args(current_args, new_args):
    '''
    merges either a dictionary or string of k=v pairs with another string of k=v pairs,
    and returns a new k=v string without duplicates.
    '''
    if not isinstance(current_args, basestring):
        raise errors.AnsibleError("expected current_args to be a basestring")
    # parse the current args into a dict, overlay the new ones on top, and
    # flatten back out; new values win on duplicate keys
    merged = parse_kv(current_args)
    if isinstance(new_args, dict):
        merged.update(new_args)
    elif isinstance(new_args, basestring):
        merged.update(parse_kv(new_args))
    return serialize_args(merged)
+
def parse_yaml(data, path_hint=None):
    ''' convert a yaml string to a data structure. Also supports JSON, ssssssh!!!'''

    stripped_data = data.lstrip()
    loaded = None
    if stripped_data.startswith("{") or stripped_data.startswith("["):
        # since the line starts with { or [ we can infer this is a JSON document.
        try:
            loaded = json.loads(data)
        except ValueError as ve:
            # 'as ve' replaces the Python-2-only 'except X, ve' spelling;
            # prefix the error with the file name when we know it
            if path_hint:
                raise errors.AnsibleError(path_hint + ": " + str(ve))
            else:
                raise errors.AnsibleError(str(ve))
    else:
        # else this is pretty sure to be a YAML document
        loaded = yaml.load(data, Loader=Loader)

    return loaded
+
def process_common_errors(msg, probline, column):
    # Append targeted hints to a YAML syntax-error message *msg*, based on
    # common mistakes detected in the offending line *probline* (the parser
    # reported the problem at *column*).  Returns the augmented message.
    replaced = probline.replace(" ","")

    if ":{{" in replaced and "}}" in replaced:
        # unquoted jinja2 expression immediately after a colon
        msg = msg + """
This one looks easy to fix. YAML thought it was looking for the start of a
hash/dictionary and was confused to see a second "{". Most likely this was
meant to be an ansible template evaluation instead, so we have to give the
parser a small hint that we wanted a string instead. The solution here is to
just quote the entire value.

For instance, if the original line was:

    app_path: {{ base_path }}/foo

It should be written as:

    app_path: "{{ base_path }}/foo"
"""
        return msg

    elif len(probline) and len(probline) > 1 and len(probline) > column and probline[column] == ":" and probline.count(':') > 1:
        # the parser choked on an extra unquoted colon in the line
        msg = msg + """
This one looks easy to fix. There seems to be an extra unquoted colon in the line
and this is confusing the parser. It was only expecting to find one free
colon. The solution is just add some quotes around the colon, or quote the
entire line after the first colon.

For instance, if the original line was:

    copy: src=file.txt dest=/path/filename:with_colon.txt

It can be written as:

    copy: src=file.txt dest='/path/filename:with_colon.txt'

Or:

    copy: 'src=file.txt dest=/path/filename:with_colon.txt'


"""
        return msg
    else:
        # look at the text after the first colon for quoting problems
        parts = probline.split(":")
        if len(parts) > 1:
            middle = parts[1].strip()
            match = False
            unbalanced = False
            if middle.startswith("'") and not middle.endswith("'"):
                match = True
            elif middle.startswith('"') and not middle.endswith('"'):
                match = True
            # NOTE(review): operator precedence makes this read as
            # (... and ... and probline.count("'") > 2) or (probline.count('"') > 2),
            # so 'unbalanced' triggers whenever the line has 3+ double quotes
            # regardless of the other tests; parentheses around the two
            # count() comparisons were probably intended - confirm before changing
            if len(middle) > 0 and middle[0] in [ '"', "'" ] and middle[-1] in [ '"', "'" ] and probline.count("'") > 2 or probline.count('"') > 2:
                unbalanced = True
            if match:
                msg = msg + """
This one looks easy to fix. It seems that there is a value started
with a quote, and the YAML parser is expecting to see the line ended
with the same kind of quote. For instance:

    when: "ok" in result.stdout

Could be written as:

    when: '"ok" in result.stdout'

or equivalently:

    when: "'ok' in result.stdout"

"""
                return msg

            if unbalanced:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
unbalanced quotes. If starting a value with a quote, make sure the
line ends with the same set of quotes. For instance this arbitrary
example:

    foo: "bad" "wolf"

Could be written as:

    foo: '"bad" "wolf"'

"""
                return msg

    return msg
+
def process_yaml_error(exc, data, path=None, show_content=True):
    # Turn a PyYAML exception *exc* (raised while parsing *data*, loaded
    # from *path*) into an AnsibleYAMLValidationFailed with a helpful,
    # context-rich message.  When *show_content* is False (e.g. vault
    # encrypted files), only line/column are reported, never the text.
    if hasattr(exc, 'problem_mark'):
        mark = exc.problem_mark
        if show_content:
            # include the offending line, the one before it, and a caret
            if mark.line -1 >= 0:
                before_probline = data.split("\n")[mark.line-1]
            else:
                before_probline = ''
            probline = data.split("\n")[mark.line]
            arrow = " " * mark.column + "^"
            msg = """Syntax Error while loading YAML script, %s
Note: The error may actually appear before this position: line %s, column %s

%s
%s
%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)

            unquoted_var = None
            if '{{' in probline and '}}' in probline:
                # NOTE(review): with 'or', this is true unless BOTH "{{ and
                # '{{ appear on the same line; 'and' ("quoted neither way")
                # was probably the intent - confirm before changing
                if '"{{' not in probline or "'{{" not in probline:
                    unquoted_var = True

            if not unquoted_var:
                msg = process_common_errors(msg, probline, mark.column)
            else:
                msg = msg + """
We could be wrong, but this one looks like it might be an issue with
missing quotes. Always quote template expression brackets when they
start a value. For instance:

    with_items:
      - {{ foo }}

Should be written as:

    with_items:
      - "{{ foo }}"

"""
        else:
            # most likely displaying a file with sensitive content,
            # so don't show any of the actual lines of yaml just the
            # line number itself
            msg = """Syntax error while loading YAML script, %s
The error appears to have been on line %s, column %s, but may actually
be before there depending on the exact syntax problem.
""" % (path, mark.line + 1, mark.column + 1)

    else:
        # No problem markers means we have to throw a generic
        # "stuff messed up" type message. Sry bud.
        if path:
            msg = "Could not parse YAML. Check over %s again." % path
        else:
            msg = "Could not parse YAML."
    raise errors.AnsibleYAMLValidationFailed(msg)
+
+
def parse_yaml_from_file(path, vault_password=None):
    ''' convert a yaml file to a data structure

    Transparently decrypts vault-encrypted files when *vault_password*
    is supplied.  Raises AnsibleError when the file cannot be read or a
    required vault password is missing.
    '''

    data = None
    show_content = True

    try:
        # context manager closes the handle even if read() fails
        # (the original open(path).read() leaked the file object)
        with open(path) as f:
            data = f.read()
    except IOError:
        raise errors.AnsibleError("file could not read: %s" % path)

    vault = VaultLib(password=vault_password)
    if vault.is_encrypted(data):
        # if the file is encrypted and no password was specified,
        # the decrypt call would throw an error, but we check first
        # since the decrypt function doesn't know the file name
        if vault_password is None:
            raise errors.AnsibleError("A vault password must be specified to decrypt %s" % path)
        data = vault.decrypt(data)
        # decrypted content is sensitive; never echo it in error output
        show_content = False

    try:
        return parse_yaml(data, path_hint=path)
    except yaml.YAMLError as exc:
        # 'as exc' replaces the Python-2-only 'except X, exc' spelling
        process_yaml_error(exc, data, path, show_content)
+
def parse_kv(args):
    ''' convert a string of key/value items to a dict

    Tokens without an '=' are ignored; values are unquoted.  Returns an
    empty dict for None input.
    '''
    options = {}
    if args is not None:
        try:
            vargs = split_args(args)
        except ValueError as ve:
            # 'as ve' replaces the Python-2-only 'except X, ve' spelling
            if 'no closing quotation' in str(ve).lower():
                raise errors.AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise
        for x in vargs:
            if "=" in x:
                # split only on the first '=' so values may contain '='
                k, v = x.split("=", 1)
                options[k.strip()] = unquote(v.strip())
    return options
+
+def _validate_both_dicts(a, b):
+
+ if not (isinstance(a, dict) and isinstance(b, dict)):
+ raise errors.AnsibleError(
+ "failed to combine variables, expected dicts but got a '%s' and a '%s'" % (type(a).__name__, type(b).__name__)
+ )
+
def merge_hash(a, b):
    ''' recursively merges hash b into a
    keys from b take precedence over keys from a '''

    result = {}

    # we check here as well as in combine_vars() since this
    # function can work recursively with nested dicts
    _validate_both_dicts(a, b)

    for dicts in a, b:
        # iterate over each source dict's keys and values in turn
        for k, v in dicts.iteritems():
            # if there's already such key in the accumulated result
            # and that key contains dict
            if k in result and isinstance(result[k], dict):
                # merge those dicts recursively; merge into the value
                # accumulated so far (result[k]) rather than reaching back
                # into a[k] as before, which only worked because result[k]
                # always equalled a[k] at this point
                result[k] = merge_hash(result[k], v)
            else:
                # otherwise, just copy a value from b to a
                result[k] = v

    return result
+
def default(value, function):
    ''' syntactic sugar around lazy evaluation of defaults '''
    # only invoke the default factory when no value was supplied
    return function() if value is None else value
+
+
+def _git_repo_info(repo_path):
+ ''' returns a string containing git branch, commit id and commit date '''
+ result = None
+ if os.path.exists(repo_path):
+ # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
+ if os.path.isfile(repo_path):
+ try:
+ gitdir = yaml.safe_load(open(repo_path)).get('gitdir')
+ # There is a possibility the .git file to have an absolute path.
+ if os.path.isabs(gitdir):
+ repo_path = gitdir
+ else:
+ repo_path = os.path.join(repo_path[:-4], gitdir)
+ except (IOError, AttributeError):
+ return ''
+ f = open(os.path.join(repo_path, "HEAD"))
+ branch = f.readline().split('/')[-1].rstrip("\n")
+ f.close()
+ branch_path = os.path.join(repo_path, "refs", "heads", branch)
+ if os.path.exists(branch_path):
+ f = open(branch_path)
+ commit = f.readline()[:10]
+ f.close()
+ else:
+ # detached HEAD
+ commit = branch[:10]
+ branch = 'detached HEAD'
+ branch_path = os.path.join(repo_path, "HEAD")
+
+ date = time.localtime(os.stat(branch_path).st_mtime)
+ if time.daylight == 0:
+ offset = time.timezone
+ else:
+ offset = time.altzone
+ result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
+ time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
+ else:
+ result = ''
+ return result
+
+
def _gitinfo():
    ''' describe the checked-out ansible git tree, including submodules '''
    basedir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
    repo_path = os.path.join(basedir, '.git')
    result = _git_repo_info(repo_path)
    submodules = os.path.join(basedir, '.gitmodules')
    if not os.path.exists(submodules):
        return result
    # append one line per submodule listed in .gitmodules; the context
    # manager guarantees the file is closed even if a read raises (the
    # original explicit close() was skipped on exceptions)
    with open(submodules) as f:
        for line in f:
            tokens = line.strip().split(' ')
            if tokens[0] == 'path':
                submodule_path = tokens[2]
                submodule_info = _git_repo_info(os.path.join(basedir, submodule_path, '.git'))
                if not submodule_info:
                    submodule_info = ' not found - use git submodule update --init ' + submodule_path
                result += "\n {0}: {1}".format(submodule_path, submodule_info)
    return result
+
+
def version(prog):
    ''' build the human readable version banner for *prog* '''
    result = "{0} {1}".format(prog, __version__)
    # append git details when running from a checkout
    gitinfo = _gitinfo()
    if gitinfo:
        result += " {0}".format(gitinfo)
    result += "\n configured module search path = %s" % C.DEFAULT_MODULE_PATH
    return result
+
def version_info(gitinfo=False):
    ''' break the ansible version into a dict of components '''
    if gitinfo:
        # expensive call, use with care
        version_string = version('')
    else:
        version_string = __version__
    full = version_string.split()[0]
    pieces = full.split('.')
    for idx in range(len(pieces)):
        if pieces[idx] == "":
            pieces[idx] = 0
        try:
            pieces[idx] = int(pieces[idx])
        except:
            # leave non-numeric components (e.g. 'rc1') as-is
            pass
    # always expose at least major/minor/revision
    while len(pieces) < 3:
        pieces.append(0)
    return {'string': version_string.strip(),
            'full': full,
            'major': pieces[0],
            'minor': pieces[1],
            'revision': pieces[2]}
+
def getch():
    ''' read in a single character

    Puts the controlling terminal into raw mode so the key is returned
    immediately (no Enter required), then restores the previous settings.
    '''
    fd = sys.stdin.fileno()
    old_settings = termios.tcgetattr(fd)
    try:
        tty.setraw(sys.stdin.fileno())
        ch = sys.stdin.read(1)
    finally:
        # always restore the terminal, even if the read is interrupted
        termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch
+
def sanitize_output(arg_string):
    ''' strips private info out of a string '''

    private_keys = ('password', 'login_password')

    cleaned = []
    for token in arg_string.split():
        if '=' not in token:
            # not a k=v pair; scrub the whole token heuristically
            cleaned.append(heuristic_log_sanitize(token))
            continue

        key, value = token.split('=', 1)
        if key in private_keys:
            # known-sensitive key: hide the value outright
            value = 'VALUE_HIDDEN'
        else:
            value = heuristic_log_sanitize(value)
        cleaned.append('%s=%s' % (key, value))

    return ' '.join(cleaned)
+
+
+####################################################################
+# option handling code for /usr/bin/ansible and ansible-playbook
+# below this line
+
class SortedOptParser(optparse.OptionParser):
    '''Optparser which sorts the options by opt before outputting --help'''

    def format_help(self, formatter=None):
        # sort in place so the help output lists options alphabetically
        self.option_list.sort(key=operator.methodcaller('get_opt_string'))
        # FIX: forward the caller-supplied formatter; the original passed
        # formatter=None unconditionally, silently discarding any custom
        # formatter given to this call
        return optparse.OptionParser.format_help(self, formatter=formatter)
+
def increment_debug(option, opt, value, parser):
    # optparse callback for -v: each occurrence bumps the module-level
    # VERBOSITY counter (so -vvv yields 3)
    global VERBOSITY
    VERBOSITY += 1
+
def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
    async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, diff_opts=False):
    ''' create an options parser for any ansible script

    The boolean flags switch on optional option groups (output, run-as,
    async, connection, subset, check and diff options) so each CLI tool
    only exposes what it supports.  Returns a SortedOptParser.
    '''

    # options common to every ansible command line tool
    parser = SortedOptParser(usage, version=version("%prog"))
    parser.add_option('-v','--verbose', default=False, action="callback",
        callback=increment_debug, help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")

    parser.add_option('-f','--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
        help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
    parser.add_option('-i', '--inventory-file', dest='inventory',
        help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
        default=constants.DEFAULT_HOST_LIST)
    parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
        help="set additional variables as key=value or YAML/JSON", default=[])
    parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER, dest='remote_user',
        help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)
    parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
        help='ask for SSH password')
    parser.add_option('--private-key', default=constants.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
        help='use this file to authenticate the connection')
    parser.add_option('--ask-vault-pass', default=False, dest='ask_vault_pass', action='store_true',
        help='ask for vault password')
    parser.add_option('--vault-password-file', default=constants.DEFAULT_VAULT_PASSWORD_FILE,
        dest='vault_password_file', help="vault password file")
    parser.add_option('--list-hosts', dest='listhosts', action='store_true',
        help='outputs a list of matching hosts; does not execute anything else')
    parser.add_option('-M', '--module-path', dest='module_path',
        help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
        default=None)

    # host-pattern limiting
    if subset_opts:
        parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
            help='further limit selected hosts to an additional pattern')

    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
        dest='timeout',
        help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)

    # output shaping (used by /usr/bin/ansible)
    if output_opts:
        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
            help='condense output')
        parser.add_option('-t', '--tree', dest='tree', default=None,
            help='log output to this directory')

    # privilege escalation: deprecated sudo/su flags plus the consolidated
    # become flags
    if runas_opts:
        # priv user defaults to root later on to enable detecting when this option was given here
        parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
            help='ask for sudo password (deprecated, use become)')
        parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
            help='ask for su password (deprecated, use become)')
        parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true", dest='sudo',
            help="run operations with sudo (nopasswd) (deprecated, use become)")
        parser.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
            help='desired sudo user (default=root) (deprecated, use become)')
        parser.add_option('-S', '--su', default=constants.DEFAULT_SU, action='store_true',
            help='run operations with su (deprecated, use become)')
        parser.add_option('-R', '--su-user', default=None,
            help='run operations with su as this user (default=%s) (deprecated, use become)' % constants.DEFAULT_SU_USER)

        # consolidated privilege escalation (become)
        parser.add_option("-b", "--become", default=constants.DEFAULT_BECOME, action="store_true", dest='become',
            help="run operations with become (nopasswd implied)")
        parser.add_option('--become-method', dest='become_method', default=constants.DEFAULT_BECOME_METHOD, type='string',
            help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (constants.DEFAULT_BECOME_METHOD, ' | '.join(constants.BECOME_METHODS)))
        parser.add_option('--become-user', default=None, dest='become_user', type='string',
            help='run operations as this user (default=%s)' % constants.DEFAULT_BECOME_USER)
        parser.add_option('--ask-become-pass', default=False, dest='become_ask_pass', action='store_true',
            help='ask for privilege escalation password')


    if connect_opts:
        parser.add_option('-c', '--connection', dest='connection',
            default=constants.DEFAULT_TRANSPORT,
            help="connection type to use (default=%s)" % constants.DEFAULT_TRANSPORT)

    # background/async execution
    if async_opts:
        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
            dest='poll_interval',
            help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
            help='run asynchronously, failing after X seconds (default=N/A)')

    if check_opts:
        parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
            help="don't make any changes; instead, try to predict some of the changes that may occur"
        )

    if diff_opts:
        parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
            help="when changing (small) files and templates, show the differences in those files; works great with --check"
        )

    return parser
+
def parse_extra_vars(extra_vars_opts, vault_pass):
    ''' fold every -e/--extra-vars option into a single dict '''
    extra_vars = {}
    for opt in extra_vars_opts:
        opt = to_unicode(opt)
        if opt.startswith(u"@"):
            # Argument is a YAML file (JSON is a subset of YAML)
            parsed = parse_yaml_from_file(opt[1:], vault_password=vault_pass)
        elif opt and opt[0] in u'[{':
            # Arguments as YAML
            parsed = parse_yaml(opt)
        else:
            # Arguments as Key-value
            parsed = parse_kv(opt)
        # later options override earlier ones
        extra_vars = combine_vars(extra_vars, parsed)
    return extra_vars
+
def ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=False, confirm_vault=False, confirm_new=False):
    ''' interactively prompt for (and optionally confirm) vault passwords

    Returns a (vault_pass, new_vault_pass) tuple; entries are None when
    the matching prompt was not requested.
    '''

    def _prompt(label, confirm):
        # ask once, optionally re-ask and require both entries to match
        secret = getpass.getpass(prompt="%s: " % label)
        if confirm:
            again = getpass.getpass(prompt="Confirm %s: " % label)
            if secret != again:
                raise errors.AnsibleError("Passwords do not match")
        return secret

    vault_pass = None
    new_vault_pass = None

    if ask_vault_pass:
        vault_pass = _prompt("Vault password", confirm_vault)

    if ask_new_vault_pass:
        new_vault_pass = _prompt("New Vault password", confirm_new)

    # enforce no newline chars at the end of passwords
    if vault_pass:
        vault_pass = to_bytes(vault_pass, errors='strict', nonstring='simplerepr').strip()
    if new_vault_pass:
        new_vault_pass = to_bytes(new_vault_pass, errors='strict', nonstring='simplerepr').strip()

    return vault_pass, new_vault_pass
+
def ask_passwords(ask_pass=False, become_ask_pass=False, ask_vault_pass=False, become_method=C.DEFAULT_BECOME_METHOD):
    ''' interactively collect ssh / privilege-escalation / vault passwords

    Returns a (sshpass, becomepass, vaultpass) tuple; entries are None
    when the matching prompt was not requested.
    '''
    sshpass = None
    becomepass = None
    vaultpass = None

    method = become_method.upper()
    if ask_pass:
        sshpass = getpass.getpass(prompt="SSH password: ")
        become_prompt = "%s password[defaults to SSH password]: " % method
        if sshpass:
            sshpass = to_bytes(sshpass, errors='strict', nonstring='simplerepr')
    else:
        become_prompt = "%s password: " % method

    if become_ask_pass:
        becomepass = getpass.getpass(prompt=become_prompt)
        if ask_pass and becomepass == '':
            # empty answer falls back to the ssh password
            becomepass = sshpass
        if becomepass:
            becomepass = to_bytes(becomepass)

    if ask_vault_pass:
        vaultpass = getpass.getpass(prompt="Vault password: ")
        if vaultpass:
            vaultpass = to_bytes(vaultpass, errors='strict', nonstring='simplerepr').strip()

    return (sshpass, becomepass, vaultpass)
+
+
def choose_pass_prompt(options):
    ''' pick which escalation method name to use in the password prompt '''
    # the deprecated su/sudo flags take precedence over the generic
    # become machinery
    if options.ask_su_pass:
        return 'su'
    if options.ask_sudo_pass:
        return 'sudo'
    return options.become_method
+
def normalize_become_options(options):
    ''' fold the deprecated sudo/su options into the become_* options '''
    options.become_ask_pass = options.become_ask_pass or options.ask_sudo_pass or options.ask_su_pass or C.DEFAULT_BECOME_ASK_PASS
    options.become_user = options.become_user or options.sudo_user or options.su_user or C.DEFAULT_BECOME_USER

    # an explicit --become wins; otherwise map the legacy flags onto it
    if not options.become:
        if options.sudo:
            options.become = True
            options.become_method = 'sudo'
        elif options.su:
            options.become = True
            options.become_method = 'su'
+
+
def do_encrypt(result, encrypt, salt_size=None, salt=None):
    ''' hash *result* with the passlib algorithm named by *encrypt* '''
    if not PASSLIB_AVAILABLE:
        raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")

    try:
        crypt = getattr(passlib.hash, encrypt)
    except:
        raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)

    # pass through whichever salt control the caller supplied
    if salt_size:
        return crypt.encrypt(result, salt_size=salt_size)
    if salt:
        return crypt.encrypt(result, salt=salt)
    return crypt.encrypt(result)
+
def last_non_blank_line(buf):
    ''' return the last line of *buf* that contains any characters '''
    for line in reversed(buf.splitlines()):
        if line:
            return line
    # shouldn't occur unless there's no output
    return ""
+
def filter_leading_non_json_lines(buf):
    '''
    used to avoid random output from SSH at the top of JSON output, like messages from
    tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).

    need to filter anything which starts not with '{', '[', ', '=' or is an empty line.
    filter only leading lines since multiline JSON is valid.
    '''
    kept = []
    seen_json = False
    for line in buf.splitlines():
        # skip everything until the first line that opens a JSON document
        if not seen_json and not line.startswith(('{', '[')):
            continue
        seen_json = True
        kept.append(line + '\n')
    return ''.join(kept)
+
def boolean(value):
    ''' loosely convert any value to a bool; strings like "yes"/"t"/"1"
    (case-insensitive) and the literal True count as true '''
    return str(value).lower() in ("true", "t", "y", "1", "yes")
+
def make_become_cmd(cmd, user, shell, method, flags=None, exe=None):
    """
    helper function for connection plugins to create privilege escalation commands

    Returns a (command, prompt, success_key) tuple: the fully wrapped
    command to run, the password prompt string to expect (or None when the
    method never prompts), and a random marker echoed on successful
    escalation so callers can detect when it worked.
    Raises AnsibleError for an unknown *method*.
    """

    # random marker so the caller can distinguish escalation success output
    randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
    success_key = 'BECOME-SUCCESS-%s' % randbits
    prompt = None
    becomecmd = None

    shell = shell or '$SHELL'

    if method == 'sudo':
        # Rather than detect if sudo wants a password this time, -k makes sudo always ask for
        # a password if one is required. Passing a quoted compound command to sudo (or sudo -s)
        # directly doesn't work, so we shellquote it with pipes.quote() and pass the quoted
        # string to the user's shell. We loop reading output until we see the randomly-generated
        # sudo prompt set with the -p option.
        prompt = '[sudo via ansible, key=%s] password: ' % randbits
        exe = exe or C.DEFAULT_SUDO_EXE
        becomecmd = '%s -k && %s %s -S -p "%s" -u %s %s -c %s' % \
            (exe, exe, flags or C.DEFAULT_SUDO_FLAGS, prompt, user, shell, pipes.quote('echo %s; %s' % (success_key, cmd)))

    elif method == 'su':
        exe = exe or C.DEFAULT_SU_EXE
        flags = flags or C.DEFAULT_SU_FLAGS
        becomecmd = '%s %s %s -c "%s -c %s"' % (exe, flags, user, shell, pipes.quote('echo %s; %s' % (success_key, cmd)))

    elif method == 'pbrun':
        # 'assword:' matches both 'Password:' and 'password:' prompts
        prompt = 'assword:'
        exe = exe or 'pbrun'
        flags = flags or ''
        becomecmd = '%s -b -l %s -u %s "%s"' % (exe, flags, user, pipes.quote('echo %s; %s' % (success_key,cmd)))

    elif method == 'pfexec':
        exe = exe or 'pfexec'
        flags = flags or ''
        # No user as it uses its own exec_attr to figure it out
        becomecmd = '%s %s "%s"' % (exe, flags, pipes.quote('echo %s; %s' % (success_key,cmd)))

    if becomecmd is None:
        raise errors.AnsibleError("Privilege escalation method not found: %s" % method)

    return (('%s -c ' % shell) + pipes.quote(becomecmd), prompt, success_key)
+
+
def make_sudo_cmd(sudo_exe, sudo_user, executable, cmd):
    """
    helper function for connection plugins to create sudo commands

    Thin wrapper around make_become_cmd() fixing the method to 'sudo' and
    the flags to the configured defaults; returns the same
    (command, prompt, success_key) tuple.
    """
    return make_become_cmd(cmd, sudo_user, executable, 'sudo', C.DEFAULT_SUDO_FLAGS, sudo_exe)
+
+
def make_su_cmd(su_user, executable, cmd):
    """
    Helper function for connection plugins to create direct su commands

    Thin wrapper around make_become_cmd() fixing the method to 'su' and
    the executable/flags to the configured defaults; returns the same
    (command, prompt, success_key) tuple.
    """
    return make_become_cmd(cmd, su_user, executable, 'su', C.DEFAULT_SU_FLAGS, C.DEFAULT_SU_EXE)
+
def get_diff(diff):
    # called by --diff usage in playbook and runner via callbacks
    # include names in diffs 'before' and 'after' and do diff -U 10
    #
    # *diff* is a dict that may carry 'before'/'after' text plus optional
    # 'before_header'/'after_header' labels, or one of the skip markers
    # (dst_binary/src_binary/dst_larger/src_larger).  Returns a unicode
    # diff string, or a short explanation when no diff can be produced.

    try:
        with warnings.catch_warnings():
            # keep the output clean of any warnings the diffing raises
            warnings.simplefilter('ignore')
            ret = []
            if 'dst_binary' in diff:
                ret.append("diff skipped: destination file appears to be binary\n")
            if 'src_binary' in diff:
                ret.append("diff skipped: source file appears to be binary\n")
            if 'dst_larger' in diff:
                ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
            if 'src_larger' in diff:
                ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
            if 'before' in diff and 'after' in diff:
                # label each side with its filename when one was provided
                if 'before_header' in diff:
                    before_header = "before: %s" % diff['before_header']
                else:
                    before_header = 'before'
                if 'after_header' in diff:
                    after_header = "after: %s" % diff['after_header']
                else:
                    after_header = 'after'
                differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
                for line in list(differ):
                    ret.append(line)
            return u"".join(ret)
    except UnicodeDecodeError:
        return ">> the files are different, but the diff library cannot compare unicode strings"
+
def is_list_of_strings(items):
    ''' True when every element of items is a (byte or unicode) string. '''
    return all(isinstance(item, basestring) for item in items)
+
def list_union(a, b):
    ''' Order-preserving union of two lists: elements of a first, then the
    elements of b not already seen; duplicates removed. '''
    union = []
    for source in (a, b):
        for item in source:
            if item not in union:
                union.append(item)
    return union
+
def list_intersection(a, b):
    ''' Order-preserving intersection: elements of a that also occur in b,
    each reported once, in a's order. '''
    common = []
    for item in a:
        if item in common:
            # already recorded once
            continue
        if item in b:
            common.append(item)
    return common
+
def list_difference(a, b):
    ''' Order-preserving symmetric difference: elements only in a (in a's
    order) followed by elements only in b, duplicates removed. '''
    uniques = []
    for source, other in ((a, b), (b, a)):
        for item in source:
            if item not in other and item not in uniques:
                uniques.append(item)
    return uniques
+
def contains_vars(data):
    '''
    returns True if the data contains a variable pattern
    '''
    # either legacy $foo style or Jinja2 {{ foo }} style markers
    return any(marker in data for marker in ("$", "{{"))
+
+def safe_eval(expr, locals={}, include_exceptions=False):
+ '''
+ This is intended for allowing things like:
+ with_items: a_list_variable
+
+ Where Jinja2 would return a string but we do not want to allow it to
+ call functions (outside of Jinja2, where the env is constrained). If
+ the input data to this function came from an untrusted (remote) source,
+ it should first be run through _clean_data_struct() to ensure the data
+ is further sanitized prior to evaluation.
+
+ Based on:
+ http://stackoverflow.com/questions/12523516/using-ast-and-whitelists-to-make-pythons-eval-safe
+ '''
+
+ # this is the whitelist of AST nodes we are going to
+ # allow in the evaluation. Any node type other than
+ # those listed here will raise an exception in our custom
+ # visitor class defined below.
+ SAFE_NODES = set(
+ (
+ ast.Add,
+ ast.BinOp,
+ ast.Call,
+ ast.Compare,
+ ast.Dict,
+ ast.Div,
+ ast.Expression,
+ ast.List,
+ ast.Load,
+ ast.Mult,
+ ast.Num,
+ ast.Name,
+ ast.Str,
+ ast.Sub,
+ ast.Tuple,
+ ast.UnaryOp,
+ )
+ )
+
+ # AST node types were expanded after 2.6
+ if not sys.version.startswith('2.6'):
+ SAFE_NODES.union(
+ set(
+ (ast.Set,)
+ )
+ )
+
+ filter_list = []
+ for filter in filter_loader.all():
+ filter_list.extend(filter.filters().keys())
+
+ CALL_WHITELIST = C.DEFAULT_CALLABLE_WHITELIST + filter_list
+
+ class CleansingNodeVisitor(ast.NodeVisitor):
+ def generic_visit(self, node, inside_call=False):
+ if type(node) not in SAFE_NODES:
+ raise Exception("invalid expression (%s)" % expr)
+ elif isinstance(node, ast.Call):
+ inside_call = True
+ elif isinstance(node, ast.Name) and inside_call:
+ if hasattr(builtin, node.id) and node.id not in CALL_WHITELIST:
+ raise Exception("invalid function: %s" % node.id)
+ # iterate over all child nodes
+ for child_node in ast.iter_child_nodes(node):
+ self.generic_visit(child_node, inside_call)
+
+ if not isinstance(expr, basestring):
+ # already templated to a datastructure, perhaps?
+ if include_exceptions:
+ return (expr, None)
+ return expr
+
+ cnv = CleansingNodeVisitor()
+ try:
+ parsed_tree = ast.parse(expr, mode='eval')
+ cnv.visit(parsed_tree)
+ compiled = compile(parsed_tree, expr, 'eval')
+ result = eval(compiled, {}, locals)
+
+ if include_exceptions:
+ return (result, None)
+ else:
+ return result
+ except SyntaxError, e:
+ # special handling for syntax errors, we just return
+ # the expression string back as-is
+ if include_exceptions:
+ return (expr, None)
+ return expr
+ except Exception, e:
+ if include_exceptions:
+ return (expr, e)
+ return expr
+
+
def listify_lookup_plugin_terms(terms, basedir, inject):
    '''
    Coerce a lookup/with_* plugin argument into a list.

    terms may already be a list, or a string naming a variable (templated
    with Jinja2 here), or a string that textually contains a list/dict
    literal (converted back to a real type via safe_eval).
    '''

    from ansible.utils import template

    if isinstance(terms, basestring):
        # someone did:
        # with_items: alist
        # OR
        # with_items: {{ alist }}

        stripped = terms.strip()
        # skip templating for literal containers, absolute paths, set([..])
        # literals, or strings that contain a lookup() expression
        if not (stripped.startswith('{') or stripped.startswith('[')) and \
            not stripped.startswith("/") and \
            not stripped.startswith('set([') and \
            not LOOKUP_REGEX.search(terms):
            # if not already a list, get ready to evaluate with Jinja2
            # not sure why the "/" is in above code :)
            try:
                new_terms = template.template(basedir, "{{ %s }}" % terms, inject)
                if isinstance(new_terms, basestring) and "{{" in new_terms:
                    # template did not resolve; keep the original string
                    pass
                else:
                    terms = new_terms
            except:
                # best-effort: an untemplatable string falls through unchanged
                pass

    # NOTE: `in` works here for both str and list forms of terms
    if '{' in terms or '[' in terms:
        # Jinja2 already evaluated a variable to a list.
        # Jinja2-ified list needs to be converted back to a real type
        # TODO: something a bit less heavy than eval
        return safe_eval(terms)

    if isinstance(terms, basestring):
        # a plain scalar string becomes a one-element list
        terms = [ terms ]

    return terms
+
def combine_vars(a, b):
    ''' Combine two variable dicts per the configured hash behaviour:
    recursive merge_hash when "merge", otherwise b's keys replace a's. '''

    _validate_both_dicts(a, b)

    if C.DEFAULT_HASH_BEHAVIOUR == "merge":
        return merge_hash(a, b)
    else:
        # Python 2 idiom: items() returns lists; on key conflict b wins
        return dict(a.items() + b.items())
+
def random_password(length=20, chars=C.DEFAULT_PASSWORD_CHARS):
    '''Return a random password string of length containing only chars.'''

    # rejection-sample one os.urandom byte at a time: only bytes that fall
    # inside `chars` are kept, so the choice over chars stays uniform
    chosen = []
    while len(chosen) < length:
        candidate = os.urandom(1)
        if candidate in chars:
            chosen.append(candidate)

    return ''.join(chosen)
+
def before_comment(msg):
    ''' what's the part of a string before a comment? '''
    # shield escaped hashes behind a placeholder, cut at the first real
    # comment marker, then restore the escaped ones as literal '#'
    shielded = msg.replace("\#", "**NOT_A_COMMENT**")
    head = shielded.partition("#")[0]
    return head.replace("**NOT_A_COMMENT**", "#")
+
def load_vars(basepath, results, vault_password=None):
    """
    Load variables from any potential yaml filename combinations of basepath,
    returning result.

    Tries basepath + each configured YAML extension; exactly one match is
    allowed.  Variables found are combined into `results` via combine_vars.
    """

    # candidate filenames: basepath plus each extension (including '')
    paths_to_check = [ "".join([basepath, ext])
                       for ext in C.YAML_FILENAME_EXTENSIONS ]

    found_paths = []

    for path in paths_to_check:
        found, results = _load_vars_from_path(path, results, vault_password=vault_password)
        if found:
            found_paths.append(path)


    # disallow the potentially confusing situation that there are multiple
    # variable files for the same name. For example if both group_vars/all.yml
    # and group_vars/all.yaml
    if len(found_paths) > 1:
        raise errors.AnsibleError("Multiple variable files found. "
            "There should only be one. %s" % ( found_paths, ))

    return results
+
+## load variables from yaml files/dirs
+# e.g. host/group_vars
+#
def _load_vars_from_path(path, results, vault_password=None):
    """
    Robustly access the file at path and load variables, carefully reporting
    errors in a friendly/informative way.

    Return the tuple (found, new_results, )
    """

    try:
        # in the case of a symbolic link, we want the stat of the link itself,
        # not its target
        pathstat = os.lstat(path)
    except os.error, err:
        # most common case is that nothing exists at that path.
        if err.errno == errno.ENOENT:
            return False, results
        # otherwise this is a condition we should report to the user
        raise errors.AnsibleError(
            "%s is not accessible: %s."
            " Please check its permissions." % ( path, err.strerror))

    # symbolic link
    if stat.S_ISLNK(pathstat.st_mode):
        try:
            target = os.path.realpath(path)
        except os.error, err2:
            raise errors.AnsibleError("The symbolic link at %s "
                "is not readable: %s. Please check its permissions."
                % (path, err2.strerror, ))
        # follow symbolic link chains by recursing, so we repeat the same
        # permissions checks above and provide useful errors.
        return _load_vars_from_path(target, results, vault_password)

    # directory
    if stat.S_ISDIR(pathstat.st_mode):

        # support organizing variables across multiple files in a directory
        return True, _load_vars_from_folder(path, results, vault_password=vault_password)

    # regular file
    elif stat.S_ISREG(pathstat.st_mode):
        data = parse_yaml_from_file(path, vault_password=vault_password)
        # only a top-level mapping (or empty file) is a valid vars file
        if data and type(data) != dict:
            raise errors.AnsibleError(
                "%s must be stored as a dictionary/hash" % path)
        elif data is None:
            data = {}

        # combine vars overrides by default but can be configured to do a
        # hash merge in settings
        results = combine_vars(results, data)
        return True, results

    # something else? could be a fifo, socket, device, etc.
    else:
        raise errors.AnsibleError("Expected a variable file or directory "
            "but found a non-file object at path %s" % (path, ))
+
def _load_vars_from_folder(folder_path, results, vault_password=None):
    """
    Load all variables within a folder recursively.
    """

    # this function and _load_vars_from_path are mutually recursive

    try:
        names = os.listdir(folder_path)
    except os.error, err:
        raise errors.AnsibleError(
            "This folder cannot be listed: %s: %s."
            % ( folder_path, err.strerror))

    # evaluate files in a stable order rather than whatever order the
    # filesystem lists them.
    names.sort()

    # do not parse hidden files or dirs, e.g. .svn/
    paths = [os.path.join(folder_path, name) for name in names if not name.startswith('.')]
    for path in paths:
        # accumulate into results; 'found' flag is irrelevant here
        _found, results = _load_vars_from_path(path, results, vault_password=vault_password)
    return results
+
def update_hash(hash, key, new_value):
    ''' used to avoid nested .update calls on the parent '''
    # NOTE: parameter name shadows the builtin hash(); kept for
    # keyword-argument compatibility with existing callers
    hash.setdefault(key, {}).update(new_value)
+
def censor_unlogged_data(data):
    '''
    used when the no_log: True attribute is passed to a task to keep data from a callback.
    NOT intended to prevent variable registration, but only things from showing up on
    screen
    '''
    new_data = {}
    # keep only status-related keys; everything else is dropped from display
    for (x,y) in data.iteritems():
        if x in [ 'skipped', 'changed', 'failed', 'rc' ]:
            new_data[x] = y
    new_data['censored'] = 'results hidden due to no_log parameter'
    return new_data
+
def check_mutually_exclusive_privilege(options, parser):
    ''' Error out if more than one of the su/sudo/become option families is
    used on the same command line. '''

    # privilege escalation command line arguments need to be mutually exclusive
    # NOTE: relies on `and` binding tighter than `or` -- this is three
    # pairwise (family_a) and (family_b) tests OR'd together
    if (options.su or options.su_user or options.ask_su_pass) and \
        (options.sudo or options.sudo_user or options.ask_sudo_pass) or \
        (options.su or options.su_user or options.ask_su_pass) and \
        (options.become or options.become_user or options.become_ask_pass) or \
        (options.sudo or options.sudo_user or options.ask_sudo_pass) and \
        (options.become or options.become_user or options.become_ask_pass):

            parser.error("Sudo arguments ('--sudo', '--sudo-user', and '--ask-sudo-pass') "
                         "and su arguments ('-su', '--su-user', and '--ask-su-pass') "
                         "and become arguments ('--become', '--become-user', and '--ask-become-pass')"
                         " are exclusive of each other")
+
+
diff --git a/lib/ansible/utils/cmd_functions.py b/v1/ansible/utils/cmd_functions.py
similarity index 100%
rename from lib/ansible/utils/cmd_functions.py
rename to v1/ansible/utils/cmd_functions.py
diff --git a/lib/ansible/utils/display_functions.py b/v1/ansible/utils/display_functions.py
similarity index 100%
rename from lib/ansible/utils/display_functions.py
rename to v1/ansible/utils/display_functions.py
diff --git a/v2/ansible/utils/hashing.py b/v1/ansible/utils/hashing.py
similarity index 92%
rename from v2/ansible/utils/hashing.py
rename to v1/ansible/utils/hashing.py
index 5e378db79f..a7d142e5bd 100644
--- a/v2/ansible/utils/hashing.py
+++ b/v1/ansible/utils/hashing.py
@@ -20,7 +20,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
-from ansible.errors import AnsibleError
# Note, sha1 is the only hash algorithm compatible with python2.4 and with
# FIPS-140 mode (as of 11-2014)
@@ -44,8 +43,6 @@ def secure_hash_s(data, hash_func=sha1):
digest = hash_func()
try:
- if not isinstance(data, basestring):
- data = "%s" % data
digest.update(data)
except UnicodeEncodeError:
digest.update(data.encode('utf-8'))
@@ -65,8 +62,8 @@ def secure_hash(filename, hash_func=sha1):
digest.update(block)
block = infile.read(blocksize)
infile.close()
- except IOError as e:
- raise AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
+ except IOError, e:
+ raise errors.AnsibleError("error while accessing the file %s, error was: %s" % (filename, e))
return digest.hexdigest()
# The checksum algorithm must match with the algorithm in ShellModule.checksum() method
diff --git a/v2/ansible/utils/module_docs.py b/v1/ansible/utils/module_docs.py
similarity index 96%
rename from v2/ansible/utils/module_docs.py
rename to v1/ansible/utils/module_docs.py
index 632b4a00c2..ee99af2cb5 100644
--- a/v2/ansible/utils/module_docs.py
+++ b/v1/ansible/utils/module_docs.py
@@ -23,7 +23,7 @@ import ast
import yaml
import traceback
-from ansible.plugins import fragment_loader
+from ansible import utils
# modules that are ok that they do not have documentation strings
BLACKLIST_MODULES = [
@@ -66,7 +66,7 @@ def get_docstring(filename, verbose=False):
if fragment_slug != 'doesnotexist':
- fragment_class = fragment_loader.get(fragment_name)
+ fragment_class = utils.plugins.fragment_loader.get(fragment_name)
assert fragment_class is not None
fragment_yaml = getattr(fragment_class, fragment_var, '{}')
diff --git a/v1/ansible/utils/module_docs_fragments/__init__.py b/v1/ansible/utils/module_docs_fragments/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/lib/ansible/utils/module_docs_fragments/aws.py b/v1/ansible/utils/module_docs_fragments/aws.py
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments/aws.py
rename to v1/ansible/utils/module_docs_fragments/aws.py
diff --git a/lib/ansible/utils/module_docs_fragments/cloudstack.py b/v1/ansible/utils/module_docs_fragments/cloudstack.py
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments/cloudstack.py
rename to v1/ansible/utils/module_docs_fragments/cloudstack.py
diff --git a/lib/ansible/utils/module_docs_fragments/files.py b/v1/ansible/utils/module_docs_fragments/files.py
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments/files.py
rename to v1/ansible/utils/module_docs_fragments/files.py
diff --git a/lib/ansible/utils/module_docs_fragments/openstack.py b/v1/ansible/utils/module_docs_fragments/openstack.py
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments/openstack.py
rename to v1/ansible/utils/module_docs_fragments/openstack.py
diff --git a/lib/ansible/utils/module_docs_fragments/rackspace.py b/v1/ansible/utils/module_docs_fragments/rackspace.py
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments/rackspace.py
rename to v1/ansible/utils/module_docs_fragments/rackspace.py
diff --git a/lib/ansible/utils/plugins.py b/v1/ansible/utils/plugins.py
similarity index 100%
rename from lib/ansible/utils/plugins.py
rename to v1/ansible/utils/plugins.py
diff --git a/lib/ansible/utils/string_functions.py b/v1/ansible/utils/string_functions.py
similarity index 100%
rename from lib/ansible/utils/string_functions.py
rename to v1/ansible/utils/string_functions.py
diff --git a/lib/ansible/utils/su_prompts.py b/v1/ansible/utils/su_prompts.py
similarity index 100%
rename from lib/ansible/utils/su_prompts.py
rename to v1/ansible/utils/su_prompts.py
diff --git a/lib/ansible/utils/template.py b/v1/ansible/utils/template.py
similarity index 100%
rename from lib/ansible/utils/template.py
rename to v1/ansible/utils/template.py
diff --git a/v2/ansible/utils/unicode.py b/v1/ansible/utils/unicode.py
similarity index 93%
rename from v2/ansible/utils/unicode.py
rename to v1/ansible/utils/unicode.py
index 2cff2e5e45..7bd035c007 100644
--- a/v2/ansible/utils/unicode.py
+++ b/v1/ansible/utils/unicode.py
@@ -19,8 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from six import string_types, text_type, binary_type, PY3
-
# to_bytes and to_unicode were written by Toshio Kuratomi for the
# python-kitchen library https://pypi.python.org/pypi/kitchen
# They are licensed in kitchen under the terms of the GPLv2+
@@ -37,9 +35,6 @@ _LATIN1_ALIASES = frozenset(('latin-1', 'LATIN-1', 'latin1', 'LATIN1',
# EXCEPTION_CONVERTERS is defined below due to using to_unicode
-if PY3:
- basestring = (str, bytes)
-
def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
'''Convert an object into a :class:`unicode` string
@@ -94,12 +89,12 @@ def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
# Could use isbasestring/isunicode here but we want this code to be as
# fast as possible
if isinstance(obj, basestring):
- if isinstance(obj, text_type):
+ if isinstance(obj, unicode):
return obj
if encoding in _UTF8_ALIASES:
- return text_type(obj, 'utf-8', errors)
+ return unicode(obj, 'utf-8', errors)
if encoding in _LATIN1_ALIASES:
- return text_type(obj, 'latin-1', errors)
+ return unicode(obj, 'latin-1', errors)
return obj.decode(encoding, errors)
if not nonstring:
@@ -115,19 +110,19 @@ def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
simple = None
if not simple:
try:
- simple = text_type(obj)
+ simple = str(obj)
except UnicodeError:
try:
simple = obj.__str__()
except (UnicodeError, AttributeError):
simple = u''
- if isinstance(simple, binary_type):
- return text_type(simple, encoding, errors)
+ if isinstance(simple, str):
+ return unicode(simple, encoding, errors)
return simple
elif nonstring in ('repr', 'strict'):
obj_repr = repr(obj)
- if isinstance(obj_repr, binary_type):
- obj_repr = text_type(obj_repr, encoding, errors)
+ if isinstance(obj_repr, str):
+ obj_repr = unicode(obj_repr, encoding, errors)
if nonstring == 'repr':
return obj_repr
raise TypeError('to_unicode was given "%(obj)s" which is neither'
@@ -203,19 +198,19 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
# Could use isbasestring, isbytestring here but we want this to be as fast
# as possible
if isinstance(obj, basestring):
- if isinstance(obj, binary_type):
+ if isinstance(obj, str):
return obj
return obj.encode(encoding, errors)
if not nonstring:
nonstring = 'simplerepr'
if nonstring == 'empty':
- return b''
+ return ''
elif nonstring == 'passthru':
return obj
elif nonstring == 'simplerepr':
try:
- simple = binary_type(obj)
+ simple = str(obj)
except UnicodeError:
try:
simple = obj.__str__()
@@ -225,19 +220,19 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
try:
simple = obj.__unicode__()
except (AttributeError, UnicodeError):
- simple = b''
- if isinstance(simple, text_type):
+ simple = ''
+ if isinstance(simple, unicode):
simple = simple.encode(encoding, 'replace')
return simple
elif nonstring in ('repr', 'strict'):
try:
obj_repr = obj.__repr__()
except (AttributeError, UnicodeError):
- obj_repr = b''
- if isinstance(obj_repr, text_type):
+ obj_repr = ''
+ if isinstance(obj_repr, unicode):
obj_repr = obj_repr.encode(encoding, errors)
else:
- obj_repr = binary_type(obj_repr)
+ obj_repr = str(obj_repr)
if nonstring == 'repr':
return obj_repr
raise TypeError('to_bytes was given "%(obj)s" which is neither'
diff --git a/v1/ansible/utils/vault.py b/v1/ansible/utils/vault.py
new file mode 100644
index 0000000000..842688a2c1
--- /dev/null
+++ b/v1/ansible/utils/vault.py
@@ -0,0 +1,585 @@
+# (c) 2014, James Tanner
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+# NOTE: the paragraph previously here described ansible-pull and was a
+# copy/paste leftover. This module implements ansible-vault: utilities for
+# encrypting and decrypting vault files with a shared password (see the
+# VaultLib and VaultEditor classes below).
+
+import os
+import shlex
+import shutil
+import tempfile
+from io import BytesIO
+from subprocess import call
+from ansible import errors
+from hashlib import sha256
+
+# Note: Only used for loading obsolete VaultAES files. All files are written
+# using the newer VaultAES256 which does not require md5
+try:
+ from hashlib import md5
+except ImportError:
+ try:
+ from md5 import md5
+ except ImportError:
+ # MD5 unavailable. Possibly FIPS mode
+ md5 = None
+
+from binascii import hexlify
+from binascii import unhexlify
+from ansible import constants as C
+
+try:
+ from Crypto.Hash import SHA256, HMAC
+ HAS_HASH = True
+except ImportError:
+ HAS_HASH = False
+
+# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
+try:
+ from Crypto.Util import Counter
+ HAS_COUNTER = True
+except ImportError:
+ HAS_COUNTER = False
+
+# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
+try:
+ from Crypto.Protocol.KDF import PBKDF2
+ HAS_PBKDF2 = True
+except ImportError:
+ HAS_PBKDF2 = False
+
+# AES IMPORTS
+try:
+ from Crypto.Cipher import AES as AES
+ HAS_AES = True
+except ImportError:
+ HAS_AES = False
+
+CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
+
+HEADER='$ANSIBLE_VAULT'
+CIPHER_WHITELIST=['AES', 'AES256']
+
class VaultLib(object):
    ''' File-format layer for vault data: adds/strips the $ANSIBLE_VAULT
    header and dispatches to a whitelisted Vault* cipher class for the
    actual encryption/decryption. '''

    def __init__(self, password):
        # shared vault password; may be None (decrypt will refuse)
        self.password = password
        # cipher is read from the header on decrypt, defaulted on encrypt
        self.cipher_name = None
        self.version = '1.1'

    def is_encrypted(self, data):
        ''' True when data carries the $ANSIBLE_VAULT header. '''
        if data.startswith(HEADER):
            return True
        else:
            return False

    def encrypt(self, data):
        ''' Encrypt plaintext data and return it framed with the vault header. '''

        if self.is_encrypted(data):
            raise errors.AnsibleError("data is already encrypted")

        if not self.cipher_name:
            # all new data is written with AES256
            self.cipher_name = "AES256"
            #raise errors.AnsibleError("the cipher must be set before encrypting data")

        # resolve the cipher class by name from this module's globals
        if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
            cipher = globals()['Vault' + self.cipher_name]
            this_cipher = cipher()
        else:
            raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)

        """
        # combine sha + data
        this_sha = sha256(data).hexdigest()
        tmp_data = this_sha + "\n" + data
        """

        # encrypt sha + data
        enc_data = this_cipher.encrypt(data, self.password)

        # add header
        tmp_data = self._add_header(enc_data)
        return tmp_data

    def decrypt(self, data):
        ''' Strip the vault header and decrypt data with the header's cipher. '''
        if self.password is None:
            raise errors.AnsibleError("A vault password must be specified to decrypt data")

        if not self.is_encrypted(data):
            raise errors.AnsibleError("data is not encrypted")

        # clean out header
        data = self._split_header(data)

        # create the cipher object
        if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
            cipher = globals()['Vault' + self.cipher_name]
            this_cipher = cipher()
        else:
            raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)

        # try to unencrypt data
        data = this_cipher.decrypt(data, self.password)
        if data is None:
            raise errors.AnsibleError("Decryption failed")

        return data

    def _add_header(self, data):
        # combine header and encrypted data in 80 char columns

        #tmpdata = hexlify(data)
        tmpdata = [data[i:i+80] for i in range(0, len(data), 80)]

        if not self.cipher_name:
            raise errors.AnsibleError("the cipher must be set before adding a header")

        # header line: $ANSIBLE_VAULT;<version>;<cipher>
        dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n"

        for l in tmpdata:
            dirty_data += l + '\n'

        return dirty_data


    def _split_header(self, data):
        # used by decrypt

        # first line is the header; remember its version/cipher on self
        tmpdata = data.split('\n')
        tmpheader = tmpdata[0].strip().split(';')

        self.version = str(tmpheader[1].strip())
        self.cipher_name = str(tmpheader[2].strip())
        clean_data = '\n'.join(tmpdata[1:])

        """
        # strip out newline, join, unhex
        clean_data = [ x.strip() for x in clean_data ]
        clean_data = unhexlify(''.join(clean_data))
        """

        return clean_data

    def __enter__(self):
        # context-manager support; no resources to acquire
        return self

    def __exit__(self, *err):
        # nothing to release
        pass
+
class VaultEditor(object):
    # uses helper methods for write_file(self, filename, data)
    # to write a file so that code isn't duplicated for simple
    # file I/O, ditto read_file(self, filename) and launch_editor(self, filename)
    # ... "Don't Repeat Yourself", etc.

    def __init__(self, cipher_name, password, filename):
        # instantiates a member variable for VaultLib
        self.cipher_name = cipher_name
        self.password = password
        self.filename = filename

    def _edit_file_helper(self, existing_data=None, cipher=None):
        ''' Open $EDITOR on a tempfile (optionally pre-filled with
        existing_data), then encrypt the edited result into self.filename. '''
        # make sure the umask is set to a sane value
        old_umask = os.umask(0o077)

        # Create a tempfile
        _, tmp_path = tempfile.mkstemp()

        if existing_data:
            self.write_data(existing_data, tmp_path)

        # drop the user into an editor on the tmp file
        try:
            call(self._editor_shell_command(tmp_path))
        except OSError, e:
            raise Exception("Failed to open editor (%s): %s" % (self._editor_shell_command(tmp_path)[0],str(e)))
        tmpdata = self.read_data(tmp_path)

        # create new vault
        this_vault = VaultLib(self.password)
        if cipher:
            this_vault.cipher_name = cipher

        # encrypt new data and write out to tmp
        enc_data = this_vault.encrypt(tmpdata)
        self.write_data(enc_data, tmp_path)

        # shuffle tmp file into place
        self.shuffle_files(tmp_path, self.filename)

        # and restore umask
        os.umask(old_umask)

    def create_file(self):
        """ create a new encrypted file """

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        if os.path.isfile(self.filename):
            raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)

        # Let the user specify contents and save file
        self._edit_file_helper(cipher=self.cipher_name)

    def decrypt_file(self):
        ''' Decrypt self.filename in place. '''

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        if not os.path.isfile(self.filename):
            raise errors.AnsibleError("%s does not exist" % self.filename)

        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        if this_vault.is_encrypted(tmpdata):
            dec_data = this_vault.decrypt(tmpdata)
            if dec_data is None:
                raise errors.AnsibleError("Decryption failed")
            else:
                self.write_data(dec_data, self.filename)
        else:
            raise errors.AnsibleError("%s is not encrypted" % self.filename)

    def edit_file(self):
        ''' Decrypt, let the user edit, then re-encrypt self.filename. '''

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        # decrypt to tmpfile
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)

        # let the user edit the data and save
        self._edit_file_helper(existing_data=dec_data)
        ###we want the cipher to default to AES256 (get rid of files
        # encrypted with the AES cipher)
        #self._edit_file_helper(existing_data=dec_data, cipher=this_vault.cipher_name)


    def view_file(self):
        ''' Decrypt to a 0600 tempfile, page it with $PAGER, then remove it. '''

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        # decrypt to tmpfile
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)
        old_umask = os.umask(0o077)
        _, tmp_path = tempfile.mkstemp()
        self.write_data(dec_data, tmp_path)
        os.umask(old_umask)

        # drop the user into pager on the tmp file
        call(self._pager_shell_command(tmp_path))
        os.remove(tmp_path)

    def encrypt_file(self):
        ''' Encrypt self.filename in place with self.cipher_name. '''

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        if not os.path.isfile(self.filename):
            raise errors.AnsibleError("%s does not exist" % self.filename)

        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        this_vault.cipher_name = self.cipher_name
        if not this_vault.is_encrypted(tmpdata):
            enc_data = this_vault.encrypt(tmpdata)
            self.write_data(enc_data, self.filename)
        else:
            raise errors.AnsibleError("%s is already encrypted" % self.filename)

    def rekey_file(self, new_password):
        ''' Decrypt with the current password and re-encrypt with
        new_password (using the default cipher). '''

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        # decrypt
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)

        # create new vault
        new_vault = VaultLib(new_password)

        # we want to force cipher to the default
        #new_vault.cipher_name = this_vault.cipher_name

        # re-encrypt data and re-write file
        enc_data = new_vault.encrypt(dec_data)
        self.write_data(enc_data, self.filename)

    def read_data(self, filename):
        ''' Read filename in binary mode and return its contents. '''
        f = open(filename, "rb")
        tmpdata = f.read()
        f.close()
        return tmpdata

    def write_data(self, data, filename):
        ''' Replace filename with data (removes any existing file first). '''
        if os.path.isfile(filename):
            os.remove(filename)
        f = open(filename, "wb")
        f.write(data)
        f.close()

    def shuffle_files(self, src, dest):
        # overwrite dest with src
        if os.path.isfile(dest):
            os.remove(dest)
        shutil.move(src, dest)

    def _editor_shell_command(self, filename):
        # $EDITOR may contain arguments, hence shlex.split; default vim
        EDITOR = os.environ.get('EDITOR','vim')
        editor = shlex.split(EDITOR)
        editor.append(filename)

        return editor

    def _pager_shell_command(self, filename):
        # $PAGER may contain arguments, hence shlex.split; default less
        PAGER = os.environ.get('PAGER','less')
        pager = shlex.split(PAGER)
        pager.append(filename)

        return pager
+
+########################################
+# CIPHERS #
+########################################
+
class VaultAES(object):

    # this version has been obsoleted by the VaultAES256 class
    # which uses encrypt-then-mac (fixing order) and also improving the KDF used
    # code remains for upgrade purposes only
    # http://stackoverflow.com/a/16761459

    def __init__(self):
        if not md5:
            raise errors.AnsibleError('md5 hash is unavailable (Could be due to FIPS mode). Legacy VaultAES format is unavailable.')
        if not HAS_AES:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):

        """ Create a key and an initialization vector """

        # iterated-md5 key stretching (OpenSSL EVP_BytesToKey-style chain);
        # weak by modern standards -- legacy format only
        d = d_i = ''
        while len(d) < key_length + iv_length:
            d_i = md5(d_i + password + salt).digest()
            d += d_i

        key = d[:key_length]
        iv = d[key_length:key_length+iv_length]

        return key, iv

    def encrypt(self, data, password, key_length=32):

        """ Read plaintext data from in_file and write encrypted to out_file """


        # combine sha + data
        this_sha = sha256(data).hexdigest()
        tmp_data = this_sha + "\n" + data

        in_file = BytesIO(tmp_data)
        in_file.seek(0)
        out_file = BytesIO()

        bs = AES.block_size

        # Get a block of random data. EL does not have Crypto.Random.new()
        # so os.urandom is used for cross platform purposes
        salt = os.urandom(bs - len('Salted__'))

        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        # OpenSSL-compatible 'Salted__' + salt prefix
        out_file.write('Salted__' + salt)
        finished = False
        while not finished:
            chunk = in_file.read(1024 * bs)
            if len(chunk) == 0 or len(chunk) % bs != 0:
                # PKCS#7-style padding: padding_length bytes of chr(padding_length)
                padding_length = (bs - len(chunk) % bs) or bs
                chunk += padding_length * chr(padding_length)
                finished = True
            out_file.write(cipher.encrypt(chunk))

        out_file.seek(0)
        enc_data = out_file.read()
        # hex-encode so VaultLib can frame it in 80-char text columns
        tmp_data = hexlify(enc_data)

        return tmp_data


    def decrypt(self, data, password, key_length=32):

        """ Read encrypted data from in_file and write decrypted to out_file """

        # http://stackoverflow.com/a/14989032

        # undo the 80-column framing and hex encoding
        data = ''.join(data.split('\n'))
        data = unhexlify(data)

        in_file = BytesIO(data)
        in_file.seek(0)
        out_file = BytesIO()

        bs = AES.block_size
        salt = in_file.read(bs)[len('Salted__'):]
        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        next_chunk = ''
        finished = False

        # one-chunk lookahead so the final chunk's padding can be stripped
        while not finished:
            chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
            if len(next_chunk) == 0:
                padding_length = ord(chunk[-1])
                chunk = chunk[:-padding_length]
                finished = True
            out_file.write(chunk)

        # reset the stream pointer to the beginning
        out_file.seek(0)
        new_data = out_file.read()

        # split out sha and verify decryption
        split_data = new_data.split("\n")
        this_sha = split_data[0]
        this_data = '\n'.join(split_data[1:])
        test_sha = sha256(this_data).hexdigest()

        if this_sha != test_sha:
            raise errors.AnsibleError("Decryption failed")

        #return out_file.read()
        return this_data
+
+
+class VaultAES256(object):
+
+ """
+ Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
+ Keys are derived using PBKDF2
+ """
+
+ # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
+
+ def __init__(self):
+
+ if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
+ raise errors.AnsibleError(CRYPTO_UPGRADE)
+
+ def gen_key_initctr(self, password, salt):
+ # 16 for AES 128, 32 for AES256
+ keylength = 32
+
+ # match the size used for counter.new to avoid extra work
+ ivlength = 16
+
+ hash_function = SHA256
+
+ # make two keys and one iv
+ pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
+
+
+ derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
+ count=10000, prf=pbkdf2_prf)
+
+ key1 = derivedkey[:keylength]
+ key2 = derivedkey[keylength:(keylength * 2)]
+ iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
+
+ return key1, key2, hexlify(iv)
+
+
+ def encrypt(self, data, password):
+
+ salt = os.urandom(32)
+ key1, key2, iv = self.gen_key_initctr(password, salt)
+
+ # PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
+ bs = AES.block_size
+ padding_length = (bs - len(data) % bs) or bs
+ data += padding_length * chr(padding_length)
+
+ # COUNTER.new PARAMETERS
+ # 1) nbits (integer) - Length of the counter, in bits.
+ # 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
+
+ ctr = Counter.new(128, initial_value=long(iv, 16))
+
+ # AES.new PARAMETERS
+ # 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
+ # 2) MODE_CTR, is the recommended mode
+ # 3) counter=
+
+ cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
+
+ # ENCRYPT PADDED DATA
+ cryptedData = cipher.encrypt(data)
+
+ # COMBINE SALT, DIGEST AND DATA
+ hmac = HMAC.new(key2, cryptedData, SHA256)
+ message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) )
+ message = hexlify(message)
+ return message
+
+ def decrypt(self, data, password):
+
+ # SPLIT SALT, DIGEST, AND DATA
+ data = ''.join(data.split("\n"))
+ data = unhexlify(data)
+ salt, cryptedHmac, cryptedData = data.split("\n", 2)
+ salt = unhexlify(salt)
+ cryptedData = unhexlify(cryptedData)
+
+ key1, key2, iv = self.gen_key_initctr(password, salt)
+
+ # EXIT EARLY IF DIGEST DOESN'T MATCH
+ hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
+ if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
+ return None
+
+ # SET THE COUNTER AND THE CIPHER
+ ctr = Counter.new(128, initial_value=long(iv, 16))
+ cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
+
+ # DECRYPT PADDED DATA
+ decryptedData = cipher.decrypt(cryptedData)
+
+ # UNPAD DATA
+ padding_length = ord(decryptedData[-1])
+ decryptedData = decryptedData[:-padding_length]
+
+ return decryptedData
+
+ def is_equal(self, a, b):
+ # http://codahale.com/a-lesson-in-timing-attacks/
+ if len(a) != len(b):
+ return False
+
+ result = 0
+ for x, y in zip(a, b):
+ result |= ord(x) ^ ord(y)
+ return result == 0
+
+
diff --git a/v1/bin/ansible b/v1/bin/ansible
new file mode 100755
index 0000000000..7fec34ec81
--- /dev/null
+++ b/v1/bin/ansible
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+
+# (c) 2012, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+########################################################
+
+__requires__ = ['ansible']
+try:
+ import pkg_resources
+except Exception:
+ # Use pkg_resources to find the correct versions of libraries and set
+ # sys.path appropriately when there are multiversion installs. But we
+ # have code that better expresses the errors in the places where the code
+ # is actually used (the deps are optional for many code paths) so we don't
+ # want to fail here.
+ pass
+
+import os
+import sys
+
+from ansible.runner import Runner
+import ansible.constants as C
+from ansible import utils
+from ansible import errors
+from ansible import callbacks
+from ansible import inventory
+########################################################
+
+class Cli(object):
+ ''' code behind bin/ansible '''
+
+ # ----------------------------------------------
+
+ def __init__(self):
+ self.stats = callbacks.AggregateStats()
+ self.callbacks = callbacks.CliRunnerCallbacks()
+ if C.DEFAULT_LOAD_CALLBACK_PLUGINS:
+ callbacks.load_callback_plugins()
+
+ # ----------------------------------------------
+
+ def parse(self):
+ ''' create an options parser for bin/ansible '''
+
+ parser = utils.base_parser(
+ constants=C,
+ runas_opts=True,
+ subset_opts=True,
+ async_opts=True,
+ output_opts=True,
+ connect_opts=True,
+ check_opts=True,
+ diff_opts=False,
+ usage='%prog [options]'
+ )
+
+ parser.add_option('-a', '--args', dest='module_args',
+ help="module arguments", default=C.DEFAULT_MODULE_ARGS)
+ parser.add_option('-m', '--module-name', dest='module_name',
+ help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
+ default=C.DEFAULT_MODULE_NAME)
+
+ options, args = parser.parse_args()
+ self.callbacks.options = options
+
+ if len(args) == 0 or len(args) > 1:
+ parser.print_help()
+ sys.exit(1)
+
+        # privilege escalation command line arguments need to be mutually exclusive
+ utils.check_mutually_exclusive_privilege(options, parser)
+
+ if (options.ask_vault_pass and options.vault_password_file):
+ parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")
+
+ return (options, args)
+
+ # ----------------------------------------------
+
+ def run(self, options, args):
+ ''' use Runner lib to do SSH things '''
+
+ pattern = args[0]
+
+ sshpass = becomepass = vault_pass = become_method = None
+
+ # Never ask for an SSH password when we run with local connection
+ if options.connection == "local":
+ options.ask_pass = False
+ else:
+ options.ask_pass = options.ask_pass or C.DEFAULT_ASK_PASS
+
+ options.ask_vault_pass = options.ask_vault_pass or C.DEFAULT_ASK_VAULT_PASS
+
+ # become
+ utils.normalize_become_options(options)
+ prompt_method = utils.choose_pass_prompt(options)
+ (sshpass, becomepass, vault_pass) = utils.ask_passwords(ask_pass=options.ask_pass, become_ask_pass=options.become_ask_pass, ask_vault_pass=options.ask_vault_pass, become_method=prompt_method)
+
+ # read vault_pass from a file
+ if not options.ask_vault_pass and options.vault_password_file:
+ vault_pass = utils.read_vault_file(options.vault_password_file)
+
+ extra_vars = utils.parse_extra_vars(options.extra_vars, vault_pass)
+
+ inventory_manager = inventory.Inventory(options.inventory, vault_password=vault_pass)
+ if options.subset:
+ inventory_manager.subset(options.subset)
+ hosts = inventory_manager.list_hosts(pattern)
+
+ if len(hosts) == 0:
+ callbacks.display("No hosts matched", stderr=True)
+ sys.exit(0)
+
+ if options.listhosts:
+ for host in hosts:
+ callbacks.display(' %s' % host)
+ sys.exit(0)
+
+ if options.module_name in ['command','shell'] and not options.module_args:
+ callbacks.display("No argument passed to %s module" % options.module_name, color='red', stderr=True)
+ sys.exit(1)
+
+ if options.tree:
+ utils.prepare_writeable_dir(options.tree)
+
+ runner = Runner(
+ module_name=options.module_name,
+ module_path=options.module_path,
+ module_args=options.module_args,
+ remote_user=options.remote_user,
+ remote_pass=sshpass,
+ inventory=inventory_manager,
+ timeout=options.timeout,
+ private_key_file=options.private_key_file,
+ forks=options.forks,
+ pattern=pattern,
+ callbacks=self.callbacks,
+ transport=options.connection,
+ subset=options.subset,
+ check=options.check,
+ diff=options.check,
+ vault_pass=vault_pass,
+ become=options.become,
+ become_method=options.become_method,
+ become_pass=becomepass,
+ become_user=options.become_user,
+ extra_vars=extra_vars,
+ )
+
+ if options.seconds:
+ callbacks.display("background launch...\n\n", color='cyan')
+ results, poller = runner.run_async(options.seconds)
+ results = self.poll_while_needed(poller, options)
+ else:
+ results = runner.run()
+
+ return (runner, results)
+
+ # ----------------------------------------------
+
+ def poll_while_needed(self, poller, options):
+ ''' summarize results from Runner '''
+
+ # BACKGROUND POLL LOGIC when -B and -P are specified
+ if options.seconds and options.poll_interval > 0:
+ poller.wait(options.seconds, options.poll_interval)
+
+ return poller.results
+
+
+########################################################
+
+if __name__ == '__main__':
+ callbacks.display("", log_only=True)
+ callbacks.display(" ".join(sys.argv), log_only=True)
+ callbacks.display("", log_only=True)
+
+ cli = Cli()
+ (options, args) = cli.parse()
+ try:
+ (runner, results) = cli.run(options, args)
+ for result in results['contacted'].values():
+ if 'failed' in result or result.get('rc', 0) != 0:
+ sys.exit(2)
+ if results['dark']:
+ sys.exit(3)
+ except errors.AnsibleError, e:
+ # Generic handler for ansible specific errors
+ callbacks.display("ERROR: %s" % str(e), stderr=True, color='red')
+ sys.exit(1)
+
diff --git a/v1/bin/ansible-doc b/v1/bin/ansible-doc
new file mode 100755
index 0000000000..dff7cecce7
--- /dev/null
+++ b/v1/bin/ansible-doc
@@ -0,0 +1,337 @@
+#!/usr/bin/env python
+
+# (c) 2012, Jan-Piet Mens
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import sys
+import textwrap
+import re
+import optparse
+import datetime
+import subprocess
+import fcntl
+import termios
+import struct
+
+from ansible import utils
+from ansible.utils import module_docs
+import ansible.constants as C
+from ansible.utils import version
+import traceback
+
+MODULEDIR = C.DEFAULT_MODULE_PATH
+
+BLACKLIST_EXTS = ('.pyc', '.swp', '.bak', '~', '.rpm')
+IGNORE_FILES = [ "COPYING", "CONTRIBUTING", "LICENSE", "README", "VERSION"]
+
+_ITALIC = re.compile(r"I\(([^)]+)\)")
+_BOLD = re.compile(r"B\(([^)]+)\)")
+_MODULE = re.compile(r"M\(([^)]+)\)")
+_URL = re.compile(r"U\(([^)]+)\)")
+_CONST = re.compile(r"C\(([^)]+)\)")
+PAGER = 'less'
+LESS_OPTS = 'FRSX' # -F (quit-if-one-screen) -R (allow raw ansi control chars)
+ # -S (chop long lines) -X (disable termcap init and de-init)
+
+def pager_print(text):
+ ''' just print text '''
+ print text
+
+def pager_pipe(text, cmd):
+ ''' pipe text through a pager '''
+ if 'LESS' not in os.environ:
+ os.environ['LESS'] = LESS_OPTS
+ try:
+ cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
+ cmd.communicate(input=text)
+ except IOError:
+ pass
+ except KeyboardInterrupt:
+ pass
+
+def pager(text):
+ ''' find reasonable way to display text '''
+ # this is a much simpler form of what is in pydoc.py
+ if not sys.stdout.isatty():
+ pager_print(text)
+ elif 'PAGER' in os.environ:
+ if sys.platform == 'win32':
+ pager_print(text)
+ else:
+ pager_pipe(text, os.environ['PAGER'])
+ elif subprocess.call('(less --version) 2> /dev/null', shell = True) == 0:
+ pager_pipe(text, 'less')
+ else:
+ pager_print(text)
+
+def tty_ify(text):
+
+ t = _ITALIC.sub("`" + r"\1" + "'", text) # I(word) => `word'
+ t = _BOLD.sub("*" + r"\1" + "*", t) # B(word) => *word*
+ t = _MODULE.sub("[" + r"\1" + "]", t) # M(word) => [word]
+ t = _URL.sub(r"\1", t) # U(word) => word
+ t = _CONST.sub("`" + r"\1" + "'", t) # C(word) => `word'
+
+ return t
+
+def get_man_text(doc):
+
+ opt_indent=" "
+ text = []
+ text.append("> %s\n" % doc['module'].upper())
+
+ desc = " ".join(doc['description'])
+
+ text.append("%s\n" % textwrap.fill(tty_ify(desc), initial_indent=" ", subsequent_indent=" "))
+
+ if 'option_keys' in doc and len(doc['option_keys']) > 0:
+ text.append("Options (= is mandatory):\n")
+
+ for o in sorted(doc['option_keys']):
+ opt = doc['options'][o]
+
+ if opt.get('required', False):
+ opt_leadin = "="
+ else:
+ opt_leadin = "-"
+
+ text.append("%s %s" % (opt_leadin, o))
+
+ desc = " ".join(opt['description'])
+
+ if 'choices' in opt:
+ choices = ", ".join(str(i) for i in opt['choices'])
+ desc = desc + " (Choices: " + choices + ")"
+ if 'default' in opt:
+ default = str(opt['default'])
+ desc = desc + " [Default: " + default + "]"
+ text.append("%s\n" % textwrap.fill(tty_ify(desc), initial_indent=opt_indent,
+ subsequent_indent=opt_indent))
+
+ if 'notes' in doc and len(doc['notes']) > 0:
+ notes = " ".join(doc['notes'])
+ text.append("Notes:%s\n" % textwrap.fill(tty_ify(notes), initial_indent=" ",
+ subsequent_indent=opt_indent))
+
+
+ if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
+ req = ", ".join(doc['requirements'])
+ text.append("Requirements:%s\n" % textwrap.fill(tty_ify(req), initial_indent=" ",
+ subsequent_indent=opt_indent))
+
+ if 'examples' in doc and len(doc['examples']) > 0:
+ text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
+ for ex in doc['examples']:
+ text.append("%s\n" % (ex['code']))
+
+ if 'plainexamples' in doc and doc['plainexamples'] is not None:
+ text.append("EXAMPLES:")
+ text.append(doc['plainexamples'])
+ if 'returndocs' in doc and doc['returndocs'] is not None:
+ text.append("RETURN VALUES:")
+ text.append(doc['returndocs'])
+ text.append('')
+
+ return "\n".join(text)
+
+
+def get_snippet_text(doc):
+
+ text = []
+ desc = tty_ify(" ".join(doc['short_description']))
+ text.append("- name: %s" % (desc))
+ text.append(" action: %s" % (doc['module']))
+
+ for o in sorted(doc['options'].keys()):
+ opt = doc['options'][o]
+ desc = tty_ify(" ".join(opt['description']))
+
+ if opt.get('required', False):
+ s = o + "="
+ else:
+ s = o
+
+ text.append(" %-20s # %s" % (s, desc))
+ text.append('')
+
+ return "\n".join(text)
+
+def get_module_list_text(module_list):
+ tty_size = 0
+ if os.isatty(0):
+ tty_size = struct.unpack('HHHH',
+ fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack('HHHH', 0, 0, 0, 0)))[1]
+ columns = max(60, tty_size)
+ displace = max(len(x) for x in module_list)
+ linelimit = columns - displace - 5
+ text = []
+ deprecated = []
+ for module in sorted(set(module_list)):
+
+ if module in module_docs.BLACKLIST_MODULES:
+ continue
+
+ filename = utils.plugins.module_finder.find_plugin(module)
+
+ if filename is None:
+ continue
+ if filename.endswith(".ps1"):
+ continue
+ if os.path.isdir(filename):
+ continue
+
+ try:
+ doc, plainexamples, returndocs = module_docs.get_docstring(filename)
+ desc = tty_ify(doc.get('short_description', '?')).strip()
+ if len(desc) > linelimit:
+ desc = desc[:linelimit] + '...'
+
+ if module.startswith('_'): # Handle deprecated
+ deprecated.append("%-*s %-*.*s" % (displace, module[1:], linelimit, len(desc), desc))
+ else:
+ text.append("%-*s %-*.*s" % (displace, module, linelimit, len(desc), desc))
+ except:
+ traceback.print_exc()
+ sys.stderr.write("ERROR: module %s has a documentation error formatting or is missing documentation\n" % module)
+
+ if len(deprecated) > 0:
+ text.append("\nDEPRECATED:")
+ text.extend(deprecated)
+ return "\n".join(text)
+
+def find_modules(path, module_list):
+
+ if os.path.isdir(path):
+ for module in os.listdir(path):
+ if module.startswith('.'):
+ continue
+ elif os.path.isdir(module):
+ find_modules(module, module_list)
+ elif any(module.endswith(x) for x in BLACKLIST_EXTS):
+ continue
+ elif module.startswith('__'):
+ continue
+ elif module in IGNORE_FILES:
+ continue
+ elif module.startswith('_'):
+ fullpath = '/'.join([path,module])
+ if os.path.islink(fullpath): # avoids aliases
+ continue
+
+ module = os.path.splitext(module)[0] # removes the extension
+ module_list.append(module)
+
+def main():
+
+ p = optparse.OptionParser(
+ version=version("%prog"),
+ usage='usage: %prog [options] [module...]',
+ description='Show Ansible module documentation',
+ )
+
+ p.add_option("-M", "--module-path",
+ action="store",
+ dest="module_path",
+ default=MODULEDIR,
+ help="Ansible modules/ directory")
+ p.add_option("-l", "--list",
+ action="store_true",
+ default=False,
+ dest='list_dir',
+ help='List available modules')
+ p.add_option("-s", "--snippet",
+ action="store_true",
+ default=False,
+ dest='show_snippet',
+ help='Show playbook snippet for specified module(s)')
+ p.add_option('-v', action='version', help='Show version number and exit')
+
+ (options, args) = p.parse_args()
+
+ if options.module_path is not None:
+ for i in options.module_path.split(os.pathsep):
+ utils.plugins.module_finder.add_directory(i)
+
+ if options.list_dir:
+ # list modules
+ paths = utils.plugins.module_finder._get_paths()
+ module_list = []
+ for path in paths:
+ find_modules(path, module_list)
+
+ pager(get_module_list_text(module_list))
+ sys.exit()
+
+ if len(args) == 0:
+ p.print_help()
+
+ def print_paths(finder):
+ ''' Returns a string suitable for printing of the search path '''
+
+ # Uses a list to get the order right
+ ret = []
+ for i in finder._get_paths():
+ if i not in ret:
+ ret.append(i)
+ return os.pathsep.join(ret)
+
+ text = ''
+ for module in args:
+
+ filename = utils.plugins.module_finder.find_plugin(module)
+ if filename is None:
+ sys.stderr.write("module %s not found in %s\n" % (module, print_paths(utils.plugins.module_finder)))
+ continue
+
+ if any(filename.endswith(x) for x in BLACKLIST_EXTS):
+ continue
+
+ try:
+ doc, plainexamples, returndocs = module_docs.get_docstring(filename)
+ except:
+ traceback.print_exc()
+ sys.stderr.write("ERROR: module %s has a documentation error formatting or is missing documentation\n" % module)
+ continue
+
+ if doc is not None:
+
+ all_keys = []
+ for (k,v) in doc['options'].iteritems():
+ all_keys.append(k)
+ all_keys = sorted(all_keys)
+ doc['option_keys'] = all_keys
+
+ doc['filename'] = filename
+ doc['docuri'] = doc['module'].replace('_', '-')
+ doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
+ doc['plainexamples'] = plainexamples
+ doc['returndocs'] = returndocs
+
+ if options.show_snippet:
+ text += get_snippet_text(doc)
+ else:
+ text += get_man_text(doc)
+ else:
+ # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
+ # probably a quoting issue.
+ sys.stderr.write("ERROR: module %s missing documentation (or could not parse documentation)\n" % module)
+ pager(text)
+
+if __name__ == '__main__':
+ main()
diff --git a/v1/bin/ansible-galaxy b/v1/bin/ansible-galaxy
new file mode 100755
index 0000000000..a6d625671e
--- /dev/null
+++ b/v1/bin/ansible-galaxy
@@ -0,0 +1,957 @@
+#!/usr/bin/env python
+
+########################################################################
+#
+# (C) 2013, James Cammarata
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+########################################################################
+
+import datetime
+import json
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import urllib
+import urllib2
+import yaml
+
+from collections import defaultdict
+from distutils.version import LooseVersion
+from jinja2 import Environment
+from optparse import OptionParser
+
+import ansible.constants as C
+import ansible.utils
+from ansible.errors import AnsibleError
+
+default_meta_template = """---
+galaxy_info:
+ author: {{ author }}
+ description: {{description}}
+ company: {{ company }}
+ # If the issue tracker for your role is not on github, uncomment the
+ # next line and provide a value
+ # issue_tracker_url: {{ issue_tracker_url }}
+ # Some suggested licenses:
+ # - BSD (default)
+ # - MIT
+ # - GPLv2
+ # - GPLv3
+ # - Apache
+ # - CC-BY
+ license: {{ license }}
+ min_ansible_version: {{ min_ansible_version }}
+ #
+ # Below are all platforms currently available. Just uncomment
+ # the ones that apply to your role. If you don't see your
+ # platform on this list, let us know and we'll get it added!
+ #
+ #platforms:
+ {%- for platform,versions in platforms.iteritems() %}
+ #- name: {{ platform }}
+ # versions:
+ # - all
+ {%- for version in versions %}
+ # - {{ version }}
+ {%- endfor %}
+ {%- endfor %}
+ #
+ # Below are all categories currently available. Just as with
+ # the platforms above, uncomment those that apply to your role.
+ #
+ #categories:
+ {%- for category in categories %}
+ #- {{ category.name }}
+ {%- endfor %}
+dependencies: []
+ # List your role dependencies here, one per line.
+ # Be sure to remove the '[]' above if you add dependencies
+ # to this list.
+ {% for dependency in dependencies %}
+ #- {{ dependency }}
+ {% endfor %}
+
+"""
+
+default_readme_template = """Role Name
+=========
+
+A brief description of the role goes here.
+
+Requirements
+------------
+
+Any pre-requisites that may not be covered by Ansible itself or the role should be mentioned here. For instance, if the role uses the EC2 module, it may be a good idea to mention in this section that the boto package is required.
+
+Role Variables
+--------------
+
+A description of the settable variables for this role should go here, including any variables that are in defaults/main.yml, vars/main.yml, and any variables that can/should be set via parameters to the role. Any variables that are read from other roles and/or the global scope (ie. hostvars, group vars, etc.) should be mentioned here as well.
+
+Dependencies
+------------
+
+A list of other roles hosted on Galaxy should go here, plus any details in regards to parameters that may need to be set for other roles, or variables that are used from other roles.
+
+Example Playbook
+----------------
+
+Including an example of how to use your role (for instance, with variables passed in as parameters) is always nice for users too:
+
+ - hosts: servers
+ roles:
+ - { role: username.rolename, x: 42 }
+
+License
+-------
+
+BSD
+
+Author Information
+------------------
+
+An optional section for the role authors to include contact information, or a website (HTML is not allowed).
+"""
+
+#-------------------------------------------------------------------------------------
+# Utility functions for parsing actions/options
+#-------------------------------------------------------------------------------------
+
+VALID_ACTIONS = ("init", "info", "install", "list", "remove")
+SKIP_INFO_KEYS = ("platforms","readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" )
+
+def get_action(args):
+ """
+ Get the action the user wants to execute from the
+ sys argv list.
+ """
+ for i in range(0,len(args)):
+ arg = args[i]
+ if arg in VALID_ACTIONS:
+ del args[i]
+ return arg
+ return None
+
+def build_option_parser(action):
+ """
+ Builds an option parser object based on the action
+ the user wants to execute.
+ """
+
+ usage = "usage: %%prog [%s] [--help] [options] ..." % "|".join(VALID_ACTIONS)
+ epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
+ OptionParser.format_epilog = lambda self, formatter: self.epilog
+ parser = OptionParser(usage=usage, epilog=epilog)
+
+ if not action:
+ parser.print_help()
+ sys.exit()
+
+ # options for all actions
+ # - none yet
+
+ # options specific to actions
+ if action == "info":
+ parser.set_usage("usage: %prog info [options] role_name[,version]")
+ elif action == "init":
+ parser.set_usage("usage: %prog init [options] role_name")
+ parser.add_option(
+ '-p', '--init-path', dest='init_path', default="./",
+ help='The path in which the skeleton role will be created. '
+ 'The default is the current working directory.')
+ parser.add_option(
+ '--offline', dest='offline', default=False, action='store_true',
+ help="Don't query the galaxy API when creating roles")
+ elif action == "install":
+ parser.set_usage("usage: %prog install [options] [-r FILE | role_name(s)[,version] | scm+role_repo_url[,version] | tar_file(s)]")
+ parser.add_option(
+ '-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
+ help='Ignore errors and continue with the next specified role.')
+ parser.add_option(
+ '-n', '--no-deps', dest='no_deps', action='store_true', default=False,
+ help='Don\'t download roles listed as dependencies')
+ parser.add_option(
+ '-r', '--role-file', dest='role_file',
+ help='A file containing a list of roles to be imported')
+ elif action == "remove":
+ parser.set_usage("usage: %prog remove role1 role2 ...")
+ elif action == "list":
+ parser.set_usage("usage: %prog list [role_name]")
+
+ # options that apply to more than one action
+ if action != "init":
+ parser.add_option(
+ '-p', '--roles-path', dest='roles_path', default=C.DEFAULT_ROLES_PATH,
+ help='The path to the directory containing your roles. '
+ 'The default is the roles_path configured in your '
+ 'ansible.cfg file (/etc/ansible/roles if not configured)')
+
+ if action in ("info","init","install"):
+ parser.add_option(
+ '-s', '--server', dest='api_server', default="galaxy.ansible.com",
+ help='The API server destination')
+
+ if action in ("init","install"):
+ parser.add_option(
+ '-f', '--force', dest='force', action='store_true', default=False,
+ help='Force overwriting an existing role')
+ # done, return the parser
+ return parser
+
+def get_opt(options, k, defval=""):
+ """
+ Returns an option from an Optparse values instance.
+ """
+ try:
+ data = getattr(options, k)
+ except:
+ return defval
+ if k == "roles_path":
+ if os.pathsep in data:
+ data = data.split(os.pathsep)[0]
+ return data
+
+def exit_without_ignore(options, rc=1):
+ """
+ Exits with the specified return code unless the
+ option --ignore-errors was specified
+ """
+
+ if not get_opt(options, "ignore_errors", False):
+ print '- you can use --ignore-errors to skip failed roles.'
+ sys.exit(rc)
+
+
+#-------------------------------------------------------------------------------------
+# Galaxy API functions
+#-------------------------------------------------------------------------------------
+
+def api_get_config(api_server):
+ """
+ Fetches the Galaxy API current version to ensure
+ the API server is up and reachable.
+ """
+
+ try:
+ url = 'https://%s/api/' % api_server
+ data = json.load(urllib2.urlopen(url))
+ if not data.get("current_version",None):
+ return None
+ else:
+ return data
+ except:
+ return None
+
+def api_lookup_role_by_name(api_server, role_name, notify=True):
+ """
+ Uses the Galaxy API to do a lookup on the role owner/name.
+ """
+
+ role_name = urllib.quote(role_name)
+
+ try:
+ parts = role_name.split(".")
+ user_name = ".".join(parts[0:-1])
+ role_name = parts[-1]
+ if notify:
+ print "- downloading role '%s', owned by %s" % (role_name, user_name)
+ except:
+ parser.print_help()
+ print "- invalid role name (%s). Specify role as format: username.rolename" % role_name
+ sys.exit(1)
+
+ url = 'https://%s/api/v1/roles/?owner__username=%s&name=%s' % (api_server,user_name,role_name)
+ try:
+ data = json.load(urllib2.urlopen(url))
+ if len(data["results"]) == 0:
+ return None
+ else:
+ return data["results"][0]
+ except:
+ return None
+
+def api_fetch_role_related(api_server, related, role_id):
+ """
+ Uses the Galaxy API to fetch the list of related items for
+ the given role. The url comes from the 'related' field of
+ the role.
+ """
+
+ try:
+ url = 'https://%s/api/v1/roles/%d/%s/?page_size=50' % (api_server, int(role_id), related)
+ data = json.load(urllib2.urlopen(url))
+ results = data['results']
+ done = (data.get('next', None) == None)
+ while not done:
+ url = 'https://%s%s' % (api_server, data['next'])
+ print url
+ data = json.load(urllib2.urlopen(url))
+ results += data['results']
+ done = (data.get('next', None) == None)
+ return results
+ except:
+ return None
+
+def api_get_list(api_server, what):
+ """
+ Uses the Galaxy API to fetch the list of items specified.
+ """
+
+ try:
+ url = 'https://%s/api/v1/%s/?page_size' % (api_server, what)
+ data = json.load(urllib2.urlopen(url))
+ if "results" in data:
+ results = data['results']
+ else:
+ results = data
+ done = True
+ if "next" in data:
+ done = (data.get('next', None) == None)
+ while not done:
+ url = 'https://%s%s' % (api_server, data['next'])
+ print url
+ data = json.load(urllib2.urlopen(url))
+ results += data['results']
+ done = (data.get('next', None) == None)
+ return results
+ except:
+ print "- failed to download the %s list" % what
+ return None
+
+#-------------------------------------------------------------------------------------
+# scm repo utility functions
+#-------------------------------------------------------------------------------------
+
+def scm_archive_role(scm, role_url, role_version, role_name):
+    """
+    Clones the role from an SCM URL (git or hg only) into a temp
+    directory, archives it into a tar file, and returns the tar
+    file's path. Returns False on any failure; raises AnsibleError
+    if the clone command itself cannot be executed.
+    """
+    if scm not in ['hg', 'git']:
+        print "- scm %s is not currently supported" % scm
+        return False
+    tempdir = tempfile.mkdtemp()
+    clone_cmd = [scm, 'clone', role_url, role_name]
+    # clone quietly; both stdout and stderr are discarded
+    with open('/dev/null', 'w') as devnull:
+        try:
+            print "- executing: %s" % " ".join(clone_cmd)
+            popen = subprocess.Popen(clone_cmd, cwd=tempdir, stdout=devnull, stderr=devnull)
+        except:
+            raise AnsibleError("error executing: %s" % " ".join(clone_cmd))
+        rc = popen.wait()
+    if rc != 0:
+        print "- command %s failed" % ' '.join(clone_cmd)
+        print "  in directory %s" % tempdir
+        return False
+
+    # delete=False: the caller is responsible for unlinking the tar file
+    temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.tar')
+    if scm == 'hg':
+        archive_cmd = ['hg', 'archive', '--prefix', "%s/" % role_name]
+        if role_version:
+            archive_cmd.extend(['-r', role_version])
+        archive_cmd.append(temp_file.name)
+    if scm == 'git':
+        archive_cmd = ['git', 'archive', '--prefix=%s/' % role_name, '--output=%s' % temp_file.name]
+        if role_version:
+            archive_cmd.append(role_version)
+        else:
+            # no version requested: archive the current HEAD
+            archive_cmd.append('HEAD')
+
+    with open('/dev/null', 'w') as devnull:
+        print "- executing: %s" % " ".join(archive_cmd)
+        popen = subprocess.Popen(archive_cmd, cwd=os.path.join(tempdir, role_name),
+            stderr=devnull, stdout=devnull)
+        rc = popen.wait()
+    if rc != 0:
+        print "- command %s failed" % ' '.join(archive_cmd)
+        print "  in directory %s" % tempdir
+        return False
+
+    # the clone directory is no longer needed once the archive exists
+    shutil.rmtree(tempdir, ignore_errors=True)
+
+    return temp_file.name
+
+
+#-------------------------------------------------------------------------------------
+# Role utility functions
+#-------------------------------------------------------------------------------------
+
+def get_role_path(role_name, options):
+ """
+ Returns the role path based on the roles_path option
+ and the role name.
+ """
+ roles_path = get_opt(options,'roles_path')
+ roles_path = os.path.join(roles_path, role_name)
+ roles_path = os.path.expanduser(roles_path)
+ return roles_path
+
+def get_role_metadata(role_name, options):
+ """
+ Returns the metadata as YAML, if the file 'meta/main.yml'
+ exists in the specified role_path
+ """
+ role_path = os.path.join(get_role_path(role_name, options), 'meta/main.yml')
+ try:
+ if os.path.isfile(role_path):
+ f = open(role_path, 'r')
+ meta_data = yaml.safe_load(f)
+ f.close()
+ return meta_data
+ else:
+ return None
+ except:
+ return None
+
+def get_galaxy_install_info(role_name, options):
+ """
+ Returns the YAML data contained in 'meta/.galaxy_install_info',
+ if it exists.
+ """
+
+ try:
+ info_path = os.path.join(get_role_path(role_name, options), 'meta/.galaxy_install_info')
+ if os.path.isfile(info_path):
+ f = open(info_path, 'r')
+ info_data = yaml.safe_load(f)
+ f.close()
+ return info_data
+ else:
+ return None
+ except:
+ return None
+
+def write_galaxy_install_info(role_name, role_version, options):
+ """
+ Writes a YAML-formatted file to the role's meta/ directory
+ (named .galaxy_install_info) which contains some information
+ we can use later for commands like 'list' and 'info'.
+ """
+
+ info = dict(
+ version = role_version,
+ install_date = datetime.datetime.utcnow().strftime("%c"),
+ )
+ try:
+ info_path = os.path.join(get_role_path(role_name, options), 'meta/.galaxy_install_info')
+ f = open(info_path, 'w+')
+ info_data = yaml.safe_dump(info, f)
+ f.close()
+ except:
+ return False
+ return True
+
+
+def remove_role(role_name, options):
+ """
+ Removes the specified role from the roles path. There is a
+ sanity check to make sure there's a meta/main.yml file at this
+ path so the user doesn't blow away random directories
+ """
+ if get_role_metadata(role_name, options):
+ role_path = get_role_path(role_name, options)
+ shutil.rmtree(role_path)
+ return True
+ else:
+ return False
+
+def fetch_role(role_name, target, role_data, options):
+    """
+    Downloads the role archive to a temp file and returns the temp
+    file's name, or False on failure. (Extraction to the roles path
+    happens later, in install_role().) When role_name is itself a
+    URL it is fetched directly; otherwise a github archive URL is
+    built from role_data's github_user/github_repo and 'target'.
+    """
+
+    # first grab the file and save it to a temp location
+    if '://' in role_name:
+        archive_url = role_name
+    else:
+        archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], target)
+    print "- downloading role from %s" % archive_url
+
+    try:
+        url_file = urllib2.urlopen(archive_url)
+        # delete=False: the caller unlinks the file after install
+        temp_file = tempfile.NamedTemporaryFile(delete=False)
+        # stream the download in read()-sized chunks until EOF
+        data = url_file.read()
+        while data:
+            temp_file.write(data)
+            data = url_file.read()
+        temp_file.close()
+        return temp_file.name
+    except Exception, e:
+        # TODO: better urllib2 error handling for error
+        # messages that are more exact
+        print "- error: failed to download the file."
+        return False
+
+def install_role(role_name, role_version, role_filename, options):
+    """
+    Extracts the downloaded role tarball into the roles path under
+    'role_name', records the install info, and returns the parsed
+    meta/main.yml data on success or False on any failure.
+    """
+    # the file is a tar, so open it that way and extract it
+    # to the specified (or default) roles directory
+
+    if not tarfile.is_tarfile(role_filename):
+        print "- error: the file downloaded was not a tar.gz"
+        return False
+    else:
+        if role_filename.endswith('.gz'):
+            role_tar_file = tarfile.open(role_filename, "r:gz")
+        else:
+            role_tar_file = tarfile.open(role_filename, "r")
+        # verify the role's meta file
+        meta_file = None
+        members = role_tar_file.getmembers()
+        # next find the metadata file
+        for member in members:
+            if "/meta/main.yml" in member.name:
+                meta_file = member
+                break
+        if not meta_file:
+            print "- error: this role does not appear to have a meta/main.yml file."
+            return False
+        else:
+            try:
+                meta_file_data = yaml.safe_load(role_tar_file.extractfile(meta_file))
+            except:
+                print "- error: this role does not appear to have a valid meta/main.yml file."
+                return False
+
+        # we strip off the top-level directory for all of the files contained within
+        # the tar file here, since the default is 'github_repo-target', and change it
+        # to the specified role's name
+        role_path = os.path.join(get_opt(options, 'roles_path'), role_name)
+        role_path = os.path.expanduser(role_path)
+        print "- extracting %s to %s" % (role_name, role_path)
+        try:
+            if os.path.exists(role_path):
+                if not os.path.isdir(role_path):
+                    print "- error: the specified roles path exists and is not a directory."
+                    return False
+                elif not get_opt(options, "force", False):
+                    print "- error: the specified role %s appears to already exist. Use --force to replace it." % role_name
+                    return False
+                else:
+                    # using --force, remove the old path
+                    if not remove_role(role_name, options):
+                        print "- error: %s doesn't appear to contain a role." % role_path
+                        print "  please remove this directory manually if you really want to put the role here."
+                        return False
+            else:
+                os.makedirs(role_path)
+
+            # now we do the actual extraction to the role_path
+            for member in members:
+                # we only extract files, and remove any relative path
+                # bits that might be in the file for security purposes
+                # and drop the leading directory, as mentioned above
+                if member.isreg() or member.issym():
+                    parts = member.name.split("/")[1:]
+                    final_parts = []
+                    for part in parts:
+                        # filter out path traversal and shell-expansion
+                        # characters before extraction
+                        if part != '..' and '~' not in part and '$' not in part:
+                            final_parts.append(part)
+                    member.name = os.path.join(*final_parts)
+                    role_tar_file.extract(member, role_path)
+
+            # write out the install info file for later use
+            write_galaxy_install_info(role_name, role_version, options)
+        except OSError, e:
+            print "- error: you do not have permission to modify files in %s" % role_path
+            return False
+
+        # return the parsed yaml metadata
+        print "- %s was installed successfully" % role_name
+        return meta_file_data
+
+#-------------------------------------------------------------------------------------
+# Action functions
+#-------------------------------------------------------------------------------------
+
+def execute_init(args, options, parser):
+ """
+ Executes the init action, which creates the skeleton framework
+ of a role that complies with the galaxy metadata format.
+ """
+
+ init_path = get_opt(options, 'init_path', './')
+ api_server = get_opt(options, "api_server", "galaxy.ansible.com")
+ force = get_opt(options, 'force', False)
+ offline = get_opt(options, 'offline', False)
+
+ if not offline:
+ api_config = api_get_config(api_server)
+ if not api_config:
+ print "- the API server (%s) is not responding, please try again later." % api_server
+ sys.exit(1)
+
+ try:
+ role_name = args.pop(0).strip()
+ if role_name == "":
+ raise Exception("")
+ role_path = os.path.join(init_path, role_name)
+ if os.path.exists(role_path):
+ if os.path.isfile(role_path):
+ print "- the path %s already exists, but is a file - aborting" % role_path
+ sys.exit(1)
+ elif not force:
+ print "- the directory %s already exists." % role_path
+ print " you can use --force to re-initialize this directory,\n" + \
+ " however it will reset any main.yml files that may have\n" + \
+ " been modified there already."
+ sys.exit(1)
+ except Exception, e:
+ parser.print_help()
+ print "- no role name specified for init"
+ sys.exit(1)
+
+ ROLE_DIRS = ('defaults','files','handlers','meta','tasks','templates','vars')
+
+ # create the default README.md
+ if not os.path.exists(role_path):
+ os.makedirs(role_path)
+ readme_path = os.path.join(role_path, "README.md")
+ f = open(readme_path, "wb")
+ f.write(default_readme_template)
+ f.close
+
+ for dir in ROLE_DIRS:
+ dir_path = os.path.join(init_path, role_name, dir)
+ main_yml_path = os.path.join(dir_path, 'main.yml')
+ # create the directory if it doesn't exist already
+ if not os.path.exists(dir_path):
+ os.makedirs(dir_path)
+
+ # now create the main.yml file for that directory
+ if dir == "meta":
+ # create a skeleton meta/main.yml with a valid galaxy_info
+ # datastructure in place, plus with all of the available
+ # tags/platforms included (but commented out) and the
+ # dependencies section
+ platforms = []
+ if not offline:
+ platforms = api_get_list(api_server, "platforms") or []
+ categories = []
+ if not offline:
+ categories = api_get_list(api_server, "categories") or []
+
+ # group the list of platforms from the api based
+ # on their names, with the release field being
+ # appended to a list of versions
+ platform_groups = defaultdict(list)
+ for platform in platforms:
+ platform_groups[platform['name']].append(platform['release'])
+ platform_groups[platform['name']].sort()
+
+ inject = dict(
+ author = 'your name',
+ company = 'your company (optional)',
+ license = 'license (GPLv2, CC-BY, etc)',
+ issue_tracker_url = 'http://example.com/issue/tracker',
+ min_ansible_version = '1.2',
+ platforms = platform_groups,
+ categories = categories,
+ )
+ rendered_meta = Environment().from_string(default_meta_template).render(inject)
+ f = open(main_yml_path, 'w')
+ f.write(rendered_meta)
+ f.close()
+ pass
+ elif dir not in ('files','templates'):
+ # just write a (mostly) empty YAML file for main.yml
+ f = open(main_yml_path, 'w')
+ f.write('---\n# %s file for %s\n' % (dir,role_name))
+ f.close()
+ print "- %s was created successfully" % role_name
+
+def execute_info(args, options, parser):
+ """
+ Executes the info action. This action prints out detailed
+ information about an installed role as well as info available
+ from the galaxy API.
+ """
+
+ if len(args) == 0:
+ # the user needs to specify a role
+ parser.print_help()
+ print "- you must specify a user/role name"
+ sys.exit(1)
+
+ api_server = get_opt(options, "api_server", "galaxy.ansible.com")
+ api_config = api_get_config(api_server)
+ roles_path = get_opt(options, "roles_path")
+
+ for role in args:
+
+ role_info = {}
+
+ install_info = get_galaxy_install_info(role, options)
+ if install_info:
+ if 'version' in install_info:
+ install_info['intalled_version'] = install_info['version']
+ del install_info['version']
+ role_info.update(install_info)
+
+ remote_data = api_lookup_role_by_name(api_server, role, False)
+ if remote_data:
+ role_info.update(remote_data)
+
+ metadata = get_role_metadata(role, options)
+ if metadata:
+ role_info.update(metadata)
+
+ role_spec = ansible.utils.role_spec_parse(role)
+ if role_spec:
+ role_info.update(role_spec)
+
+ if role_info:
+ print "- %s:" % (role)
+ for k in sorted(role_info.keys()):
+
+ if k in SKIP_INFO_KEYS:
+ continue
+
+ if isinstance(role_info[k], dict):
+ print "\t%s: " % (k)
+ for key in sorted(role_info[k].keys()):
+ if key in SKIP_INFO_KEYS:
+ continue
+ print "\t\t%s: %s" % (key, role_info[k][key])
+ else:
+ print "\t%s: %s" % (k, role_info[k])
+ else:
+ print "- the role %s was not found" % role
+
+def execute_install(args, options, parser):
+ """
+ Executes the installation action. The args list contains the
+ roles to be installed, unless -f was specified. The list of roles
+ can be a name (which will be downloaded via the galaxy API and github),
+ or it can be a local .tar.gz file.
+ """
+
+ role_file = get_opt(options, "role_file", None)
+
+ if len(args) == 0 and role_file is None:
+ # the user needs to specify one of either --role-file
+ # or specify a single user/role name
+ parser.print_help()
+ print "- you must specify a user/role name or a roles file"
+ sys.exit()
+ elif len(args) == 1 and not role_file is None:
+ # using a role file is mutually exclusive of specifying
+ # the role name on the command line
+ parser.print_help()
+ print "- please specify a user/role name, or a roles file, but not both"
+ sys.exit(1)
+
+ api_server = get_opt(options, "api_server", "galaxy.ansible.com")
+ no_deps = get_opt(options, "no_deps", False)
+ roles_path = get_opt(options, "roles_path")
+
+ roles_done = []
+ if role_file:
+ f = open(role_file, 'r')
+ if role_file.endswith('.yaml') or role_file.endswith('.yml'):
+ roles_left = map(ansible.utils.role_yaml_parse, yaml.safe_load(f))
+ else:
+ # roles listed in a file, one per line
+ roles_left = map(ansible.utils.role_spec_parse, f.readlines())
+ f.close()
+ else:
+ # roles were specified directly, so we'll just go out grab them
+ # (and their dependencies, unless the user doesn't want us to).
+ roles_left = map(ansible.utils.role_spec_parse, args)
+
+ while len(roles_left) > 0:
+ # query the galaxy API for the role data
+ role_data = None
+ role = roles_left.pop(0)
+ role_src = role.get("src")
+ role_scm = role.get("scm")
+ role_path = role.get("path")
+
+ if role_path:
+ options.roles_path = role_path
+ else:
+ options.roles_path = roles_path
+
+ if os.path.isfile(role_src):
+ # installing a local tar.gz
+ tmp_file = role_src
+ else:
+ if role_scm:
+ # create tar file from scm url
+ tmp_file = scm_archive_role(role_scm, role_src, role.get("version"), role.get("name"))
+ elif '://' in role_src:
+ # just download a URL - version will probably be in the URL
+ tmp_file = fetch_role(role_src, None, None, options)
+ else:
+ # installing from galaxy
+ api_config = api_get_config(api_server)
+ if not api_config:
+ print "- the API server (%s) is not responding, please try again later." % api_server
+ sys.exit(1)
+
+ role_data = api_lookup_role_by_name(api_server, role_src)
+ if not role_data:
+ print "- sorry, %s was not found on %s." % (role_src, api_server)
+ exit_without_ignore(options)
+ continue
+
+ role_versions = api_fetch_role_related(api_server, 'versions', role_data['id'])
+ if "version" not in role or role['version'] == '':
+ # convert the version names to LooseVersion objects
+ # and sort them to get the latest version. If there
+ # are no versions in the list, we'll grab the head
+ # of the master branch
+ if len(role_versions) > 0:
+ loose_versions = [LooseVersion(a.get('name',None)) for a in role_versions]
+ loose_versions.sort()
+ role["version"] = str(loose_versions[-1])
+ else:
+ role["version"] = 'master'
+ elif role['version'] != 'master':
+ if role_versions and role["version"] not in [a.get('name', None) for a in role_versions]:
+ print 'role is %s' % role
+ print "- the specified version (%s) was not found in the list of available versions (%s)." % (role['version'], role_versions)
+ exit_without_ignore(options)
+ continue
+
+ # download the role. if --no-deps was specified, we stop here,
+ # otherwise we recursively grab roles and all of their deps.
+ tmp_file = fetch_role(role_src, role["version"], role_data, options)
+ installed = False
+ if tmp_file:
+ installed = install_role(role.get("name"), role.get("version"), tmp_file, options)
+ # we're done with the temp file, clean it up
+ if tmp_file != role_src:
+ os.unlink(tmp_file)
+ # install dependencies, if we want them
+ if not no_deps and installed:
+ if not role_data:
+ role_data = get_role_metadata(role.get("name"), options)
+ role_dependencies = role_data['dependencies']
+ else:
+ role_dependencies = role_data['summary_fields']['dependencies'] # api_fetch_role_related(api_server, 'dependencies', role_data['id'])
+ for dep in role_dependencies:
+ if isinstance(dep, basestring):
+ dep = ansible.utils.role_spec_parse(dep)
+ else:
+ dep = ansible.utils.role_yaml_parse(dep)
+ if not get_role_metadata(dep["name"], options):
+ if dep not in roles_left:
+ print '- adding dependency: %s' % dep["name"]
+ roles_left.append(dep)
+ else:
+ print '- dependency %s already pending installation.' % dep["name"]
+ else:
+ print '- dependency %s is already installed, skipping.' % dep["name"]
+ if not tmp_file or not installed:
+ print "- %s was NOT installed successfully." % role.get("name")
+ exit_without_ignore(options)
+ sys.exit(0)
+
+def execute_remove(args, options, parser):
+ """
+ Executes the remove action. The args list contains the list
+ of roles to be removed. This list can contain more than one role.
+ """
+
+ if len(args) == 0:
+ parser.print_help()
+ print '- you must specify at least one role to remove.'
+ sys.exit()
+
+ for role in args:
+ if get_role_metadata(role, options):
+ if remove_role(role, options):
+ print '- successfully removed %s' % role
+ else:
+ print "- failed to remove role: %s" % role
+ else:
+ print '- %s is not installed, skipping.' % role
+ sys.exit(0)
+
+def execute_list(args, options, parser):
+ """
+ Executes the list action. The args list can contain zero
+ or one role. If one is specified, only that role will be
+ shown, otherwise all roles in the specified directory will
+ be shown.
+ """
+
+ if len(args) > 1:
+ print "- please specify only one role to list, or specify no roles to see a full list"
+ sys.exit(1)
+
+ if len(args) == 1:
+ # show only the request role, if it exists
+ role_name = args[0]
+ metadata = get_role_metadata(role_name, options)
+ if metadata:
+ install_info = get_galaxy_install_info(role_name, options)
+ version = None
+ if install_info:
+ version = install_info.get("version", None)
+ if not version:
+ version = "(unknown version)"
+ # show some more info about single roles here
+ print "- %s, %s" % (role_name, version)
+ else:
+ print "- the role %s was not found" % role_name
+ else:
+ # show all valid roles in the roles_path directory
+ roles_path = get_opt(options, 'roles_path')
+ roles_path = os.path.expanduser(roles_path)
+ if not os.path.exists(roles_path):
+ parser.print_help()
+ print "- the path %s does not exist. Please specify a valid path with --roles-path" % roles_path
+ sys.exit(1)
+ elif not os.path.isdir(roles_path):
+ print "- %s exists, but it is not a directory. Please specify a valid path with --roles-path" % roles_path
+ parser.print_help()
+ sys.exit(1)
+ path_files = os.listdir(roles_path)
+ for path_file in path_files:
+ if get_role_metadata(path_file, options):
+ install_info = get_galaxy_install_info(path_file, options)
+ version = None
+ if install_info:
+ version = install_info.get("version", None)
+ if not version:
+ version = "(unknown version)"
+ print "- %s, %s" % (path_file, version)
+ sys.exit(0)
+
+#-------------------------------------------------------------------------------------
+# The main entry point
+#-------------------------------------------------------------------------------------
+
+def main():
+ # parse the CLI options
+ action = get_action(sys.argv)
+ parser = build_option_parser(action)
+ (options, args) = parser.parse_args()
+
+ # execute the desired action
+ if 1: #try:
+ fn = globals()["execute_%s" % action]
+ fn(args, options, parser)
+ #except KeyError, e:
+ # print "- error: %s is not a valid action. Valid actions are: %s" % (action, ", ".join(VALID_ACTIONS))
+ # sys.exit(1)
+
+if __name__ == "__main__":
+ main()
diff --git a/v1/bin/ansible-playbook b/v1/bin/ansible-playbook
new file mode 100755
index 0000000000..3d6e1f9f40
--- /dev/null
+++ b/v1/bin/ansible-playbook
@@ -0,0 +1,330 @@
+#!/usr/bin/env python
+# (C) 2012, Michael DeHaan,
+
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+#######################################################
+
+__requires__ = ['ansible']
+try:
+ import pkg_resources
+except Exception:
+ # Use pkg_resources to find the correct versions of libraries and set
+ # sys.path appropriately when there are multiversion installs. But we
+ # have code that better expresses the errors in the places where the code
+ # is actually used (the deps are optional for many code paths) so we don't
+ # want to fail here.
+ pass
+
+import sys
+import os
+import stat
+
+# Augment PYTHONPATH to find Python modules relative to this file path
+# This is so that we can find the modules when running from a local checkout
+# installed as editable with `pip install -e ...` or `python setup.py develop`
+local_module_path = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', 'lib')
+)
+sys.path.append(local_module_path)
+
+import ansible.playbook
+import ansible.constants as C
+import ansible.utils.template
+from ansible import errors
+from ansible import callbacks
+from ansible import utils
+from ansible.color import ANSIBLE_COLOR, stringc
+from ansible.callbacks import display
+
+def colorize(lead, num, color):
+ """ Print 'lead' = 'num' in 'color' """
+ if num != 0 and ANSIBLE_COLOR and color is not None:
+ return "%s%s%-15s" % (stringc(lead, color), stringc("=", color), stringc(str(num), color))
+ else:
+ return "%s=%-4s" % (lead, str(num))
+
+def hostcolor(host, stats, color=True):
+ if ANSIBLE_COLOR and color:
+ if stats['failures'] != 0 or stats['unreachable'] != 0:
+ return "%-37s" % stringc(host, 'red')
+ elif stats['changed'] != 0:
+ return "%-37s" % stringc(host, 'yellow')
+ else:
+ return "%-37s" % stringc(host, 'green')
+ return "%-26s" % host
+
+
+def main(args):
+    ''' run ansible-playbook operations '''
+
+    # create parser for CLI options
+    parser = utils.base_parser(
+        constants=C,
+        usage = "%prog playbook.yml",
+        connect_opts=True,
+        runas_opts=True,
+        subset_opts=True,
+        check_opts=True,
+        diff_opts=True
+    )
+    #parser.add_option('--vault-password', dest="vault_password",
+    #    help="password for vault encrypted files")
+    parser.add_option('-t', '--tags', dest='tags', default='all',
+        help="only run plays and tasks tagged with these values")
+    parser.add_option('--skip-tags', dest='skip_tags',
+        help="only run plays and tasks whose tags do not match these values")
+    parser.add_option('--syntax-check', dest='syntax', action='store_true',
+        help="perform a syntax check on the playbook, but do not execute it")
+    parser.add_option('--list-tasks', dest='listtasks', action='store_true',
+        help="list all tasks that would be executed")
+    parser.add_option('--list-tags', dest='listtags', action='store_true',
+        help="list all available tags")
+    parser.add_option('--step', dest='step', action='store_true',
+        help="one-step-at-a-time: confirm each task before running")
+    parser.add_option('--start-at-task', dest='start_at',
+        help="start the playbook at the task matching this name")
+    parser.add_option('--force-handlers', dest='force_handlers',
+        default=C.DEFAULT_FORCE_HANDLERS, action='store_true',
+        help="run handlers even if a task fails")
+    parser.add_option('--flush-cache', dest='flush_cache', action='store_true',
+        help="clear the fact cache")
+
+    options, args = parser.parse_args(args)
+
+    if len(args) == 0:
+        parser.print_help(file=sys.stderr)
+        return 1
+
+    # privlege escalation command line arguments need to be mutually exclusive
+    utils.check_mutually_exclusive_privilege(options, parser)
+
+    if (options.ask_vault_pass and options.vault_password_file):
+        parser.error("--ask-vault-pass and --vault-password-file are mutually exclusive")
+
+    sshpass = None
+    becomepass = None
+    vault_pass = None
+
+    options.ask_vault_pass = options.ask_vault_pass or C.DEFAULT_ASK_VAULT_PASS
+
+    if options.listhosts or options.syntax or options.listtasks or options.listtags:
+        # list/syntax-only modes never connect, so at most the vault
+        # password is needed
+        (_, _, vault_pass) = utils.ask_passwords(ask_vault_pass=options.ask_vault_pass)
+    else:
+        options.ask_pass = options.ask_pass or C.DEFAULT_ASK_PASS
+        # Never ask for an SSH password when we run with local connection
+        if options.connection == "local":
+            options.ask_pass = False
+
+        # set pe options
+        utils.normalize_become_options(options)
+        prompt_method = utils.choose_pass_prompt(options)
+        (sshpass, becomepass, vault_pass) = utils.ask_passwords(ask_pass=options.ask_pass,
+                                                                become_ask_pass=options.become_ask_pass,
+                                                                ask_vault_pass=options.ask_vault_pass,
+                                                                become_method=prompt_method)
+
+    # read vault_pass from a file
+    if not options.ask_vault_pass and options.vault_password_file:
+        vault_pass = utils.read_vault_file(options.vault_password_file)
+
+    extra_vars = utils.parse_extra_vars(options.extra_vars, vault_pass)
+
+    only_tags = options.tags.split(",")
+    skip_tags = options.skip_tags
+    if options.skip_tags is not None:
+        skip_tags = options.skip_tags.split(",")
+
+    # validate every playbook path up front, before running any of them
+    for playbook in args:
+        if not os.path.exists(playbook):
+            raise errors.AnsibleError("the playbook: %s could not be found" % playbook)
+        if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
+            raise errors.AnsibleError("the playbook: %s does not appear to be a file" % playbook)
+
+    inventory = ansible.inventory.Inventory(options.inventory, vault_password=vault_pass)
+
+    # Note: slightly wrong, this is written so that implicit localhost
+    # (which is not returned in list_hosts()) is taken into account for
+    # warning if inventory is empty.  But it can't be taken into account for
+    # checking if limit doesn't match any hosts.  Instead we don't worry about
+    # limit if only implicit localhost was in inventory to start with.
+    #
+    # Fix this in v2
+    no_hosts = False
+    if len(inventory.list_hosts()) == 0:
+        # Empty inventory
+        utils.warning("provided hosts list is empty, only localhost is available")
+        no_hosts = True
+    inventory.subset(options.subset)
+    if len(inventory.list_hosts()) == 0 and no_hosts is False:
+        # Invalid limit
+        raise errors.AnsibleError("Specified --limit does not match any hosts")
+
+    # run all playbooks specified on the command line
+    for playbook in args:
+
+        stats = callbacks.AggregateStats()
+        playbook_cb = callbacks.PlaybookCallbacks(verbose=utils.VERBOSITY)
+        if options.step:
+            playbook_cb.step = options.step
+        if options.start_at:
+            playbook_cb.start_at = options.start_at
+        runner_cb = callbacks.PlaybookRunnerCallbacks(stats, verbose=utils.VERBOSITY)
+
+        pb = ansible.playbook.PlayBook(
+            playbook=playbook,
+            module_path=options.module_path,
+            inventory=inventory,
+            forks=options.forks,
+            remote_user=options.remote_user,
+            remote_pass=sshpass,
+            callbacks=playbook_cb,
+            runner_callbacks=runner_cb,
+            stats=stats,
+            timeout=options.timeout,
+            transport=options.connection,
+            become=options.become,
+            become_method=options.become_method,
+            become_user=options.become_user,
+            become_pass=becomepass,
+            extra_vars=extra_vars,
+            private_key_file=options.private_key_file,
+            only_tags=only_tags,
+            skip_tags=skip_tags,
+            check=options.check,
+            diff=options.diff,
+            vault_password=vault_pass,
+            force_handlers=options.force_handlers,
+        )
+
+        if options.flush_cache:
+            display(callbacks.banner("FLUSHING FACT CACHE"))
+            pb.SETUP_CACHE.flush()
+
+        # list modes: walk the plays and print info without running anything
+        if options.listhosts or options.listtasks or options.syntax or options.listtags:
+            print ''
+            print 'playbook: %s' % playbook
+            print ''
+            playnum = 0
+            for (play_ds, play_basedir) in zip(pb.playbook, pb.play_basedirs):
+                playnum += 1
+                play = ansible.playbook.Play(pb, play_ds, play_basedir,
+                                             vault_password=pb.vault_password)
+                label = play.name
+                hosts = pb.inventory.list_hosts(play.hosts)
+
+                if options.listhosts:
+                    print '  play #%d (%s): host count=%d' % (playnum, label, len(hosts))
+                    for host in hosts:
+                        print '    %s' % host
+
+                if options.listtags or options.listtasks:
+                    print '  play #%d (%s):\tTAGS: [%s]' % (playnum, label,','.join(sorted(set(play.tags))))
+
+                    if options.listtags:
+                        tags = []
+                        for task in pb.tasks_to_run_in_play(play):
+                            tags.extend(task.tags)
+                        print '    TASK TAGS: [%s]' % (', '.join(sorted(set(tags).difference(['untagged']))))
+
+                    if options.listtasks:
+
+                        for task in pb.tasks_to_run_in_play(play):
+                            if getattr(task, 'name', None) is not None:
+                                # meta tasks have no names
+                                print '    %s\tTAGS: [%s]' % (task.name, ', '.join(sorted(set(task.tags).difference(['untagged']))))
+
+            if options.listhosts or options.listtasks or options.listtags:
+                print ''
+                continue
+
+        if options.syntax:
+            # if we've not exited by now then we are fine.
+            print 'Playbook Syntax is fine'
+            return 0
+
+        failed_hosts = []
+        unreachable_hosts = []
+
+        try:
+
+            pb.run()
+
+            hosts = sorted(pb.stats.processed.keys())
+            display(callbacks.banner("PLAY RECAP"))
+            playbook_cb.on_stats(pb.stats)
+
+            for h in hosts:
+                t = pb.stats.summarize(h)
+                if t['failures'] > 0:
+                    failed_hosts.append(h)
+                if t['unreachable'] > 0:
+                    unreachable_hosts.append(h)
+
+            retries = failed_hosts + unreachable_hosts
+
+            if C.RETRY_FILES_ENABLED and len(retries) > 0:
+                filename = pb.generate_retry_inventory(retries)
+                if filename:
+                    display("           to retry, use: --limit @%s\n" % filename)
+
+            # print the recap twice: colorized for the screen,
+            # uncolored for the log file
+            for h in hosts:
+                t = pb.stats.summarize(h)
+
+                display("%s : %s %s %s %s" % (
+                    hostcolor(h, t),
+                    colorize('ok', t['ok'], 'green'),
+                    colorize('changed', t['changed'], 'yellow'),
+                    colorize('unreachable', t['unreachable'], 'red'),
+                    colorize('failed', t['failures'], 'red')),
+                    screen_only=True
+                )
+
+                display("%s : %s %s %s %s" % (
+                    hostcolor(h, t, False),
+                    colorize('ok', t['ok'], None),
+                    colorize('changed', t['changed'], None),
+                    colorize('unreachable', t['unreachable'], None),
+                    colorize('failed', t['failures'], None)),
+                    log_only=True
+                )
+
+
+            print ""
+            # exit codes: 2 = task failures, 3 = unreachable hosts
+            if len(failed_hosts) > 0:
+                return 2
+            if len(unreachable_hosts) > 0:
+                return 3
+
+        except errors.AnsibleError, e:
+            display("ERROR: %s" % e, color='red')
+            return 1
+
+    return 0
+
+
+if __name__ == "__main__":
+    # blank display() calls bracket the argv record in the log file
+    display(" ", log_only=True)
+    display(" ".join(sys.argv), log_only=True)
+    display(" ", log_only=True)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except errors.AnsibleError, e:
+        display("ERROR: %s" % e, color='red', stderr=True)
+        sys.exit(1)
+    except KeyboardInterrupt, ke:
+        display("ERROR: interrupted", color='red', stderr=True)
+        sys.exit(1)
diff --git a/v1/bin/ansible-pull b/v1/bin/ansible-pull
new file mode 100755
index 0000000000..d4887631e0
--- /dev/null
+++ b/v1/bin/ansible-pull
@@ -0,0 +1,257 @@
+#!/usr/bin/env python
+
+# (c) 2012, Stephen Fromm
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+#
+# ansible-pull is a script that runs ansible in local mode
+# after checking out a playbooks directory from source repo. There is an
+# example playbook to bootstrap this script in the examples/ dir which
+# installs ansible and sets it up to run on cron.
+
+# usage:
+# ansible-pull -d /var/lib/ansible \
+# -U http://example.net/content.git [-C production] \
+# [path/playbook.yml]
+#
+# the -d and -U arguments are required; the -C argument is optional.
+#
+# ansible-pull accepts an optional argument to specify a playbook
+# location underneath the workdir and then searches the source repo
+# for playbooks in the following order, stopping at the first match:
+#
+# 1. $workdir/path/playbook.yml, if specified
+# 2. $workdir/$fqdn.yml
+# 3. $workdir/$hostname.yml
+# 4. $workdir/local.yml
+#
+# the source repo must contain at least one of these playbooks.
+
+import os
+import shutil
+import sys
+import datetime
+import socket
+import random
+import time
+from ansible import utils
+from ansible.utils import cmd_functions
+from ansible import errors
+from ansible import inventory
+
+DEFAULT_REPO_TYPE = 'git'
+DEFAULT_PLAYBOOK = 'local.yml'
+PLAYBOOK_ERRORS = {1: 'File does not exist',
+ 2: 'File is not readable'}
+
+VERBOSITY=0
+
+def increment_debug(option, opt, value, parser):
+ global VERBOSITY
+ VERBOSITY += 1
+
+def try_playbook(path):
+ if not os.path.exists(path):
+ return 1
+ if not os.access(path, os.R_OK):
+ return 2
+ return 0
+
+
+def select_playbook(path, args):
+ playbook = None
+ if len(args) > 0 and args[0] is not None:
+ playbook = "%s/%s" % (path, args[0])
+ rc = try_playbook(playbook)
+ if rc != 0:
+ print >>sys.stderr, "%s: %s" % (playbook, PLAYBOOK_ERRORS[rc])
+ return None
+ return playbook
+ else:
+ fqdn = socket.getfqdn()
+ hostpb = "%s/%s.yml" % (path, fqdn)
+ shorthostpb = "%s/%s.yml" % (path, fqdn.split('.')[0])
+ localpb = "%s/%s" % (path, DEFAULT_PLAYBOOK)
+ errors = []
+ for pb in [hostpb, shorthostpb, localpb]:
+ rc = try_playbook(pb)
+ if rc == 0:
+ playbook = pb
+ break
+ else:
+ errors.append("%s: %s" % (pb, PLAYBOOK_ERRORS[rc]))
+ if playbook is None:
+ print >>sys.stderr, "\n".join(errors)
+ return playbook
+
+
+def main(args):
+ """ Set up and run a local playbook """
+ usage = "%prog [options] [playbook.yml]"
+ parser = utils.SortedOptParser(usage=usage)
+ parser.add_option('--purge', default=False, action='store_true',
+ help='purge checkout after playbook run')
+ parser.add_option('-o', '--only-if-changed', dest='ifchanged', default=False, action='store_true',
+ help='only run the playbook if the repository has been updated')
+ parser.add_option('-s', '--sleep', dest='sleep', default=None,
+ help='sleep for random interval (between 0 and n number of seconds) before starting. this is a useful way to disperse git requests')
+ parser.add_option('-f', '--force', dest='force', default=False,
+ action='store_true',
+ help='run the playbook even if the repository could '
+ 'not be updated')
+ parser.add_option('-d', '--directory', dest='dest', default=None,
+ help='directory to checkout repository to')
+ #parser.add_option('-l', '--live', default=True, action='store_live',
+ # help='Print the ansible-playbook output while running')
+ parser.add_option('-U', '--url', dest='url', default=None,
+ help='URL of the playbook repository')
+ parser.add_option('-C', '--checkout', dest='checkout',
+ help='branch/tag/commit to checkout. '
+ 'Defaults to behavior of repository module.')
+ parser.add_option('-i', '--inventory-file', dest='inventory',
+ help="location of the inventory host file")
+ parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
+ help="set additional variables as key=value or YAML/JSON", default=[])
+ parser.add_option('-v', '--verbose', default=False, action="callback",
+ callback=increment_debug,
+ help='Pass -vvvv to ansible-playbook')
+ parser.add_option('-m', '--module-name', dest='module_name',
+ default=DEFAULT_REPO_TYPE,
+ help='Module name used to check out repository. '
+ 'Default is %s.' % DEFAULT_REPO_TYPE)
+ parser.add_option('--vault-password-file', dest='vault_password_file',
+ help="vault password file")
+ parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
+ help='ask for sudo password')
+ parser.add_option('-t', '--tags', dest='tags', default=False,
+ help='only run plays and tasks tagged with these values')
+ parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true',
+ help='adds the hostkey for the repo url if not already added')
+    parser.add_option('--key-file', dest='key_file',
+                      help="Pass '-i <key_file>' to the SSH arguments used by git.")
+ options, args = parser.parse_args(args)
+
+ hostname = socket.getfqdn()
+ if not options.dest:
+ # use a hostname dependent directory, in case of $HOME on nfs
+ options.dest = utils.prepare_writeable_dir('~/.ansible/pull/%s' % hostname)
+
+ options.dest = os.path.abspath(options.dest)
+
+ if not options.url:
+ parser.error("URL for repository not specified, use -h for help")
+ return 1
+
+ now = datetime.datetime.now()
+ print now.strftime("Starting ansible-pull at %F %T")
+
+ # Attempt to use the inventory passed in as an argument
+    # It might not yet have been downloaded so use localhost if not
+ if not options.inventory or not os.path.exists(options.inventory):
+ inv_opts = 'localhost,'
+ else:
+ inv_opts = options.inventory
+ limit_opts = 'localhost:%s:127.0.0.1' % hostname
+ repo_opts = "name=%s dest=%s" % (options.url, options.dest)
+
+ if VERBOSITY == 0:
+ base_opts = '-c local --limit "%s"' % limit_opts
+ elif VERBOSITY > 0:
+ debug_level = ''.join([ "v" for x in range(0, VERBOSITY) ])
+ base_opts = '-%s -c local --limit "%s"' % (debug_level, limit_opts)
+
+ if options.checkout:
+ repo_opts += ' version=%s' % options.checkout
+
+ # Only git module is supported
+ if options.module_name == DEFAULT_REPO_TYPE:
+ if options.accept_host_key:
+ repo_opts += ' accept_hostkey=yes'
+
+ if options.key_file:
+ repo_opts += ' key_file=%s' % options.key_file
+
+ path = utils.plugins.module_finder.find_plugin(options.module_name)
+ if path is None:
+ sys.stderr.write("module '%s' not found.\n" % options.module_name)
+ return 1
+
+ bin_path = os.path.dirname(os.path.abspath(__file__))
+ cmd = '%s/ansible localhost -i "%s" %s -m %s -a "%s"' % (
+ bin_path, inv_opts, base_opts, options.module_name, repo_opts
+ )
+
+ for ev in options.extra_vars:
+ cmd += ' -e "%s"' % ev
+
+ if options.sleep:
+ try:
+ secs = random.randint(0,int(options.sleep));
+ except ValueError:
+ parser.error("%s is not a number." % options.sleep)
+ return 1
+
+ print >>sys.stderr, "Sleeping for %d seconds..." % secs
+ time.sleep(secs);
+
+
+    # RUN THE CHECKOUT COMMAND
+ rc, out, err = cmd_functions.run_cmd(cmd, live=True)
+
+ if rc != 0:
+ if options.force:
+ print >>sys.stderr, "Unable to update repository. Continuing with (forced) run of playbook."
+ else:
+ return rc
+ elif options.ifchanged and '"changed": true' not in out:
+ print "Repository has not changed, quitting."
+ return 0
+
+ playbook = select_playbook(options.dest, args)
+
+ if playbook is None:
+ print >>sys.stderr, "Could not find a playbook to run."
+ return 1
+
+ cmd = '%s/ansible-playbook %s %s' % (bin_path, base_opts, playbook)
+ if options.vault_password_file:
+ cmd += " --vault-password-file=%s" % options.vault_password_file
+ if options.inventory:
+ cmd += ' -i "%s"' % options.inventory
+ for ev in options.extra_vars:
+ cmd += ' -e "%s"' % ev
+ if options.ask_sudo_pass:
+ cmd += ' -K'
+ if options.tags:
+ cmd += ' -t "%s"' % options.tags
+ os.chdir(options.dest)
+
+ # RUN THE PLAYBOOK COMMAND
+ rc, out, err = cmd_functions.run_cmd(cmd, live=True)
+
+ if options.purge:
+ os.chdir('/')
+ try:
+ shutil.rmtree(options.dest)
+ except Exception, e:
+ print >>sys.stderr, "Failed to remove %s: %s" % (options.dest, str(e))
+
+ return rc
+
+if __name__ == '__main__':
+ try:
+ sys.exit(main(sys.argv[1:]))
+ except KeyboardInterrupt, e:
+ print >>sys.stderr, "Exit on user request.\n"
+ sys.exit(1)
diff --git a/v1/bin/ansible-vault b/v1/bin/ansible-vault
new file mode 100755
index 0000000000..22cfc0e148
--- /dev/null
+++ b/v1/bin/ansible-vault
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+
+# (c) 2014, James Tanner
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+#
+# ansible-vault is a script that encrypts/decrypts YAML files. See
+# http://docs.ansible.com/playbooks_vault.html for more details.
+
+__requires__ = ['ansible']
+try:
+ import pkg_resources
+except Exception:
+ # Use pkg_resources to find the correct versions of libraries and set
+ # sys.path appropriately when there are multiversion installs. But we
+ # have code that better expresses the errors in the places where the code
+ # is actually used (the deps are optional for many code paths) so we don't
+ # want to fail here.
+ pass
+
+import os
+import sys
+import traceback
+
+import ansible.constants as C
+
+from ansible import utils
+from ansible import errors
+from ansible.utils.vault import VaultEditor
+
+from optparse import OptionParser
+
+#-------------------------------------------------------------------------------------
+# Utility functions for parsing actions/options
+#-------------------------------------------------------------------------------------
+
+VALID_ACTIONS = ("create", "decrypt", "edit", "encrypt", "rekey", "view")
+
+def build_option_parser(action):
+ """
+ Builds an option parser object based on the action
+ the user wants to execute.
+ """
+
+ usage = "usage: %%prog [%s] [--help] [options] file_name" % "|".join(VALID_ACTIONS)
+ epilog = "\nSee '%s --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
+ OptionParser.format_epilog = lambda self, formatter: self.epilog
+ parser = OptionParser(usage=usage, epilog=epilog)
+
+ if not action:
+ parser.print_help()
+ sys.exit()
+
+ # options for all actions
+ #parser.add_option('-c', '--cipher', dest='cipher', default="AES256", help="cipher to use")
+ parser.add_option('--debug', dest='debug', action="store_true", help="debug")
+ parser.add_option('--vault-password-file', dest='password_file',
+ help="vault password file", default=C.DEFAULT_VAULT_PASSWORD_FILE)
+
+ # options specific to actions
+ if action == "create":
+ parser.set_usage("usage: %prog create [options] file_name")
+ elif action == "decrypt":
+ parser.set_usage("usage: %prog decrypt [options] file_name")
+ elif action == "edit":
+ parser.set_usage("usage: %prog edit [options] file_name")
+ elif action == "view":
+ parser.set_usage("usage: %prog view [options] file_name")
+ elif action == "encrypt":
+ parser.set_usage("usage: %prog encrypt [options] file_name")
+ elif action == "rekey":
+ parser.set_usage("usage: %prog rekey [options] file_name")
+
+ # done, return the parser
+ return parser
+
+def get_action(args):
+ """
+ Get the action the user wants to execute from the
+ sys argv list.
+ """
+ for i in range(0,len(args)):
+ arg = args[i]
+ if arg in VALID_ACTIONS:
+ del args[i]
+ return arg
+ return None
+
+def get_opt(options, k, defval=""):
+ """
+ Returns an option from an Optparse values instance.
+ """
+ try:
+ data = getattr(options, k)
+ except:
+ return defval
+ if k == "roles_path":
+ if os.pathsep in data:
+ data = data.split(os.pathsep)[0]
+ return data
+
+#-------------------------------------------------------------------------------------
+# Command functions
+#-------------------------------------------------------------------------------------
+
+def execute_create(args, options, parser):
+ if len(args) > 1:
+ raise errors.AnsibleError("'create' does not accept more than one filename")
+
+ if not options.password_file:
+ password, new_password = utils.ask_vault_passwords(ask_vault_pass=True, confirm_vault=True)
+ else:
+ password = utils.read_vault_file(options.password_file)
+
+ cipher = 'AES256'
+ if hasattr(options, 'cipher'):
+ cipher = options.cipher
+
+ this_editor = VaultEditor(cipher, password, args[0])
+ this_editor.create_file()
+
+def execute_decrypt(args, options, parser):
+
+ if not options.password_file:
+ password, new_password = utils.ask_vault_passwords(ask_vault_pass=True)
+ else:
+ password = utils.read_vault_file(options.password_file)
+
+ cipher = 'AES256'
+ if hasattr(options, 'cipher'):
+ cipher = options.cipher
+
+ for f in args:
+ this_editor = VaultEditor(cipher, password, f)
+ this_editor.decrypt_file()
+
+ print "Decryption successful"
+
+def execute_edit(args, options, parser):
+
+ if len(args) > 1:
+ raise errors.AnsibleError("edit does not accept more than one filename")
+
+ if not options.password_file:
+ password, new_password = utils.ask_vault_passwords(ask_vault_pass=True)
+ else:
+ password = utils.read_vault_file(options.password_file)
+
+ cipher = None
+
+ for f in args:
+ this_editor = VaultEditor(cipher, password, f)
+ this_editor.edit_file()
+
+def execute_view(args, options, parser):
+
+ if len(args) > 1:
+ raise errors.AnsibleError("view does not accept more than one filename")
+
+ if not options.password_file:
+ password, new_password = utils.ask_vault_passwords(ask_vault_pass=True)
+ else:
+ password = utils.read_vault_file(options.password_file)
+
+ cipher = None
+
+ for f in args:
+ this_editor = VaultEditor(cipher, password, f)
+ this_editor.view_file()
+
+def execute_encrypt(args, options, parser):
+
+ if not options.password_file:
+ password, new_password = utils.ask_vault_passwords(ask_vault_pass=True, confirm_vault=True)
+ else:
+ password = utils.read_vault_file(options.password_file)
+
+ cipher = 'AES256'
+ if hasattr(options, 'cipher'):
+ cipher = options.cipher
+
+ for f in args:
+ this_editor = VaultEditor(cipher, password, f)
+ this_editor.encrypt_file()
+
+ print "Encryption successful"
+
+def execute_rekey(args, options, parser):
+
+ if not options.password_file:
+ password, __ = utils.ask_vault_passwords(ask_vault_pass=True)
+ else:
+ password = utils.read_vault_file(options.password_file)
+
+ __, new_password = utils.ask_vault_passwords(ask_vault_pass=False, ask_new_vault_pass=True, confirm_new=True)
+
+ cipher = None
+ for f in args:
+ this_editor = VaultEditor(cipher, password, f)
+ this_editor.rekey_file(new_password)
+
+ print "Rekey successful"
+
+#-------------------------------------------------------------------------------------
+# MAIN
+#-------------------------------------------------------------------------------------
+
+def main():
+
+ action = get_action(sys.argv)
+ parser = build_option_parser(action)
+ (options, args) = parser.parse_args()
+
+ if not len(args):
+ raise errors.AnsibleError(
+ "The '%s' command requires a filename as the first argument" % action
+ )
+
+ # execute the desired action
+ try:
+ fn = globals()["execute_%s" % action]
+ fn(args, options, parser)
+ except Exception, err:
+ if options.debug:
+ print traceback.format_exc()
+ print "ERROR:",err
+ sys.exit(1)
+
+if __name__ == "__main__":
+ main()
diff --git a/test/units/README.md b/v1/tests/README.md
similarity index 100%
rename from test/units/README.md
rename to v1/tests/README.md
diff --git a/test/units/TestConstants.py b/v1/tests/TestConstants.py
similarity index 100%
rename from test/units/TestConstants.py
rename to v1/tests/TestConstants.py
diff --git a/test/units/TestFilters.py b/v1/tests/TestFilters.py
similarity index 100%
rename from test/units/TestFilters.py
rename to v1/tests/TestFilters.py
diff --git a/test/units/TestInventory.py b/v1/tests/TestInventory.py
similarity index 100%
rename from test/units/TestInventory.py
rename to v1/tests/TestInventory.py
diff --git a/test/units/TestModuleUtilsBasic.py b/v1/tests/TestModuleUtilsBasic.py
similarity index 100%
rename from test/units/TestModuleUtilsBasic.py
rename to v1/tests/TestModuleUtilsBasic.py
diff --git a/test/units/TestModuleUtilsDatabase.py b/v1/tests/TestModuleUtilsDatabase.py
similarity index 100%
rename from test/units/TestModuleUtilsDatabase.py
rename to v1/tests/TestModuleUtilsDatabase.py
diff --git a/test/units/TestModules.py b/v1/tests/TestModules.py
similarity index 100%
rename from test/units/TestModules.py
rename to v1/tests/TestModules.py
diff --git a/test/units/TestPlayVarsFiles.py b/v1/tests/TestPlayVarsFiles.py
similarity index 100%
rename from test/units/TestPlayVarsFiles.py
rename to v1/tests/TestPlayVarsFiles.py
diff --git a/test/units/TestSynchronize.py b/v1/tests/TestSynchronize.py
similarity index 100%
rename from test/units/TestSynchronize.py
rename to v1/tests/TestSynchronize.py
diff --git a/test/units/TestUtils.py b/v1/tests/TestUtils.py
similarity index 100%
rename from test/units/TestUtils.py
rename to v1/tests/TestUtils.py
diff --git a/test/units/TestUtilsStringFunctions.py b/v1/tests/TestUtilsStringFunctions.py
similarity index 100%
rename from test/units/TestUtilsStringFunctions.py
rename to v1/tests/TestUtilsStringFunctions.py
diff --git a/test/units/TestVault.py b/v1/tests/TestVault.py
similarity index 100%
rename from test/units/TestVault.py
rename to v1/tests/TestVault.py
diff --git a/test/units/TestVaultEditor.py b/v1/tests/TestVaultEditor.py
similarity index 100%
rename from test/units/TestVaultEditor.py
rename to v1/tests/TestVaultEditor.py
diff --git a/test/units/ansible.cfg b/v1/tests/ansible.cfg
similarity index 100%
rename from test/units/ansible.cfg
rename to v1/tests/ansible.cfg
diff --git a/test/units/inventory_test_data/ansible_hosts b/v1/tests/inventory_test_data/ansible_hosts
similarity index 100%
rename from test/units/inventory_test_data/ansible_hosts
rename to v1/tests/inventory_test_data/ansible_hosts
diff --git a/test/units/inventory_test_data/broken.yml b/v1/tests/inventory_test_data/broken.yml
similarity index 100%
rename from test/units/inventory_test_data/broken.yml
rename to v1/tests/inventory_test_data/broken.yml
diff --git a/test/units/inventory_test_data/common_vars.yml b/v1/tests/inventory_test_data/common_vars.yml
similarity index 100%
rename from test/units/inventory_test_data/common_vars.yml
rename to v1/tests/inventory_test_data/common_vars.yml
diff --git a/test/units/inventory_test_data/complex_hosts b/v1/tests/inventory_test_data/complex_hosts
similarity index 100%
rename from test/units/inventory_test_data/complex_hosts
rename to v1/tests/inventory_test_data/complex_hosts
diff --git a/test/units/inventory_test_data/encrypted.yml b/v1/tests/inventory_test_data/encrypted.yml
similarity index 100%
rename from test/units/inventory_test_data/encrypted.yml
rename to v1/tests/inventory_test_data/encrypted.yml
diff --git a/test/units/inventory_test_data/hosts_list.yml b/v1/tests/inventory_test_data/hosts_list.yml
similarity index 100%
rename from test/units/inventory_test_data/hosts_list.yml
rename to v1/tests/inventory_test_data/hosts_list.yml
diff --git a/test/units/inventory_test_data/inventory/test_alpha_end_before_beg b/v1/tests/inventory_test_data/inventory/test_alpha_end_before_beg
similarity index 100%
rename from test/units/inventory_test_data/inventory/test_alpha_end_before_beg
rename to v1/tests/inventory_test_data/inventory/test_alpha_end_before_beg
diff --git a/test/units/inventory_test_data/inventory/test_combined_range b/v1/tests/inventory_test_data/inventory/test_combined_range
similarity index 100%
rename from test/units/inventory_test_data/inventory/test_combined_range
rename to v1/tests/inventory_test_data/inventory/test_combined_range
diff --git a/test/units/inventory_test_data/inventory/test_incorrect_format b/v1/tests/inventory_test_data/inventory/test_incorrect_format
similarity index 100%
rename from test/units/inventory_test_data/inventory/test_incorrect_format
rename to v1/tests/inventory_test_data/inventory/test_incorrect_format
diff --git a/test/units/inventory_test_data/inventory/test_incorrect_range b/v1/tests/inventory_test_data/inventory/test_incorrect_range
similarity index 100%
rename from test/units/inventory_test_data/inventory/test_incorrect_range
rename to v1/tests/inventory_test_data/inventory/test_incorrect_range
diff --git a/test/units/inventory_test_data/inventory/test_leading_range b/v1/tests/inventory_test_data/inventory/test_leading_range
similarity index 100%
rename from test/units/inventory_test_data/inventory/test_leading_range
rename to v1/tests/inventory_test_data/inventory/test_leading_range
diff --git a/test/units/inventory_test_data/inventory/test_missing_end b/v1/tests/inventory_test_data/inventory/test_missing_end
similarity index 100%
rename from test/units/inventory_test_data/inventory/test_missing_end
rename to v1/tests/inventory_test_data/inventory/test_missing_end
diff --git a/test/units/inventory_test_data/inventory_api.py b/v1/tests/inventory_test_data/inventory_api.py
similarity index 100%
rename from test/units/inventory_test_data/inventory_api.py
rename to v1/tests/inventory_test_data/inventory_api.py
diff --git a/test/units/inventory_test_data/inventory_dir/0hosts b/v1/tests/inventory_test_data/inventory_dir/0hosts
similarity index 100%
rename from test/units/inventory_test_data/inventory_dir/0hosts
rename to v1/tests/inventory_test_data/inventory_dir/0hosts
diff --git a/test/units/inventory_test_data/inventory_dir/1mythology b/v1/tests/inventory_test_data/inventory_dir/1mythology
similarity index 100%
rename from test/units/inventory_test_data/inventory_dir/1mythology
rename to v1/tests/inventory_test_data/inventory_dir/1mythology
diff --git a/test/units/inventory_test_data/inventory_dir/2levels b/v1/tests/inventory_test_data/inventory_dir/2levels
similarity index 100%
rename from test/units/inventory_test_data/inventory_dir/2levels
rename to v1/tests/inventory_test_data/inventory_dir/2levels
diff --git a/test/units/inventory_test_data/inventory_dir/3comments b/v1/tests/inventory_test_data/inventory_dir/3comments
similarity index 100%
rename from test/units/inventory_test_data/inventory_dir/3comments
rename to v1/tests/inventory_test_data/inventory_dir/3comments
diff --git a/test/units/inventory_test_data/inventory_dir/4skip_extensions.ini b/v1/tests/inventory_test_data/inventory_dir/4skip_extensions.ini
similarity index 100%
rename from test/units/inventory_test_data/inventory_dir/4skip_extensions.ini
rename to v1/tests/inventory_test_data/inventory_dir/4skip_extensions.ini
diff --git a/test/units/inventory_test_data/large_range b/v1/tests/inventory_test_data/large_range
similarity index 100%
rename from test/units/inventory_test_data/large_range
rename to v1/tests/inventory_test_data/large_range
diff --git a/test/units/inventory_test_data/restrict_pattern b/v1/tests/inventory_test_data/restrict_pattern
similarity index 100%
rename from test/units/inventory_test_data/restrict_pattern
rename to v1/tests/inventory_test_data/restrict_pattern
diff --git a/test/units/inventory_test_data/simple_hosts b/v1/tests/inventory_test_data/simple_hosts
similarity index 100%
rename from test/units/inventory_test_data/simple_hosts
rename to v1/tests/inventory_test_data/simple_hosts
diff --git a/test/units/module_tests/TestApt.py b/v1/tests/module_tests/TestApt.py
similarity index 100%
rename from test/units/module_tests/TestApt.py
rename to v1/tests/module_tests/TestApt.py
diff --git a/test/units/module_tests/TestDocker.py b/v1/tests/module_tests/TestDocker.py
similarity index 100%
rename from test/units/module_tests/TestDocker.py
rename to v1/tests/module_tests/TestDocker.py
diff --git a/test/units/vault_test_data/foo-ansible-1.0.yml b/v1/tests/vault_test_data/foo-ansible-1.0.yml
similarity index 100%
rename from test/units/vault_test_data/foo-ansible-1.0.yml
rename to v1/tests/vault_test_data/foo-ansible-1.0.yml
diff --git a/test/units/vault_test_data/foo-ansible-1.1-ansible-newline-ansible.yml b/v1/tests/vault_test_data/foo-ansible-1.1-ansible-newline-ansible.yml
similarity index 100%
rename from test/units/vault_test_data/foo-ansible-1.1-ansible-newline-ansible.yml
rename to v1/tests/vault_test_data/foo-ansible-1.1-ansible-newline-ansible.yml
diff --git a/test/units/vault_test_data/foo-ansible-1.1.yml b/v1/tests/vault_test_data/foo-ansible-1.1.yml
similarity index 100%
rename from test/units/vault_test_data/foo-ansible-1.1.yml
rename to v1/tests/vault_test_data/foo-ansible-1.1.yml
diff --git a/v2/README-tests.md b/v2/README-tests.md
deleted file mode 100644
index 956160b653..0000000000
--- a/v2/README-tests.md
+++ /dev/null
@@ -1,33 +0,0 @@
-Ansible Test System
-===================
-
-Folders
-=======
-
-test
-----
-
-Unit tests that test small pieces of code not suited for the integration test
-layer, usually very API based, and should leverage mock interfaces rather than
-producing side effects.
-
-Playbook engine code is better suited for integration tests.
-
-Requirements: sudo pip install paramiko PyYAML jinja2 httplib2 passlib unittest2 mock
-
-integration
------------
-
-Integration test layer, constructed using playbooks.
-
-Some tests may require cloud credentials, others will not, and destructive
-tests are separated from non-destructive so a subset can be run on development
-machines.
-
-learn more
-----------
-
-hop into a subdirectory and see the associated README.md for more info.
-
-
-
diff --git a/v2/ansible/__init__.py b/v2/ansible/__init__.py
deleted file mode 100644
index 8637adb54d..0000000000
--- a/v2/ansible/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-__version__ = '2.0'
diff --git a/v2/ansible/inventory/host.py b/v2/ansible/inventory/host.py
deleted file mode 100644
index 29d6afd991..0000000000
--- a/v2/ansible/inventory/host.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible import constants as C
-from ansible.inventory.group import Group
-from ansible.utils.vars import combine_vars
-
-__all__ = ['Host']
-
-class Host:
- ''' a single ansible host '''
-
- #__slots__ = [ 'name', 'vars', 'groups' ]
-
- def __getstate__(self):
- return self.serialize()
-
- def __setstate__(self, data):
- return self.deserialize(data)
-
- def __eq__(self, other):
- return self.name == other.name
-
- def serialize(self):
- groups = []
- for group in self.groups:
- groups.append(group.serialize())
-
- return dict(
- name=self.name,
- vars=self.vars.copy(),
- ipv4_address=self.ipv4_address,
- ipv6_address=self.ipv6_address,
- port=self.port,
- gathered_facts=self._gathered_facts,
- groups=groups,
- )
-
- def deserialize(self, data):
- self.__init__()
-
- self.name = data.get('name')
- self.vars = data.get('vars', dict())
- self.ipv4_address = data.get('ipv4_address', '')
- self.ipv6_address = data.get('ipv6_address', '')
- self.port = data.get('port')
-
- groups = data.get('groups', [])
- for group_data in groups:
- g = Group()
- g.deserialize(group_data)
- self.groups.append(g)
-
- def __init__(self, name=None, port=None):
-
- self.name = name
- self.vars = {}
- self.groups = []
-
- self.ipv4_address = name
- self.ipv6_address = name
-
- if port and port != C.DEFAULT_REMOTE_PORT:
- self.port = int(port)
- else:
- self.port = C.DEFAULT_REMOTE_PORT
-
- self._gathered_facts = False
-
- def __repr__(self):
- return self.get_name()
-
- def get_name(self):
- return self.name
-
- @property
- def gathered_facts(self):
- return self._gathered_facts
-
- def set_gathered_facts(self, gathered):
- self._gathered_facts = gathered
-
- def add_group(self, group):
-
- self.groups.append(group)
-
- def set_variable(self, key, value):
-
- self.vars[key]=value
-
- def get_groups(self):
-
- groups = {}
- for g in self.groups:
- groups[g.name] = g
- ancestors = g.get_ancestors()
- for a in ancestors:
- groups[a.name] = a
- return groups.values()
-
- def get_vars(self):
-
- results = {}
- groups = self.get_groups()
- for group in sorted(groups, key=lambda g: g.depth):
- results = combine_vars(results, group.get_vars())
- results = combine_vars(results, self.vars)
- results['inventory_hostname'] = self.name
- results['inventory_hostname_short'] = self.name.split('.')[0]
- results['group_names'] = sorted([ g.name for g in groups if g.name != 'all'])
- return results
-
diff --git a/v2/ansible/modules/core b/v2/ansible/modules/core
deleted file mode 160000
index 0341ddd35e..0000000000
--- a/v2/ansible/modules/core
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 0341ddd35ed5ff477ad5de2488d947255ce86259
diff --git a/v2/ansible/modules/extras b/v2/ansible/modules/extras
deleted file mode 160000
index dd80fa221c..0000000000
--- a/v2/ansible/modules/extras
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit dd80fa221ce0adb3abd658fbd1aa09bf7cf8a6dc
diff --git a/v2/ansible/playbook/__init__.py b/v2/ansible/playbook/__init__.py
deleted file mode 100644
index 40e6638f23..0000000000
--- a/v2/ansible/playbook/__init__.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-
-from ansible.errors import AnsibleError, AnsibleParserError
-from ansible.parsing import DataLoader
-from ansible.playbook.attribute import Attribute, FieldAttribute
-from ansible.playbook.play import Play
-from ansible.playbook.playbook_include import PlaybookInclude
-from ansible.plugins import push_basedir
-
-
-__all__ = ['Playbook']
-
-
-class Playbook:
-
- def __init__(self, loader):
- # Entries in the datastructure of a playbook may
- # be either a play or an include statement
- self._entries = []
- self._basedir = os.getcwd()
- self._loader = loader
-
- @staticmethod
- def load(file_name, variable_manager=None, loader=None):
- pb = Playbook(loader=loader)
- pb._load_playbook_data(file_name=file_name, variable_manager=variable_manager)
- return pb
-
- def _load_playbook_data(self, file_name, variable_manager):
-
- if os.path.isabs(file_name):
- self._basedir = os.path.dirname(file_name)
- else:
- self._basedir = os.path.normpath(os.path.join(self._basedir, os.path.dirname(file_name)))
-
- # set the loaders basedir
- self._loader.set_basedir(self._basedir)
-
- # also add the basedir to the list of module directories
- push_basedir(self._basedir)
-
- ds = self._loader.load_from_file(os.path.basename(file_name))
- if not isinstance(ds, list):
- raise AnsibleParserError("playbooks must be a list of plays", obj=ds)
-
- # Parse the playbook entries. For plays, we simply parse them
- # using the Play() object, and includes are parsed using the
- # PlaybookInclude() object
- for entry in ds:
- if not isinstance(entry, dict):
- raise AnsibleParserError("playbook entries must be either a valid play or an include statement", obj=entry)
-
- if 'include' in entry:
- pb = PlaybookInclude.load(entry, basedir=self._basedir, variable_manager=variable_manager, loader=self._loader)
- self._entries.extend(pb._entries)
- else:
- entry_obj = Play.load(entry, variable_manager=variable_manager, loader=self._loader)
- self._entries.append(entry_obj)
-
- def get_loader(self):
- return self._loader
-
- def get_plays(self):
- return self._entries[:]
diff --git a/v2/ansible/playbook/play.py b/v2/ansible/playbook/play.py
deleted file mode 100644
index b99c01fdf7..0000000000
--- a/v2/ansible/playbook/play.py
+++ /dev/null
@@ -1,263 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.errors import AnsibleError, AnsibleParserError
-
-from ansible.playbook.attribute import Attribute, FieldAttribute
-from ansible.playbook.base import Base
-from ansible.playbook.become import Become
-from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles
-from ansible.playbook.role import Role
-from ansible.playbook.taggable import Taggable
-from ansible.playbook.block import Block
-
-from ansible.utils.vars import combine_vars
-
-
-__all__ = ['Play']
-
-
-class Play(Base, Taggable, Become):
-
- """
- A play is a language feature that represents a list of roles and/or
- task/handler blocks to execute on a given set of hosts.
-
- Usage:
-
- Play.load(datastructure) -> Play
- Play.something(...)
- """
-
- # =================================================================================
- # Connection-Related Attributes
-
- # TODO: generalize connection
- _accelerate = FieldAttribute(isa='bool', default=False)
- _accelerate_ipv6 = FieldAttribute(isa='bool', default=False)
- _accelerate_port = FieldAttribute(isa='int', default=5099) # should be alias of port
-
- # Connection
- _gather_facts = FieldAttribute(isa='string', default='smart')
- _hosts = FieldAttribute(isa='list', default=[], required=True)
- _name = FieldAttribute(isa='string', default='')
-
- # Variable Attributes
- _vars_files = FieldAttribute(isa='list', default=[])
- _vars_prompt = FieldAttribute(isa='dict', default=dict())
- _vault_password = FieldAttribute(isa='string')
-
- # Block (Task) Lists Attributes
- _handlers = FieldAttribute(isa='list', default=[])
- _pre_tasks = FieldAttribute(isa='list', default=[])
- _post_tasks = FieldAttribute(isa='list', default=[])
- _tasks = FieldAttribute(isa='list', default=[])
-
- # Role Attributes
- _roles = FieldAttribute(isa='list', default=[])
-
- # Flag/Setting Attributes
- _any_errors_fatal = FieldAttribute(isa='bool', default=False)
- _max_fail_percentage = FieldAttribute(isa='string', default='0')
- _serial = FieldAttribute(isa='int', default=0)
- _strategy = FieldAttribute(isa='string', default='linear')
-
- # =================================================================================
-
- def __init__(self):
- super(Play, self).__init__()
-
- def __repr__(self):
- return self.get_name()
-
- def get_name(self):
- ''' return the name of the Play '''
- return "PLAY: %s" % self._attributes.get('name')
-
- @staticmethod
- def load(data, variable_manager=None, loader=None):
- p = Play()
- return p.load_data(data, variable_manager=variable_manager, loader=loader)
-
- def preprocess_data(self, ds):
- '''
- Adjusts play datastructure to cleanup old/legacy items
- '''
-
- assert isinstance(ds, dict)
-
- # The use of 'user' in the Play datastructure was deprecated to
- # line up with the same change for Tasks, due to the fact that
- # 'user' conflicted with the user module.
- if 'user' in ds:
- # this should never happen, but error out with a helpful message
- # to the user if it does...
- if 'remote_user' in ds:
- raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
-
- ds['remote_user'] = ds['user']
- del ds['user']
-
- return super(Play, self).preprocess_data(ds)
-
- def _load_vars(self, attr, ds):
- '''
- Vars in a play can be specified either as a dictionary directly, or
- as a list of dictionaries. If the later, this method will turn the
- list into a single dictionary.
- '''
-
- try:
- if isinstance(ds, dict):
- return ds
- elif isinstance(ds, list):
- all_vars = dict()
- for item in ds:
- if not isinstance(item, dict):
- raise ValueError
- all_vars = combine_vars(all_vars, item)
- return all_vars
- else:
- raise ValueError
- except ValueError:
- raise AnsibleParserError("Vars in a playbook must be specified as a dictionary, or a list of dictionaries", obj=ds)
-
- def _load_tasks(self, attr, ds):
- '''
- Loads a list of blocks from a list which may be mixed tasks/blocks.
- Bare tasks outside of a block are given an implicit block.
- '''
- return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
-
- def _load_pre_tasks(self, attr, ds):
- '''
- Loads a list of blocks from a list which may be mixed tasks/blocks.
- Bare tasks outside of a block are given an implicit block.
- '''
- return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
-
- def _load_post_tasks(self, attr, ds):
- '''
- Loads a list of blocks from a list which may be mixed tasks/blocks.
- Bare tasks outside of a block are given an implicit block.
- '''
- return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
-
- def _load_handlers(self, attr, ds):
- '''
- Loads a list of blocks from a list which may be mixed handlers/blocks.
- Bare handlers outside of a block are given an implicit block.
- '''
- return load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader)
-
- def _load_roles(self, attr, ds):
- '''
- Loads and returns a list of RoleInclude objects from the datastructure
- list of role definitions and creates the Role from those objects
- '''
-
- role_includes = load_list_of_roles(ds, variable_manager=self._variable_manager, loader=self._loader)
-
- roles = []
- for ri in role_includes:
- roles.append(Role.load(ri))
- return roles
-
- # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set
-
- def _compile_roles(self):
- '''
- Handles the role compilation step, returning a flat list of tasks
- with the lowest level dependencies first. For example, if a role R
- has a dependency D1, which also has a dependency D2, the tasks from
- D2 are merged first, followed by D1, and lastly by the tasks from
- the parent role R last. This is done for all roles in the Play.
- '''
-
- block_list = []
-
- if len(self.roles) > 0:
- for r in self.roles:
- block_list.extend(r.compile(play=self))
-
- return block_list
-
- def compile(self):
- '''
- Compiles and returns the task list for this play, compiled from the
- roles (which are themselves compiled recursively) and/or the list of
- tasks specified in the play.
- '''
-
- block_list = []
-
- block_list.extend(self.pre_tasks)
- block_list.extend(self._compile_roles())
- block_list.extend(self.tasks)
- block_list.extend(self.post_tasks)
-
- return block_list
-
- def get_vars(self):
- return self.vars.copy()
-
- def get_vars_files(self):
- return self.vars_files
-
- def get_handlers(self):
- return self.handlers[:]
-
- def get_roles(self):
- return self.roles[:]
-
- def get_tasks(self):
- tasklist = []
- for task in self.pre_tasks + self.tasks + self.post_tasks:
- if isinstance(task, Block):
- tasklist.append(task.block + task.rescue + task.always)
- else:
- tasklist.append(task)
- return tasklist
-
- def serialize(self):
- data = super(Play, self).serialize()
-
- roles = []
- for role in self.get_roles():
- roles.append(role.serialize())
- data['roles'] = roles
-
- return data
-
- def deserialize(self, data):
- super(Play, self).deserialize(data)
-
- if 'roles' in data:
- role_data = data.get('roles', [])
- roles = []
- for role in role_data:
- r = Role()
- r.deserialize(role)
- roles.append(r)
-
- setattr(self, 'roles', roles)
- del data['roles']
-
diff --git a/v2/ansible/playbook/task.py b/v2/ansible/playbook/task.py
deleted file mode 100644
index 0606025798..0000000000
--- a/v2/ansible/playbook/task.py
+++ /dev/null
@@ -1,310 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ansible.errors import AnsibleError
-
-from ansible.parsing.mod_args import ModuleArgsParser
-from ansible.parsing.splitter import parse_kv
-from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
-
-from ansible.plugins import module_loader, lookup_loader
-
-from ansible.playbook.attribute import Attribute, FieldAttribute
-from ansible.playbook.base import Base
-from ansible.playbook.become import Become
-from ansible.playbook.block import Block
-from ansible.playbook.conditional import Conditional
-from ansible.playbook.role import Role
-from ansible.playbook.taggable import Taggable
-
-__all__ = ['Task']
-
-class Task(Base, Conditional, Taggable, Become):
-
- """
- A task is a language feature that represents a call to a module, with given arguments and other parameters.
- A handler is a subclass of a task.
-
- Usage:
-
- Task.load(datastructure) -> Task
- Task.something(...)
- """
-
- # =================================================================================
- # ATTRIBUTES
- # load_ and
- # validate_
- # will be used if defined
- # might be possible to define others
-
- _args = FieldAttribute(isa='dict', default=dict())
- _action = FieldAttribute(isa='string')
-
- _always_run = FieldAttribute(isa='bool')
- _any_errors_fatal = FieldAttribute(isa='bool')
- _async = FieldAttribute(isa='int', default=0)
- _changed_when = FieldAttribute(isa='string')
- _delay = FieldAttribute(isa='int', default=5)
- _delegate_to = FieldAttribute(isa='string')
- _failed_when = FieldAttribute(isa='string')
- _first_available_file = FieldAttribute(isa='list')
- _ignore_errors = FieldAttribute(isa='bool')
-
- _loop = FieldAttribute(isa='string', private=True)
- _loop_args = FieldAttribute(isa='list', private=True)
- _local_action = FieldAttribute(isa='string')
-
- # FIXME: this should not be a Task
- _meta = FieldAttribute(isa='string')
-
- _name = FieldAttribute(isa='string', default='')
-
- _notify = FieldAttribute(isa='list')
- _poll = FieldAttribute(isa='int')
- _register = FieldAttribute(isa='string')
- _retries = FieldAttribute(isa='int', default=1)
- _run_once = FieldAttribute(isa='bool')
- _until = FieldAttribute(isa='list') # ?
-
- def __init__(self, block=None, role=None, task_include=None):
- ''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
-
- self._block = block
- self._role = role
- self._task_include = task_include
-
- super(Task, self).__init__()
-
- def get_name(self):
- ''' return the name of the task '''
-
- if self._role and self.name:
- return "%s : %s" % (self._role.get_name(), self.name)
- elif self.name:
- return self.name
- else:
- flattened_args = self._merge_kv(self.args)
- if self._role:
- return "%s : %s %s" % (self._role.get_name(), self.action, flattened_args)
- else:
- return "%s %s" % (self.action, flattened_args)
-
- def _merge_kv(self, ds):
- if ds is None:
- return ""
- elif isinstance(ds, basestring):
- return ds
- elif isinstance(ds, dict):
- buf = ""
- for (k,v) in ds.iteritems():
- if k.startswith('_'):
- continue
- buf = buf + "%s=%s " % (k,v)
- buf = buf.strip()
- return buf
-
- @staticmethod
- def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
- t = Task(block=block, role=role, task_include=task_include)
- return t.load_data(data, variable_manager=variable_manager, loader=loader)
-
- def __repr__(self):
- ''' returns a human readable representation of the task '''
- return "TASK: %s" % self.get_name()
-
- def _preprocess_loop(self, ds, new_ds, k, v):
- ''' take a lookup plugin name and store it correctly '''
-
- loop_name = k.replace("with_", "")
- if new_ds.get('loop') is not None:
- raise AnsibleError("duplicate loop in task: %s" % loop_name)
- new_ds['loop'] = loop_name
- new_ds['loop_args'] = v
-
- def preprocess_data(self, ds):
- '''
- tasks are especially complex arguments so need pre-processing.
- keep it short.
- '''
-
- assert isinstance(ds, dict)
-
- # the new, cleaned datastructure, which will have legacy
- # items reduced to a standard structure suitable for the
- # attributes of the task class
- new_ds = AnsibleMapping()
- if isinstance(ds, AnsibleBaseYAMLObject):
- new_ds.ansible_pos = ds.ansible_pos
-
- # use the args parsing class to determine the action, args,
- # and the delegate_to value from the various possible forms
- # supported as legacy
- args_parser = ModuleArgsParser(task_ds=ds)
- (action, args, delegate_to) = args_parser.parse()
-
- new_ds['action'] = action
- new_ds['args'] = args
- new_ds['delegate_to'] = delegate_to
-
- for (k,v) in ds.iteritems():
- if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
- # we don't want to re-assign these values, which were
- # determined by the ModuleArgsParser() above
- continue
- elif k.replace("with_", "") in lookup_loader:
- self._preprocess_loop(ds, new_ds, k, v)
- else:
- new_ds[k] = v
-
- return super(Task, self).preprocess_data(new_ds)
-
- def post_validate(self, templar):
- '''
- Override of base class post_validate, to also do final validation on
- the block and task include (if any) to which this task belongs.
- '''
-
- if self._block:
- self._block.post_validate(templar)
- if self._task_include:
- self._task_include.post_validate(templar)
-
- super(Task, self).post_validate(templar)
-
- def get_vars(self):
- all_vars = self.vars.copy()
- if self._block:
- all_vars.update(self._block.get_vars())
- if self._task_include:
- all_vars.update(self._task_include.get_vars())
-
- all_vars.update(self.serialize())
-
- if 'tags' in all_vars:
- del all_vars['tags']
- if 'when' in all_vars:
- del all_vars['when']
- return all_vars
-
- def copy(self, exclude_block=False):
- new_me = super(Task, self).copy()
-
- new_me._block = None
- if self._block and not exclude_block:
- new_me._block = self._block.copy()
-
- new_me._role = None
- if self._role:
- new_me._role = self._role
-
- new_me._task_include = None
- if self._task_include:
- new_me._task_include = self._task_include.copy()
-
- return new_me
-
- def serialize(self):
- data = super(Task, self).serialize()
-
- if self._block:
- data['block'] = self._block.serialize()
-
- if self._role:
- data['role'] = self._role.serialize()
-
- if self._task_include:
- data['task_include'] = self._task_include.serialize()
-
- return data
-
- def deserialize(self, data):
-
- # import is here to avoid import loops
- #from ansible.playbook.task_include import TaskInclude
-
- block_data = data.get('block')
-
- if block_data:
- b = Block()
- b.deserialize(block_data)
- self._block = b
- del data['block']
-
- role_data = data.get('role')
- if role_data:
- r = Role()
- r.deserialize(role_data)
- self._role = r
- del data['role']
-
- ti_data = data.get('task_include')
- if ti_data:
- #ti = TaskInclude()
- ti = Task()
- ti.deserialize(ti_data)
- self._task_include = ti
- del data['task_include']
-
- super(Task, self).deserialize(data)
-
- def evaluate_conditional(self, all_vars):
- if self._block is not None:
- if not self._block.evaluate_conditional(all_vars):
- return False
- if self._task_include is not None:
- if not self._task_include.evaluate_conditional(all_vars):
- return False
- return super(Task, self).evaluate_conditional(all_vars)
-
- def set_loader(self, loader):
- '''
- Sets the loader on this object and recursively on parent, child objects.
- This is used primarily after the Task has been serialized/deserialized, which
- does not preserve the loader.
- '''
-
- self._loader = loader
-
- if self._block:
- self._block.set_loader(loader)
- if self._task_include:
- self._task_include.set_loader(loader)
-
- def _get_parent_attribute(self, attr, extend=False):
- '''
- Generic logic to get the attribute or parent attribute for a task value.
- '''
- value = self._attributes[attr]
- if self._block and (not value or extend):
- parent_value = getattr(self._block, attr)
- if extend:
- value = self._extend_value(value, parent_value)
- else:
- value = parent_value
- if self._task_include and (not value or extend):
- parent_value = getattr(self._task_include, attr)
- if extend:
- value = self._extend_value(value, parent_value)
- else:
- value = parent_value
- return value
-
diff --git a/v2/ansible/utils/vault.py b/v2/ansible/utils/vault.py
deleted file mode 100644
index 5c704afac5..0000000000
--- a/v2/ansible/utils/vault.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import subprocess
-
-from ansible import constants as C
-from ansible.errors import AnsibleError
-from ansible.utils.path import is_executable
-
-def read_vault_file(vault_password_file):
- """
- Read a vault password from a file or if executable, execute the script and
- retrieve password from STDOUT
- """
-
- this_path = os.path.realpath(os.path.expanduser(vault_password_file))
- if not os.path.exists(this_path):
- raise AnsibleError("The vault password file %s was not found" % this_path)
-
- if is_executable(this_path):
- try:
- # STDERR not captured to make it easier for users to prompt for input in their scripts
- p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
- except OSError as e:
- raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
- stdout, stderr = p.communicate()
- vault_pass = stdout.strip('\r\n')
- else:
- try:
- f = open(this_path, "rb")
- vault_pass=f.read().strip()
- f.close()
- except (OSError, IOError) as e:
- raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))
-
- return vault_pass
-
diff --git a/v2/bin/ansible b/v2/bin/ansible
deleted file mode 100755
index 467dd505a2..0000000000
--- a/v2/bin/ansible
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python
-
-# (c) 2012, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-########################################################
-from __future__ import (absolute_import)
-__metaclass__ = type
-
-__requires__ = ['ansible']
-try:
- import pkg_resources
-except Exception:
- # Use pkg_resources to find the correct versions of libraries and set
- # sys.path appropriately when there are multiversion installs. But we
- # have code that better expresses the errors in the places where the code
- # is actually used (the deps are optional for many code paths) so we don't
- # want to fail here.
- pass
-
-import os
-import sys
-
-from ansible.errors import AnsibleError, AnsibleOptionsError
-from ansible.utils.display import Display
-
-########################################################
-
-if __name__ == '__main__':
-
- cli = None
- display = Display()
- me = os.path.basename(__file__)
-
- try:
- if me == 'ansible-playbook':
- from ansible.cli.playbook import PlaybookCLI as mycli
- elif me == 'ansible':
- from ansible.cli.adhoc import AdHocCLI as mycli
- elif me == 'ansible-pull':
- from ansible.cli.pull import PullCLI as mycli
- elif me == 'ansible-doc':
- from ansible.cli.doc import DocCLI as mycli
- elif me == 'ansible-vault':
- from ansible.cli.vault import VaultCLI as mycli
- elif me == 'ansible-galaxy':
- from ansible.cli.galaxy import GalaxyCLI as mycli
-
- cli = mycli(sys.argv, display=display)
- if cli:
- cli.parse()
- sys.exit(cli.run())
- else:
- raise AnsibleError("Program not implemented: %s" % me)
-
- except AnsibleOptionsError as e:
- cli.parser.print_help()
- display.display(str(e), stderr=True, color='red')
- sys.exit(1)
- except AnsibleError as e:
- display.display(str(e), stderr=True, color='red')
- sys.exit(2)
- except KeyboardInterrupt:
- display.error("interrupted")
- sys.exit(4)
diff --git a/v2/bin/ansible-doc b/v2/bin/ansible-doc
deleted file mode 120000
index cabb1f519a..0000000000
--- a/v2/bin/ansible-doc
+++ /dev/null
@@ -1 +0,0 @@
-ansible
\ No newline at end of file
diff --git a/v2/bin/ansible-galaxy b/v2/bin/ansible-galaxy
deleted file mode 120000
index cabb1f519a..0000000000
--- a/v2/bin/ansible-galaxy
+++ /dev/null
@@ -1 +0,0 @@
-ansible
\ No newline at end of file
diff --git a/v2/bin/ansible-playbook b/v2/bin/ansible-playbook
deleted file mode 120000
index cabb1f519a..0000000000
--- a/v2/bin/ansible-playbook
+++ /dev/null
@@ -1 +0,0 @@
-ansible
\ No newline at end of file
diff --git a/v2/bin/ansible-pull b/v2/bin/ansible-pull
deleted file mode 120000
index cabb1f519a..0000000000
--- a/v2/bin/ansible-pull
+++ /dev/null
@@ -1 +0,0 @@
-ansible
\ No newline at end of file
diff --git a/v2/bin/ansible-vault b/v2/bin/ansible-vault
deleted file mode 120000
index cabb1f519a..0000000000
--- a/v2/bin/ansible-vault
+++ /dev/null
@@ -1 +0,0 @@
-ansible
\ No newline at end of file
diff --git a/v2/hacking/README.md b/v2/hacking/README.md
deleted file mode 100644
index 6d65464eee..0000000000
--- a/v2/hacking/README.md
+++ /dev/null
@@ -1,48 +0,0 @@
-'Hacking' directory tools
-=========================
-
-Env-setup
----------
-
-The 'env-setup' script modifies your environment to allow you to run
-ansible from a git checkout using python 2.6+. (You may not use
-python 3 at this time).
-
-First, set up your environment to run from the checkout:
-
- $ source ./hacking/env-setup
-
-You will need some basic prerequisites installed. If you do not already have them
-and do not wish to install them from your operating system package manager, you
-can install them from pip
-
- $ easy_install pip # if pip is not already available
- $ pip install pyyaml jinja2 nose passlib pycrypto
-
-From there, follow ansible instructions on docs.ansible.com as normal.
-
-Test-module
------------
-
-'test-module' is a simple program that allows module developers (or testers) to run
-a module outside of the ansible program, locally, on the current machine.
-
-Example:
-
- $ ./hacking/test-module -m library/commands/shell -a "echo hi"
-
-This is a good way to insert a breakpoint into a module, for instance.
-
-Module-formatter
-----------------
-
-The module formatter is a script used to generate manpages and online
-module documentation. This is used by the system makefiles and rarely
-needs to be run directly.
-
-Authors
--------
-'authors' is a simple script that generates a list of everyone who has
-contributed code to the ansible repository.
-
-
diff --git a/v2/hacking/authors.sh b/v2/hacking/authors.sh
deleted file mode 100755
index 7c97840b2f..0000000000
--- a/v2/hacking/authors.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-# script from http://stackoverflow.com/questions/12133583
-set -e
-
-# Get a list of authors ordered by number of commits
-# and remove the commit count column
-AUTHORS=$(git --no-pager shortlog -nse | cut -f 2- | sort -f)
-if [ -z "$AUTHORS" ] ; then
- echo "Authors list was empty"
- exit 1
-fi
-
-# Display the authors list and write it to the file
-echo "$AUTHORS" | tee "$(git rev-parse --show-toplevel)/AUTHORS.TXT"
diff --git a/v2/hacking/env-setup b/v2/hacking/env-setup
deleted file mode 100644
index 8f2c331fe4..0000000000
--- a/v2/hacking/env-setup
+++ /dev/null
@@ -1,78 +0,0 @@
-# usage: source hacking/env-setup [-q]
-# modifies environment for running Ansible from checkout
-
-# Default values for shell variables we use
-PYTHONPATH=${PYTHONPATH-""}
-PATH=${PATH-""}
-MANPATH=${MANPATH-""}
-verbosity=${1-info} # Defaults to `info' if unspecified
-
-if [ "$verbosity" = -q ]; then
- verbosity=silent
-fi
-
-# When run using source as directed, $0 gets set to bash, so we must use $BASH_SOURCE
-if [ -n "$BASH_SOURCE" ] ; then
- HACKING_DIR=$(dirname "$BASH_SOURCE")
-elif [ $(basename -- "$0") = "env-setup" ]; then
- HACKING_DIR=$(dirname "$0")
-# Works with ksh93 but not pdksh
-elif [ -n "$KSH_VERSION" ] && echo $KSH_VERSION | grep -qv '^@(#)PD KSH'; then
- HACKING_DIR=$(dirname "${.sh.file}")
-else
- HACKING_DIR="$PWD/hacking"
-fi
-# The below is an alternative to readlink -fn which doesn't exist on OS X
-# Source: http://stackoverflow.com/a/1678636
-FULL_PATH=$(python -c "import os; print(os.path.realpath('$HACKING_DIR'))")
-ANSIBLE_HOME=$(dirname "$FULL_PATH")
-
-PREFIX_PYTHONPATH="$ANSIBLE_HOME"
-PREFIX_PATH="$ANSIBLE_HOME/bin"
-PREFIX_MANPATH="$ANSIBLE_HOME/docs/man"
-
-expr "$PYTHONPATH" : "${PREFIX_PYTHONPATH}.*" > /dev/null || export PYTHONPATH="$PREFIX_PYTHONPATH:$PYTHONPATH"
-expr "$PATH" : "${PREFIX_PATH}.*" > /dev/null || export PATH="$PREFIX_PATH:$PATH"
-expr "$MANPATH" : "${PREFIX_MANPATH}.*" > /dev/null || export MANPATH="$PREFIX_MANPATH:$MANPATH"
-
-#
-# Generate egg_info so that pkg_resources works
-#
-
-# Do the work in a function so we don't repeat ourselves later
-gen_egg_info()
-{
- if [ -e "$PREFIX_PYTHONPATH/ansible.egg-info" ] ; then
- rm -r "$PREFIX_PYTHONPATH/ansible.egg-info"
- fi
- python setup.py egg_info
-}
-
-if [ "$ANSIBLE_HOME" != "$PWD" ] ; then
- current_dir="$PWD"
-else
- current_dir="$ANSIBLE_HOME"
-fi
-cd "$ANSIBLE_HOME"
-#if [ "$verbosity" = silent ] ; then
-# gen_egg_info > /dev/null 2>&1
-#else
-# gen_egg_info
-#fi
-cd "$current_dir"
-
-if [ "$verbosity" != silent ] ; then
- cat <<- EOF
-
- Setting up Ansible to run out of checkout...
-
- PATH=$PATH
- PYTHONPATH=$PYTHONPATH
- MANPATH=$MANPATH
-
- Remember, you may wish to specify your host file with -i
-
- Done!
-
- EOF
-fi
diff --git a/v2/hacking/env-setup.fish b/v2/hacking/env-setup.fish
deleted file mode 100644
index 05fb60672d..0000000000
--- a/v2/hacking/env-setup.fish
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env fish
-# usage: . ./hacking/env-setup [-q]
-# modifies environment for running Ansible from checkout
-set HACKING_DIR (dirname (status -f))
-set FULL_PATH (python -c "import os; print(os.path.realpath('$HACKING_DIR'))")
-set ANSIBLE_HOME (dirname $FULL_PATH)
-set PREFIX_PYTHONPATH $ANSIBLE_HOME/lib
-set PREFIX_PATH $ANSIBLE_HOME/bin
-set PREFIX_MANPATH $ANSIBLE_HOME/docs/man
-
-# Set PYTHONPATH
-if not set -q PYTHONPATH
- set -gx PYTHONPATH $PREFIX_PYTHONPATH
-else
- switch PYTHONPATH
- case "$PREFIX_PYTHONPATH*"
- case "*"
- echo "Appending PYTHONPATH"
- set -gx PYTHONPATH "$PREFIX_PYTHONPATH:$PYTHONPATH"
- end
-end
-
-# Set PATH
-if not contains $PREFIX_PATH $PATH
- set -gx PATH $PREFIX_PATH $PATH
-end
-
-# Set MANPATH
-if not contains $PREFIX_MANPATH $MANPATH
- if not set -q MANPATH
- set -gx MANPATH $PREFIX_MANPATH
- else
- set -gx MANPATH $PREFIX_MANPATH $MANPATH
- end
-end
-
-set -gx ANSIBLE_LIBRARY $ANSIBLE_HOME/library
-
-if set -q argv
- switch $argv
- case '-q' '--quiet'
- case '*'
- echo ""
- echo "Setting up Ansible to run out of checkout..."
- echo ""
- echo "PATH=$PATH"
- echo "PYTHONPATH=$PYTHONPATH"
- echo "ANSIBLE_LIBRARY=$ANSIBLE_LIBRARY"
- echo "MANPATH=$MANPATH"
- echo ""
-
- echo "Remember, you may wish to specify your host file with -i"
- echo ""
- echo "Done!"
- echo ""
- end
-end
diff --git a/v2/hacking/get_library.py b/v2/hacking/get_library.py
deleted file mode 100755
index 571183b688..0000000000
--- a/v2/hacking/get_library.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# (c) 2014, Will Thames
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-
-import ansible.constants as C
-import sys
-
-def main():
- print C.DEFAULT_MODULE_PATH
- return 0
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/v2/hacking/module_formatter.py b/v2/hacking/module_formatter.py
deleted file mode 100755
index e70eb982de..0000000000
--- a/v2/hacking/module_formatter.py
+++ /dev/null
@@ -1,442 +0,0 @@
-#!/usr/bin/env python
-# (c) 2012, Jan-Piet Mens
-# (c) 2012-2014, Michael DeHaan and others
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-
-import os
-import glob
-import sys
-import yaml
-import codecs
-import json
-import ast
-import re
-import optparse
-import time
-import datetime
-import subprocess
-import cgi
-from jinja2 import Environment, FileSystemLoader
-
-import ansible.utils
-import ansible.utils.module_docs as module_docs
-
-#####################################################################################
-# constants and paths
-
-# if a module is added in a version of Ansible older than this, don't print the version added information
-# in the module documentation because everyone is assumed to be running something newer than this already.
-TO_OLD_TO_BE_NOTABLE = 1.0
-
-# Get parent directory of the directory this script lives in
-MODULEDIR=os.path.abspath(os.path.join(
- os.path.dirname(os.path.realpath(__file__)), os.pardir, 'lib', 'ansible', 'modules'
-))
-
-# The name of the DOCUMENTATION template
-EXAMPLE_YAML=os.path.abspath(os.path.join(
- os.path.dirname(os.path.realpath(__file__)), os.pardir, 'examples', 'DOCUMENTATION.yml'
-))
-
-_ITALIC = re.compile(r"I\(([^)]+)\)")
-_BOLD = re.compile(r"B\(([^)]+)\)")
-_MODULE = re.compile(r"M\(([^)]+)\)")
-_URL = re.compile(r"U\(([^)]+)\)")
-_CONST = re.compile(r"C\(([^)]+)\)")
-
-DEPRECATED = " (D)"
-NOTCORE = " (E)"
-#####################################################################################
-
-def rst_ify(text):
- ''' convert symbols like I(this is in italics) to valid restructured text '''
-
- t = _ITALIC.sub(r'*' + r"\1" + r"*", text)
- t = _BOLD.sub(r'**' + r"\1" + r"**", t)
- t = _MODULE.sub(r'``' + r"\1" + r"``", t)
- t = _URL.sub(r"\1", t)
- t = _CONST.sub(r'``' + r"\1" + r"``", t)
-
- return t
-
-#####################################################################################
-
-def html_ify(text):
- ''' convert symbols like I(this is in italics) to valid HTML '''
-
- t = cgi.escape(text)
- t = _ITALIC.sub("" + r"\1" + "", t)
- t = _BOLD.sub("" + r"\1" + "", t)
- t = _MODULE.sub("" + r"\1" + "", t)
- t = _URL.sub("" + r"\1" + "", t)
- t = _CONST.sub("" + r"\1" + "
", t)
-
- return t
-
-
-#####################################################################################
-
-def rst_fmt(text, fmt):
- ''' helper for Jinja2 to do format strings '''
-
- return fmt % (text)
-
-#####################################################################################
-
-def rst_xline(width, char="="):
- ''' return a restructured text line of a given length '''
-
- return char * width
-
-#####################################################################################
-
-def write_data(text, options, outputname, module):
- ''' dumps module output to a file or the screen, as requested '''
-
- if options.output_dir is not None:
- fname = os.path.join(options.output_dir, outputname % module)
- fname = fname.replace(".py","")
- f = open(fname, 'w')
- f.write(text.encode('utf-8'))
- f.close()
- else:
- print text
-
-#####################################################################################
-
-
-def list_modules(module_dir, depth=0):
- ''' returns a hash of categories, each category being a hash of module names to file paths '''
-
- categories = dict(all=dict(),_aliases=dict())
- if depth <= 3: # limit # of subdirs
-
- files = glob.glob("%s/*" % module_dir)
- for d in files:
-
- category = os.path.splitext(os.path.basename(d))[0]
- if os.path.isdir(d):
-
- res = list_modules(d, depth + 1)
- for key in res.keys():
- if key in categories:
- categories[key] = ansible.utils.merge_hash(categories[key], res[key])
- res.pop(key, None)
-
- if depth < 2:
- categories.update(res)
- else:
- category = module_dir.split("/")[-1]
- if not category in categories:
- categories[category] = res
- else:
- categories[category].update(res)
- else:
- module = category
- category = os.path.basename(module_dir)
- if not d.endswith(".py") or d.endswith('__init__.py'):
- # windows powershell modules have documentation stubs in python docstring
- # format (they are not executed) so skip the ps1 format files
- continue
- elif module.startswith("_") and os.path.islink(d):
- source = os.path.splitext(os.path.basename(os.path.realpath(d)))[0]
- module = module.replace("_","",1)
- if not d in categories['_aliases']:
- categories['_aliases'][source] = [module]
- else:
- categories['_aliases'][source].update(module)
- continue
-
- if not category in categories:
- categories[category] = {}
- categories[category][module] = d
- categories['all'][module] = d
-
- return categories
-
-#####################################################################################
-
-def generate_parser():
- ''' generate an optparse parser '''
-
- p = optparse.OptionParser(
- version='%prog 1.0',
- usage='usage: %prog [options] arg1 arg2',
- description='Generate module documentation from metadata',
- )
-
- p.add_option("-A", "--ansible-version", action="store", dest="ansible_version", default="unknown", help="Ansible version number")
- p.add_option("-M", "--module-dir", action="store", dest="module_dir", default=MODULEDIR, help="Ansible library path")
- p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="hacking/templates", help="directory containing Jinja2 templates")
- p.add_option("-t", "--type", action='store', dest='type', choices=['rst'], default='rst', help="Document type")
- p.add_option("-v", "--verbose", action='store_true', default=False, help="Verbose")
- p.add_option("-o", "--output-dir", action="store", dest="output_dir", default=None, help="Output directory for module files")
- p.add_option("-I", "--includes-file", action="store", dest="includes_file", default=None, help="Create a file containing list of processed modules")
- p.add_option('-V', action='version', help='Show version number and exit')
- return p
-
-#####################################################################################
-
-def jinja2_environment(template_dir, typ):
-
- env = Environment(loader=FileSystemLoader(template_dir),
- variable_start_string="@{",
- variable_end_string="}@",
- trim_blocks=True,
- )
- env.globals['xline'] = rst_xline
-
- if typ == 'rst':
- env.filters['convert_symbols_to_format'] = rst_ify
- env.filters['html_ify'] = html_ify
- env.filters['fmt'] = rst_fmt
- env.filters['xline'] = rst_xline
- template = env.get_template('rst.j2')
- outputname = "%s_module.rst"
- else:
- raise Exception("unknown module format type: %s" % typ)
-
- return env, template, outputname
-
-#####################################################################################
-
-def process_module(module, options, env, template, outputname, module_map, aliases):
-
- fname = module_map[module]
- if isinstance(fname, dict):
- return "SKIPPED"
-
- basename = os.path.basename(fname)
- deprecated = False
-
- # ignore files with extensions
- if not basename.endswith(".py"):
- return
- elif module.startswith("_"):
- if os.path.islink(fname):
- return # ignore, its an alias
- deprecated = True
- module = module.replace("_","",1)
-
- print "rendering: %s" % module
-
- # use ansible core library to parse out doc metadata YAML and plaintext examples
- doc, examples, returndocs = ansible.utils.module_docs.get_docstring(fname, verbose=options.verbose)
-
- # crash if module is missing documentation and not explicitly hidden from docs index
- if doc is None:
- if module in ansible.utils.module_docs.BLACKLIST_MODULES:
- return "SKIPPED"
- else:
- sys.stderr.write("*** ERROR: MODULE MISSING DOCUMENTATION: %s, %s ***\n" % (fname, module))
- sys.exit(1)
-
- if deprecated and 'deprecated' not in doc:
- sys.stderr.write("*** ERROR: DEPRECATED MODULE MISSING 'deprecated' DOCUMENTATION: %s, %s ***\n" % (fname, module))
- sys.exit(1)
-
- if "/core/" in fname:
- doc['core'] = True
- else:
- doc['core'] = False
-
- if module in aliases:
- doc['aliases'] = aliases[module]
-
- all_keys = []
-
- if not 'version_added' in doc:
- sys.stderr.write("*** ERROR: missing version_added in: %s ***\n" % module)
- sys.exit(1)
-
- added = 0
- if doc['version_added'] == 'historical':
- del doc['version_added']
- else:
- added = doc['version_added']
-
- # don't show version added information if it's too old to be called out
- if added:
- added_tokens = str(added).split(".")
- added = added_tokens[0] + "." + added_tokens[1]
- added_float = float(added)
- if added and added_float < TO_OLD_TO_BE_NOTABLE:
- del doc['version_added']
-
- for (k,v) in doc['options'].iteritems():
- all_keys.append(k)
-
- all_keys = sorted(all_keys)
-
- doc['option_keys'] = all_keys
- doc['filename'] = fname
- doc['docuri'] = doc['module'].replace('_', '-')
- doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
- doc['ansible_version'] = options.ansible_version
- doc['plainexamples'] = examples #plain text
-
- # here is where we build the table of contents...
-
- text = template.render(doc)
- write_data(text, options, outputname, module)
- return doc['short_description']
-
-#####################################################################################
-
-def print_modules(module, category_file, deprecated, core, options, env, template, outputname, module_map, aliases):
- modstring = module
- modname = module
- if module in deprecated:
- modstring = modstring + DEPRECATED
- modname = "_" + module
- elif module not in core:
- modstring = modstring + NOTCORE
-
- result = process_module(modname, options, env, template, outputname, module_map, aliases)
-
- if result != "SKIPPED":
- category_file.write(" %s - %s <%s_module>\n" % (modstring, result, module))
-
-def process_category(category, categories, options, env, template, outputname):
-
- module_map = categories[category]
-
- aliases = {}
- if '_aliases' in categories:
- aliases = categories['_aliases']
-
- category_file_path = os.path.join(options.output_dir, "list_of_%s_modules.rst" % category)
- category_file = open(category_file_path, "w")
- print "*** recording category %s in %s ***" % (category, category_file_path)
-
- # TODO: start a new category file
-
- category = category.replace("_"," ")
- category = category.title()
-
- modules = []
- deprecated = []
- core = []
- for module in module_map.keys():
-
- if isinstance(module_map[module], dict):
- for mod in module_map[module].keys():
- if mod.startswith("_"):
- mod = mod.replace("_","",1)
- deprecated.append(mod)
- elif '/core/' in module_map[module][mod]:
- core.append(mod)
- else:
- if module.startswith("_"):
- module = module.replace("_","",1)
- deprecated.append(module)
- elif '/core/' in module_map[module]:
- core.append(module)
-
- modules.append(module)
-
- modules.sort()
-
- category_header = "%s Modules" % (category.title())
- underscores = "`" * len(category_header)
-
- category_file.write("""\
-%s
-%s
-
-.. toctree:: :maxdepth: 1
-
-""" % (category_header, underscores))
- sections = []
- for module in modules:
- if module in module_map and isinstance(module_map[module], dict):
- sections.append(module)
- continue
- else:
- print_modules(module, category_file, deprecated, core, options, env, template, outputname, module_map, aliases)
-
- sections.sort()
- for section in sections:
- category_file.write("\n%s\n%s\n\n" % (section.replace("_"," ").title(),'-' * len(section)))
- category_file.write(".. toctree:: :maxdepth: 1\n\n")
-
- section_modules = module_map[section].keys()
- section_modules.sort()
- #for module in module_map[section]:
- for module in section_modules:
- print_modules(module, category_file, deprecated, core, options, env, template, outputname, module_map[section], aliases)
-
- category_file.write("""\n\n
-.. note::
- - %s: This marks a module as deprecated, which means a module is kept for backwards compatibility but usage is discouraged. The module documentation details page may explain more about this rationale.
- - %s: This marks a module as 'extras', which means it ships with ansible but may be a newer module and possibly (but not necessarily) less activity maintained than 'core' modules.
- - Tickets filed on modules are filed to different repos than those on the main open source project. Core module tickets should be filed at `ansible/ansible-modules-core on GitHub `_, extras tickets to `ansible/ansible-modules-extras on GitHub `_
-""" % (DEPRECATED, NOTCORE))
- category_file.close()
-
- # TODO: end a new category file
-
-#####################################################################################
-
-def validate_options(options):
- ''' validate option parser options '''
-
- if not options.module_dir:
- print >>sys.stderr, "--module-dir is required"
- sys.exit(1)
- if not os.path.exists(options.module_dir):
- print >>sys.stderr, "--module-dir does not exist: %s" % options.module_dir
- sys.exit(1)
- if not options.template_dir:
- print "--template-dir must be specified"
- sys.exit(1)
-
-#####################################################################################
-
-def main():
-
- p = generate_parser()
-
- (options, args) = p.parse_args()
- validate_options(options)
-
- env, template, outputname = jinja2_environment(options.template_dir, options.type)
-
- categories = list_modules(options.module_dir)
- last_category = None
- category_names = categories.keys()
- category_names.sort()
-
- category_list_path = os.path.join(options.output_dir, "modules_by_category.rst")
- category_list_file = open(category_list_path, "w")
- category_list_file.write("Module Index\n")
- category_list_file.write("============\n")
- category_list_file.write("\n\n")
- category_list_file.write(".. toctree::\n")
- category_list_file.write(" :maxdepth: 1\n\n")
-
- for category in category_names:
- if category.startswith("_"):
- continue
- category_list_file.write(" list_of_%s_modules\n" % category)
- process_category(category, categories, options, env, template, outputname)
-
- category_list_file.close()
-
-if __name__ == '__main__':
- main()
diff --git a/v2/hacking/templates/rst.j2 b/v2/hacking/templates/rst.j2
deleted file mode 100644
index 59b8f35474..0000000000
--- a/v2/hacking/templates/rst.j2
+++ /dev/null
@@ -1,153 +0,0 @@
-.. _@{ module }@:
-
-{% if short_description %}
-{% set title = module + ' - ' + short_description|convert_symbols_to_format %}
-{% else %}
-{% set title = module %}
-{% endif %}
-{% set title_len = title|length %}
-
-@{ title }@
-@{ '+' * title_len }@
-
-.. contents::
- :local:
- :depth: 1
-
-{# ------------------------------------------
- #
- # Please note: this looks like a core dump
- # but it isn't one.
- #
- --------------------------------------------#}
-
-{% if aliases is defined -%}
-Aliases: @{ ','.join(aliases) }@
-{% endif %}
-
-{% if deprecated is defined -%}
-DEPRECATED
-----------
-
-@{ deprecated }@
-{% endif %}
-
-Synopsis
---------
-
-{% if version_added is defined -%}
-.. versionadded:: @{ version_added }@
-{% endif %}
-
-{% for desc in description -%}
-@{ desc | convert_symbols_to_format }@
-{% endfor %}
-
-{% if options -%}
-Options
--------
-
-.. raw:: html
-
-
-
- parameter |
- required |
- default |
- choices |
- comments |
-
- {% for k in option_keys %}
- {% set v = options[k] %}
-
- @{ k }@ |
- {% if v.get('required', False) %}yes{% else %}no{% endif %} |
- {% if v['default'] %}@{ v['default'] }@{% endif %} |
- {% if v.get('type', 'not_bool') == 'bool' %}
- |
- {% else %}
- {% for choice in v.get('choices',[]) -%}- @{ choice }@
{% endfor -%} |
- {% endif %}
- {% for desc in v.description -%}@{ desc | html_ify }@{% endfor -%}{% if v['version_added'] %} (added in Ansible @{v['version_added']}@){% endif %} |
-
- {% endfor %}
-
-{% endif %}
-
-{% if requirements %}
-{% for req in requirements %}
-
-.. note:: Requires @{ req | convert_symbols_to_format }@
-
-{% endfor %}
-{% endif %}
-
-{% if examples or plainexamples %}
-Examples
---------
-
-.. raw:: html
-
-{% for example in examples %}
- {% if example['description'] %}@{ example['description'] | html_ify }@
{% endif %}
-
-
-@{ example['code'] | escape | indent(4, True) }@
-
-
-{% endfor %}
-
-
-{% if plainexamples %}
-
-::
-
-@{ plainexamples | indent(4, True) }@
-{% endif %}
-{% endif %}
-
-{% if notes %}
-{% for note in notes %}
-.. note:: @{ note | convert_symbols_to_format }@
-{% endfor %}
-{% endif %}
-
-
-{% if not deprecated %}
- {% if core %}
-
-This is a Core Module
----------------------
-
-This source of this module is hosted on GitHub in the `ansible-modules-core `_ repo.
-
-If you believe you have found a bug in this module, and are already running the latest stable or development version of Ansible, first look in the `issue tracker at github.com/ansible/ansible-modules-core `_ to see if a bug has already been filed. If not, we would be grateful if you would file one.
-
-Should you have a question rather than a bug report, inquiries are welcome on the `ansible-project google group `_ or on Ansible's "#ansible" channel, located on irc.freenode.net. Development oriented topics should instead use the similar `ansible-devel google group `_.
-
-Documentation updates for this module can also be edited directly by submitting a pull request to the module source code, just look for the "DOCUMENTATION" block in the source tree.
-
-This is a "core" ansible module, which means it will receive slightly higher priority for all requests than those in the "extras" repos.
-
- {% else %}
-
-This is an Extras Module
-------------------------
-
-This source of this module is hosted on GitHub in the `ansible-modules-extras `_ repo.
-
-If you believe you have found a bug in this module, and are already running the latest stable or development version of Ansible, first look in the `issue tracker at github.com/ansible/ansible-modules-extras `_ to see if a bug has already been filed. If not, we would be grateful if you would file one.
-
-Should you have a question rather than a bug report, inquiries are welcome on the `ansible-project google group ` or on Ansible's "#ansible" channel, located on irc.freenode.net. Development oriented topics should instead use the similar `ansible-devel google group `_.
-
-Documentation updates for this module can also be edited directly by submitting a pull request to the module source code, just look for the "DOCUMENTATION" block in the source tree.
-
-Note that this module is designated a "extras" module. Non-core modules are still fully usable, but may receive slightly lower response rates for issues and pull requests.
-Popular "extras" modules may be promoted to core modules over time.
-
- {% endif %}
-{% endif %}
-
-For help in developing on modules, should you be so inclined, please read :doc:`community`, :doc:`developing_test_pr` and :doc:`developing_modules`.
-
-
diff --git a/v2/hacking/test-module b/v2/hacking/test-module
deleted file mode 100755
index b672e23e26..0000000000
--- a/v2/hacking/test-module
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python
-
-# (c) 2012, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-#
-
-# this script is for testing modules without running through the
-# entire guts of ansible, and is very helpful for when developing
-# modules
-#
-# example:
-# test-module -m ../library/commands/command -a "/bin/sleep 3"
-# test-module -m ../library/system/service -a "name=httpd ensure=restarted"
-# test-module -m ../library/system/service -a "name=httpd ensure=restarted" --debugger /usr/bin/pdb
-# test-modulr -m ../library/file/lineinfile -a "dest=/etc/exports line='/srv/home hostname1(rw,sync)'" --check
-
-import sys
-import base64
-import os
-import subprocess
-import traceback
-import optparse
-
-from ansible import utils
-from ansible import module_common
-import ansible.constants as C
-
-try:
- import json
-except ImportError:
- import simplejson as json
-
-def parse():
- """parse command line
-
- :return : (options, args)"""
- parser = optparse.OptionParser()
-
- parser.usage = "%prog -[options] (-h for help)"
-
- parser.add_option('-m', '--module-path', dest='module_path',
- help="REQUIRED: full path of module source to execute")
- parser.add_option('-a', '--args', dest='module_args', default="",
- help="module argument string")
- parser.add_option('-D', '--debugger', dest='debugger',
- help="path to python debugger (e.g. /usr/bin/pdb)")
- parser.add_option('-I', '--interpreter', dest='interpreter',
- help="path to interpeter to use for this module (e.g. ansible_python_interpreter=/usr/bin/python)",
- metavar='INTERPRETER_TYPE=INTERPRETER_PATH')
- parser.add_option('-c', '--check', dest='check', action='store_true',
- help="run the module in check mode")
- options, args = parser.parse_args()
- if not options.module_path:
- parser.print_help()
- sys.exit(1)
- else:
- return options, args
-
-def write_argsfile(argstring, json=False):
- """ Write args to a file for old-style module's use. """
- argspath = os.path.expanduser("~/.ansible_test_module_arguments")
- argsfile = open(argspath, 'w')
- if json:
- args = utils.parse_kv(argstring)
- argstring = utils.jsonify(args)
- argsfile.write(argstring)
- argsfile.close()
- return argspath
-
-def boilerplate_module(modfile, args, interpreter, check):
- """ simulate what ansible does with new style modules """
-
- #module_fh = open(modfile)
- #module_data = module_fh.read()
- #module_fh.close()
-
- #included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1
-
- complex_args = {}
- if args.startswith("@"):
- # Argument is a YAML file (JSON is a subset of YAML)
- complex_args = utils.combine_vars(complex_args, utils.parse_yaml_from_file(args[1:]))
- args=''
- elif args.startswith("{"):
- # Argument is a YAML document (not a file)
- complex_args = utils.combine_vars(complex_args, utils.parse_yaml(args))
- args=''
-
- inject = {}
- if interpreter:
- if '=' not in interpreter:
- print 'interpeter must by in the form of ansible_python_interpreter=/usr/bin/python'
- sys.exit(1)
- interpreter_type, interpreter_path = interpreter.split('=')
- if not interpreter_type.startswith('ansible_'):
- interpreter_type = 'ansible_%s' % interpreter_type
- if not interpreter_type.endswith('_interpreter'):
- interpreter_type = '%s_interpreter' % interpreter_type
- inject[interpreter_type] = interpreter_path
-
- if check:
- complex_args['CHECKMODE'] = True
-
- (module_data, module_style, shebang) = module_common.modify_module(
- modfile,
- complex_args,
- args,
- inject
- )
-
- modfile2_path = os.path.expanduser("~/.ansible_module_generated")
- print "* including generated source, if any, saving to: %s" % modfile2_path
- print "* this may offset any line numbers in tracebacks/debuggers!"
- modfile2 = open(modfile2_path, 'w')
- modfile2.write(module_data)
- modfile2.close()
- modfile = modfile2_path
-
- return (modfile2_path, module_style)
-
-def runtest( modfile, argspath):
- """Test run a module, piping it's output for reporting."""
-
- os.system("chmod +x %s" % modfile)
-
- invoke = "%s" % (modfile)
- if argspath is not None:
- invoke = "%s %s" % (modfile, argspath)
-
- cmd = subprocess.Popen(invoke, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (out, err) = cmd.communicate()
-
- try:
- print "***********************************"
- print "RAW OUTPUT"
- print out
- print err
- results = utils.parse_json(out)
- except:
- print "***********************************"
- print "INVALID OUTPUT FORMAT"
- print out
- traceback.print_exc()
- sys.exit(1)
-
- print "***********************************"
- print "PARSED OUTPUT"
- print utils.jsonify(results,format=True)
-
-def rundebug(debugger, modfile, argspath):
- """Run interactively with console debugger."""
-
- if argspath is not None:
- subprocess.call("%s %s %s" % (debugger, modfile, argspath), shell=True)
- else:
- subprocess.call("%s %s" % (debugger, modfile), shell=True)
-
-def main():
-
- options, args = parse()
- (modfile, module_style) = boilerplate_module(options.module_path, options.module_args, options.interpreter, options.check)
-
- argspath=None
- if module_style != 'new':
- if module_style == 'non_native_want_json':
- argspath = write_argsfile(options.module_args, json=True)
- elif module_style == 'old':
- argspath = write_argsfile(options.module_args, json=False)
- else:
- raise Exception("internal error, unexpected module style: %s" % module_style)
- if options.debugger:
- rundebug(options.debugger, modfile, argspath)
- else:
- runtest(modfile, argspath)
-
-if __name__ == "__main__":
- main()
-
diff --git a/v2/scripts/ansible b/v2/scripts/ansible
deleted file mode 100644
index ae8ccff595..0000000000
--- a/v2/scripts/ansible
+++ /dev/null
@@ -1,20 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
diff --git a/v2/setup.py b/v2/setup.py
deleted file mode 100644
index e982c382f2..0000000000
--- a/v2/setup.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-
-from ansible import __version__
-try:
- from setuptools import setup, find_packages
-except ImportError:
- print("Ansible now needs setuptools in order to build. Install it using"
- " your package manager (usually python-setuptools) or via pip (pip"
- " install setuptools).")
- sys.exit(1)
-
-setup(name='ansible',
- version=__version__,
- description='Radically simple IT automation',
- author='Michael DeHaan',
- author_email='michael@ansible.com',
- url='http://ansible.com/',
- license='GPLv3',
- install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six >= 1.4.0'],
- # package_dir={ '': 'lib' },
- # packages=find_packages('lib'),
- package_data={
- '': ['module_utils/*.ps1', 'modules/core/windows/*.ps1', 'modules/extras/windows/*.ps1'],
- },
- scripts=[
- 'bin/ansible',
- 'bin/ansible-playbook',
- # 'bin/ansible-pull',
- # 'bin/ansible-doc',
- # 'bin/ansible-galaxy',
- # 'bin/ansible-vault',
- ],
- data_files=[],
-)
diff --git a/v2/test/mock/__init__.py b/v2/test/mock/__init__.py
deleted file mode 100644
index ae8ccff595..0000000000
--- a/v2/test/mock/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# (c) 2012-2014, Michael DeHaan
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
From 249fd2a7e1b79139e814e66a0a47e3e497e3f243 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sun, 3 May 2015 21:58:48 -0500
Subject: [PATCH 026/971] Re-adding submodules after moving things around
---
.gitmodules | 12 ++++++++++++
lib/ansible/__init__.py | 8 ++------
lib/ansible/modules/core | 1 +
lib/ansible/modules/extras | 1 +
v1/ansible/modules/core | 1 +
v1/ansible/modules/extras | 1 +
6 files changed, 18 insertions(+), 6 deletions(-)
create mode 160000 lib/ansible/modules/core
create mode 160000 lib/ansible/modules/extras
create mode 160000 v1/ansible/modules/core
create mode 160000 v1/ansible/modules/extras
diff --git a/.gitmodules b/.gitmodules
index e69de29bb2..793522a29c 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -0,0 +1,12 @@
+[submodule "lib/ansible/modules/core"]
+ path = lib/ansible/modules/core
+ url = https://github.com/ansible/ansible-modules-core
+[submodule "lib/ansible/modules/extras"]
+ path = lib/ansible/modules/extras
+ url = https://github.com/ansible/ansible-modules-extras
+[submodule "v1/ansible/modules/core"]
+ path = v1/ansible/modules/core
+ url = https://github.com/ansible/ansible-modules-core
+[submodule "v1/ansible/modules/extras"]
+ path = v1/ansible/modules/extras
+ url = https://github.com/ansible/ansible-modules-extras
diff --git a/lib/ansible/__init__.py b/lib/ansible/__init__.py
index 8637adb54d..704b6456f7 100644
--- a/lib/ansible/__init__.py
+++ b/lib/ansible/__init__.py
@@ -14,9 +14,5 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see .
-
-# Make coding more python3-ish
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-__version__ = '2.0'
+__version__ = '2.0.0'
+__author__ = 'Ansible, Inc.'
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
new file mode 160000
index 0000000000..0341ddd35e
--- /dev/null
+++ b/lib/ansible/modules/core
@@ -0,0 +1 @@
+Subproject commit 0341ddd35ed5ff477ad5de2488d947255ce86259
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
new file mode 160000
index 0000000000..495ad450e5
--- /dev/null
+++ b/lib/ansible/modules/extras
@@ -0,0 +1 @@
+Subproject commit 495ad450e53feb1cd26218dc68056cc34d1ea9ff
diff --git a/v1/ansible/modules/core b/v1/ansible/modules/core
new file mode 160000
index 0000000000..9028e9d4be
--- /dev/null
+++ b/v1/ansible/modules/core
@@ -0,0 +1 @@
+Subproject commit 9028e9d4be8a3dbb96c81a799e18f3adf63d9fd0
diff --git a/v1/ansible/modules/extras b/v1/ansible/modules/extras
new file mode 160000
index 0000000000..495ad450e5
--- /dev/null
+++ b/v1/ansible/modules/extras
@@ -0,0 +1 @@
+Subproject commit 495ad450e53feb1cd26218dc68056cc34d1ea9ff
From 803fb397f35fe190a9c10a4e25386a6450ff52ff Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 4 May 2015 01:33:10 -0500
Subject: [PATCH 027/971] Fixing filter plugins directory from switch
---
lib/ansible/executor/task_executor.py | 21 +-
lib/ansible/playbook/block.py | 12 +-
lib/ansible/playbook/conditional.py | 4 +-
lib/ansible/playbook/task.py | 8 +-
lib/ansible/plugins/action/__init__.py | 3 +-
lib/ansible/plugins/action/assert.py | 2 +-
lib/ansible/plugins/action/debug.py | 4 +-
lib/ansible/plugins/action/set_fact.py | 4 +-
lib/ansible/plugins/action/template.py | 4 +-
lib/ansible/plugins/filter | 1 -
lib/ansible/plugins/filter/__init__.py | 0
lib/ansible/plugins/filter/core.py | 351 +++++++++++++
lib/ansible/plugins/filter/ipaddr.py | 659 ++++++++++++++++++++++++
lib/ansible/plugins/filter/mathstuff.py | 126 +++++
14 files changed, 1166 insertions(+), 33 deletions(-)
delete mode 120000 lib/ansible/plugins/filter
create mode 100644 lib/ansible/plugins/filter/__init__.py
create mode 100644 lib/ansible/plugins/filter/core.py
create mode 100644 lib/ansible/plugins/filter/ipaddr.py
create mode 100644 lib/ansible/plugins/filter/mathstuff.py
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 2f90b3d87e..7fa2134948 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -180,7 +180,8 @@ class TaskExecutor:
final_items = []
for item in items:
variables['item'] = item
- if self._task.evaluate_conditional(variables):
+ templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
+ if self._task.evaluate_conditional(templar, variables):
final_items.append(item)
return [",".join(final_items)]
else:
@@ -208,13 +209,13 @@ class TaskExecutor:
# get the connection and the handler for this execution
self._connection = self._get_connection(variables)
- self._handler = self._get_action_handler(connection=self._connection)
+ self._handler = self._get_action_handler(connection=self._connection, templar=templar)
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
# the fact that the conditional may specify that the task be skipped due to a
# variable not being present which would otherwise cause validation to fail
- if not self._task.evaluate_conditional(variables):
+ if not self._task.evaluate_conditional(templar, variables):
debug("when evaulation failed, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional check failed')
@@ -268,7 +269,7 @@ class TaskExecutor:
return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))
if self._task.poll > 0:
- result = self._poll_async_result(result=result)
+ result = self._poll_async_result(result=result, templar=templar)
# update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
@@ -284,15 +285,15 @@ class TaskExecutor:
# FIXME: make sure until is mutually exclusive with changed_when/failed_when
if self._task.until:
cond.when = self._task.until
- if cond.evaluate_conditional(vars_copy):
+ if cond.evaluate_conditional(templar, vars_copy):
break
elif (self._task.changed_when or self._task.failed_when) and 'skipped' not in result:
if self._task.changed_when:
cond.when = [ self._task.changed_when ]
- result['changed'] = cond.evaluate_conditional(vars_copy)
+ result['changed'] = cond.evaluate_conditional(templar, vars_copy)
if self._task.failed_when:
cond.when = [ self._task.failed_when ]
- failed_when_result = cond.evaluate_conditional(vars_copy)
+ failed_when_result = cond.evaluate_conditional(templar, vars_copy)
result['failed_when_result'] = result['failed'] = failed_when_result
if failed_when_result:
break
@@ -315,7 +316,7 @@ class TaskExecutor:
debug("attempt loop complete, returning result")
return result
- def _poll_async_result(self, result):
+ def _poll_async_result(self, result, templar):
'''
Polls for the specified JID to be complete
'''
@@ -339,6 +340,7 @@ class TaskExecutor:
connection=self._connection,
connection_info=self._connection_info,
loader=self._loader,
+ templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
@@ -391,7 +393,7 @@ class TaskExecutor:
return connection
- def _get_action_handler(self, connection):
+ def _get_action_handler(self, connection, templar):
'''
Returns the correct action plugin to handle the requestion task action
'''
@@ -411,6 +413,7 @@ class TaskExecutor:
connection=connection,
connection_info=self._connection_info,
loader=self._loader,
+ templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py
index e6ad8e5745..d65f787127 100644
--- a/lib/ansible/playbook/block.py
+++ b/lib/ansible/playbook/block.py
@@ -225,21 +225,21 @@ class Block(Base, Become, Conditional, Taggable):
ti.deserialize(ti_data)
self._task_include = ti
- def evaluate_conditional(self, all_vars):
+ def evaluate_conditional(self, templar, all_vars):
if len(self._dep_chain):
for dep in self._dep_chain:
- if not dep.evaluate_conditional(all_vars):
+ if not dep.evaluate_conditional(templar, all_vars):
return False
if self._task_include is not None:
- if not self._task_include.evaluate_conditional(all_vars):
+ if not self._task_include.evaluate_conditional(templar, all_vars):
return False
if self._parent_block is not None:
- if not self._parent_block.evaluate_conditional(all_vars):
+ if not self._parent_block.evaluate_conditional(templar, all_vars):
return False
elif self._role is not None:
- if not self._role.evaluate_conditional(all_vars):
+ if not self._role.evaluate_conditional(templar, all_vars):
return False
- return super(Block, self).evaluate_conditional(all_vars)
+ return super(Block, self).evaluate_conditional(templar, all_vars)
def set_loader(self, loader):
self._loader = loader
diff --git a/lib/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py
index 2233f3fa9e..707233aaa0 100644
--- a/lib/ansible/playbook/conditional.py
+++ b/lib/ansible/playbook/conditional.py
@@ -47,16 +47,16 @@ class Conditional:
if not isinstance(value, list):
setattr(self, name, [ value ])
- def evaluate_conditional(self, all_vars):
+ def evaluate_conditional(self, templar, all_vars):
'''
Loops through the conditionals set on this object, returning
False if any of them evaluate as such.
'''
- templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=False)
for conditional in self.when:
if not self._check_conditional(conditional, templar, all_vars):
return False
+
return True
def _check_conditional(self, conditional, templar, all_vars):
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index 0606025798..58788df65b 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -266,14 +266,14 @@ class Task(Base, Conditional, Taggable, Become):
super(Task, self).deserialize(data)
- def evaluate_conditional(self, all_vars):
+ def evaluate_conditional(self, templar, all_vars):
if self._block is not None:
- if not self._block.evaluate_conditional(all_vars):
+ if not self._block.evaluate_conditional(templar, all_vars):
return False
if self._task_include is not None:
- if not self._task_include.evaluate_conditional(all_vars):
+ if not self._task_include.evaluate_conditional(templar, all_vars):
return False
- return super(Task, self).evaluate_conditional(all_vars)
+ return super(Task, self).evaluate_conditional(templar, all_vars)
def set_loader(self, loader):
'''
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index 62036cc706..83c129687e 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -44,11 +44,12 @@ class ActionBase:
action in use.
'''
- def __init__(self, task, connection, connection_info, loader, shared_loader_obj):
+ def __init__(self, task, connection, connection_info, loader, templar, shared_loader_obj):
self._task = task
self._connection = connection
self._connection_info = connection_info
self._loader = loader
+ self._templar = templar
self._shared_loader_obj = shared_loader_obj
self._shell = self.get_shell()
diff --git a/lib/ansible/plugins/action/assert.py b/lib/ansible/plugins/action/assert.py
index 5c4fdd7b89..d39484f366 100644
--- a/lib/ansible/plugins/action/assert.py
+++ b/lib/ansible/plugins/action/assert.py
@@ -48,7 +48,7 @@ class ActionModule(ActionBase):
cond = Conditional(loader=self._loader)
for that in thats:
cond.when = [ that ]
- test_result = cond.evaluate_conditional(all_vars=task_vars)
+ test_result = cond.evaluate_conditional(templar=self._templar, all_vars=task_vars)
if not test_result:
result = dict(
failed = True,
diff --git a/lib/ansible/plugins/action/debug.py b/lib/ansible/plugins/action/debug.py
index 04db3c9cc1..94056e496c 100644
--- a/lib/ansible/plugins/action/debug.py
+++ b/lib/ansible/plugins/action/debug.py
@@ -19,7 +19,6 @@ __metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
-from ansible.template import Templar
class ActionModule(ActionBase):
''' Print statements during execution '''
@@ -35,8 +34,7 @@ class ActionModule(ActionBase):
result = dict(msg=self._task.args['msg'])
# FIXME: move the LOOKUP_REGEX somewhere else
elif 'var' in self._task.args: # and not utils.LOOKUP_REGEX.search(self._task.args['var']):
- templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=task_vars)
- results = templar.template(self._task.args['var'], convert_bare=True)
+ results = self._templar.template(self._task.args['var'], convert_bare=True)
result = dict()
result[self._task.args['var']] = results
else:
diff --git a/lib/ansible/plugins/action/set_fact.py b/lib/ansible/plugins/action/set_fact.py
index 6086ee6e8b..10ff6f2322 100644
--- a/lib/ansible/plugins/action/set_fact.py
+++ b/lib/ansible/plugins/action/set_fact.py
@@ -19,7 +19,6 @@ __metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
-from ansible.template import Templar
from ansible.utils.boolean import boolean
class ActionModule(ActionBase):
@@ -27,11 +26,10 @@ class ActionModule(ActionBase):
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=dict()):
- templar = Templar(loader=self._loader, variables=task_vars)
facts = dict()
if self._task.args:
for (k, v) in self._task.args.iteritems():
- k = templar.template(k)
+ k = self._templar.template(k)
if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
v = boolean(v)
facts[k] = v
diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py
index a234ef2eee..7300848e6b 100644
--- a/lib/ansible/plugins/action/template.py
+++ b/lib/ansible/plugins/action/template.py
@@ -21,7 +21,6 @@ import base64
import os
from ansible.plugins.action import ActionBase
-from ansible.template import Templar
from ansible.utils.hashing import checksum_s
class ActionModule(ActionBase):
@@ -99,11 +98,10 @@ class ActionModule(ActionBase):
dest = os.path.join(dest, base)
# template the source data locally & get ready to transfer
- templar = Templar(loader=self._loader, variables=task_vars)
try:
with open(source, 'r') as f:
template_data = f.read()
- resultant = templar.template(template_data, preserve_trailing_newlines=True)
+ resultant = self._templar.template(template_data, preserve_trailing_newlines=True)
except Exception as e:
return dict(failed=True, msg=type(e).__name__ + ": " + str(e))
diff --git a/lib/ansible/plugins/filter b/lib/ansible/plugins/filter
deleted file mode 120000
index fa1d588570..0000000000
--- a/lib/ansible/plugins/filter
+++ /dev/null
@@ -1 +0,0 @@
-../../../lib/ansible/runner/filter_plugins
\ No newline at end of file
diff --git a/lib/ansible/plugins/filter/__init__.py b/lib/ansible/plugins/filter/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/lib/ansible/plugins/filter/core.py b/lib/ansible/plugins/filter/core.py
new file mode 100644
index 0000000000..bdf45509c3
--- /dev/null
+++ b/lib/ansible/plugins/filter/core.py
@@ -0,0 +1,351 @@
+# (c) 2012, Jeroen Hoekx
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+from __future__ import absolute_import
+
+import sys
+import base64
+import json
+import os.path
+import types
+import pipes
+import glob
+import re
+import crypt
+import hashlib
+import string
+from functools import partial
+import operator as py_operator
+from random import SystemRandom, shuffle
+import uuid
+
+import yaml
+from jinja2.filters import environmentfilter
+from distutils.version import LooseVersion, StrictVersion
+
+from ansible import errors
+from ansible.utils.hashing import md5s, checksum_s
+from ansible.utils.unicode import unicode_wrap, to_unicode
+
+
+UUID_NAMESPACE_ANSIBLE = uuid.UUID('361E6D51-FAEC-444A-9079-341386DA8E2E')
+
+
def to_nice_yaml(*a, **kw):
    '''Serialize the value as verbose, human-readable YAML (block style, 4-space indent).'''
    dumped = yaml.safe_dump(*a, indent=4, allow_unicode=True, default_flow_style=False, **kw)
    # unicode_wrap/to_unicode keeps the result a unicode string on python 2
    return to_unicode(dumped)
+
def to_json(a, *args, **kw):
    '''Serialize the value to a JSON string; extra args pass through to json.dumps.'''
    serialized = json.dumps(a, *args, **kw)
    return serialized
+
def to_nice_json(a, *args, **kw):
    '''Serialize the value as pretty-printed, key-sorted JSON.'''
    # On python >= 2.7 the stdlib encoder is fine: use it directly.
    if sys.version_info >= (2, 7):
        return json.dumps(a, indent=4, sort_keys=True, *args, **kw)
    # python-2.6's json encoder is buggy (can't encode hostvars); prefer
    # simplejson >= 2 when available, otherwise fall back to plain to_json.
    try:
        import simplejson
    except ImportError:
        return to_json(a, *args, **kw)
    try:
        major = int(simplejson.__version__.split('.')[0])
    except:
        return to_json(a, *args, **kw)
    if major >= 2:
        return simplejson.dumps(a, indent=4, sort_keys=True, *args, **kw)
    # Fallback to the to_json filter
    return to_json(a, *args, **kw)
+
def failed(*a, **kw):
    '''Jinja2 test: True when a registered task result failed (non-zero rc or failed flag).'''
    item = a[0]
    if type(item) != dict:
        raise errors.AnsibleFilterError("|failed expects a dictionary")
    return_code = item.get('rc', 0)
    failed_flag = item.get('failed', False)
    return True if (return_code != 0 or failed_flag) else False
+
def success(*a, **kw):
    '''Jinja2 test: inverse of |failed.'''
    has_failed = failed(*a, **kw)
    return not has_failed
+
def changed(*a, **kw):
    '''Jinja2 test: True when a registered task result reports a change.'''
    item = a[0]
    if type(item) != dict:
        raise errors.AnsibleFilterError("|changed expects a dictionary")
    if 'changed' in item:
        return item.get('changed', False)
    # Some modules return a per-item 'results' list; in that case the task
    # counts as changed when any listed result changed.
    if ('results' in item
            and type(item['results']) == list
            and type(item['results'][0]) == dict):
        any_changed = False
        for result in item['results']:
            any_changed = any_changed or result.get('changed', False)
        return any_changed
    return item.get('changed', False)
+
def skipped(*a, **kw):
    '''Jinja2 test: True when a registered task result was skipped.'''
    item = a[0]
    if type(item) != dict:
        raise errors.AnsibleFilterError("|skipped expects a dictionary")
    return item.get('skipped', False)
+
def mandatory(a):
    '''Make a variable mandatory: pass the value through, raising if undefined.'''
    # NOTE(review): a bare name expression on a bound parameter cannot raise
    # NameError here; presumably undefined template variables are expected to
    # surface upstream in the templating layer — confirm.
    try:
        a
    except NameError:
        raise errors.AnsibleFilterError('Mandatory variable not defined.')
    return a
+
+def bool(a):
+ ''' return a bool for the arg '''
+ if a is None or type(a) == bool:
+ return a
+ if type(a) in types.StringTypes:
+ a = a.lower()
+ if a in ['yes', 'on', '1', 'true', 1]:
+ return True
+ else:
+ return False
+
def quote(a):
    '''Return the argument quoted for safe use in a shell command line.'''
    shell_safe = pipes.quote(a)
    return shell_safe
+
def fileglob(pathname):
    '''Return the list of filesystem paths matching the glob pattern.'''
    return list(glob.iglob(pathname))
+
def regex(value='', pattern='', ignorecase=False, match_type='search'):
    '''Expose `re` as a boolean filter using the `search` method by default.
    This is likely only useful for `search` and `match` which already
    have their own filters.
    '''
    flags = re.I if ignorecase else 0
    compiled = re.compile(pattern, flags=flags)
    matcher = getattr(compiled, match_type, 'search')
    # The module-level |bool filter shadows the builtin, so fetch the real one.
    _bool = __builtins__.get('bool')
    return _bool(matcher(value))
+
def match(value, pattern='', ignorecase=False):
    '''Boolean filter: anchored `re.match` of *pattern* against *value*.'''
    return regex(value, pattern, ignorecase, match_type='match')
+
def search(value, pattern='', ignorecase=False):
    '''Boolean filter: unanchored `re.search` of *pattern* against *value*.'''
    return regex(value, pattern, ignorecase, match_type='search')
+
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
    '''Perform a `re.sub` on *value*, returning the substituted string.'''
    # Non-string inputs are stringified before substitution.
    if not isinstance(value, basestring):
        value = str(value)

    flags = re.I if ignorecase else 0
    compiled = re.compile(pattern, flags=flags)
    return compiled.sub(replacement, value)
+
def ternary(value, true_val, false_val):
    '''C-style conditional: value ? true_val : false_val.'''
    return true_val if value else false_val
+
+
def version_compare(value, version, operator='eq', strict=False):
    '''Compare *value* against *version* with the given operator.

    operator accepts both symbolic ('<=', '!=', ...) and named ('le', 'ne', ...)
    forms; strict=True uses StrictVersion, otherwise LooseVersion.
    Raises AnsibleFilterError for unknown operators or unparsable versions.
    '''
    op_map = {
        '==': 'eq', '=': 'eq', 'eq': 'eq',
        '<': 'lt', 'lt': 'lt',
        '<=': 'le', 'le': 'le',
        '>': 'gt', 'gt': 'gt',
        '>=': 'ge', 'ge': 'ge',
        '!=': 'ne', '<>': 'ne', 'ne': 'ne'
    }

    if strict:
        Version = StrictVersion
    else:
        Version = LooseVersion

    if operator in op_map:
        operator = op_map[operator]
    else:
        raise errors.AnsibleFilterError('Invalid operator type')

    try:
        method = getattr(py_operator, operator)
        return method(Version(str(value)), Version(str(version)))
    # Bug fix: 'except Exception, e' is python-2-only syntax (a SyntaxError on
    # python 3); the 'as' form is valid on python 2.6+ and python 3 alike.
    except Exception as e:
        raise errors.AnsibleFilterError('Version comparison: %s' % e)
+
@environmentfilter
def rand(environment, end, start=None, step=None):
    '''Jinja2 `random` filter: random integer from a range, or random element
    of a sequence. start/step are only valid for integer inputs.'''
    rng = SystemRandom()
    if isinstance(end, (int, long)):
        # Falsy start/step (None or 0) default to the full range with stride 1.
        start = start or 0
        step = step or 1
        return rng.randrange(start, end, step)
    if hasattr(end, '__iter__'):
        if start or step:
            raise errors.AnsibleFilterError('start and step can only be used with integer values')
        return rng.choice(end)
    raise errors.AnsibleFilterError('random can only be used on sequences and integers')
+
def randomize_list(mylist):
    '''Return a shuffled copy of the input; non-sequences pass through unchanged.'''
    try:
        shuffled = list(mylist)
        shuffle(shuffled)
    except:
        # Anything that can't be turned into a list is returned as-is.
        return mylist
    return shuffled
+
def get_hash(data, hashtype='sha1'):
    '''Hex digest of *data* using *hashtype*; None when the hash is unsupported.'''
    try:
        # hashlib.new raises for unknown algorithm names.
        hasher = hashlib.new(hashtype)
    except:
        return None
    hasher.update(data)
    return hasher.hexdigest()
+
def get_encrypted_password(password, hashtype='sha512', salt=None):
    '''Return *password* hashed with crypt(3) using the requested scheme.

    hashtype is case-insensitive ('md5', 'blowfish', 'sha256', 'sha512');
    a 16-character alphanumeric salt is generated when none is supplied.
    Returns None for unsupported hash types.
    '''
    # TODO: find a way to construct dynamically from system
    cryptmethod = {
        'md5': '1',
        'blowfish': '2a',
        'sha256': '5',
        'sha512': '6',
    }

    # Bug fix: the lowered name was previously assigned to a misspelled
    # 'hastype' variable and discarded, so mixed-case inputs like 'SHA512'
    # were silently rejected (returned None).
    hashtype = hashtype.lower()
    if hashtype in cryptmethod:
        if salt is None:
            r = SystemRandom()
            salt = ''.join([r.choice(string.ascii_letters + string.digits) for _ in range(16)])

        # glibc crypt salt format: $id$salt
        saltstring = "$%s$%s" % (cryptmethod[hashtype], salt)
        encrypted = crypt.crypt(password, saltstring)
        return encrypted

    return None
+
def to_uuid(string):
    '''Deterministic UUIDv5 (Ansible namespace) for the stringified value.'''
    # NOTE: the parameter name shadows the stdlib 'string' module in this scope.
    derived = uuid.uuid5(UUID_NAMESPACE_ANSIBLE, str(string))
    return str(derived)
+
class FilterModule(object):
    ''' Ansible core jinja2 filters '''

    def filters(self):
        '''Build and return the filter-name -> callable mapping.'''
        core_filters = {}

        # base 64
        core_filters.update({
            'b64decode': partial(unicode_wrap, base64.b64decode),
            'b64encode': partial(unicode_wrap, base64.b64encode),
        })

        # uuid
        core_filters['to_uuid'] = to_uuid

        # json / yaml serialization
        core_filters.update({
            'to_json': to_json,
            'to_nice_json': to_nice_json,
            'from_json': json.loads,
            'to_yaml': yaml.safe_dump,
            'to_nice_yaml': to_nice_yaml,
            'from_yaml': yaml.safe_load,
        })

        # path manipulation (results wrapped back into unicode)
        core_filters.update({
            'basename': partial(unicode_wrap, os.path.basename),
            'dirname': partial(unicode_wrap, os.path.dirname),
            'expanduser': partial(unicode_wrap, os.path.expanduser),
            'realpath': partial(unicode_wrap, os.path.realpath),
            'relpath': partial(unicode_wrap, os.path.relpath),
        })

        # registered-result tests: failure / success / changed / skipped
        core_filters.update({
            'failed': failed,
            'success': success,
            'changed': changed,
            'skipped': skipped,
        })

        # variable existence, boolean coercion, shell quoting
        core_filters.update({
            'mandatory': mandatory,
            'bool': bool,
            'quote': quote,
        })

        # hashing: md5/sha1 hex digest of a string, checksum of a string as
        # used by ansible for checksumming files, generic and password hashing
        core_filters.update({
            'md5': md5s,
            'sha1': checksum_s,
            'checksum': checksum_s,
            'password_hash': get_encrypted_password,
            'hash': get_hash,
        })

        # file glob
        core_filters['fileglob'] = fileglob

        # regular expressions
        core_filters.update({
            'match': match,
            'search': search,
            'regex': regex,
            'regex_replace': regex_replace,
        })

        # ternary, version comparison, randomness
        core_filters.update({
            'ternary': ternary,
            'version_compare': version_compare,
            'random': rand,
            'shuffle': randomize_list,
        })

        return core_filters
diff --git a/lib/ansible/plugins/filter/ipaddr.py b/lib/ansible/plugins/filter/ipaddr.py
new file mode 100644
index 0000000000..5d9d6e3136
--- /dev/null
+++ b/lib/ansible/plugins/filter/ipaddr.py
@@ -0,0 +1,659 @@
+# (c) 2014, Maciej Delmanowski
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+from functools import partial
+
# netaddr is an optional third-party dependency; without it the filters below
# degrade to returning error messages instead of raising at import time.
try:
    import netaddr
except ImportError:
    # in this case, we'll make the filters return error messages (see bottom)
    netaddr = None
else:
    # MAC dialect with lowercase two-hex-digit words (Linux-style formatting);
    # based on netaddr's unix dialect — presumably colon-separated, confirm
    # against the netaddr mac_unix definition.
    class mac_linux(netaddr.mac_unix):
        pass
    mac_linux.word_fmt = '%.2x'
+
+from ansible import errors
+
+
+# ---- IP address and network query helpers ----
+
# Each helper below receives a netaddr.IPNetwork 'v' (built by ipaddr()) and
# implements one named query; extra args (vtype, value, iplist) are supplied
# per-query via query_func_extra_args in ipaddr().

def _empty_ipaddr_query(v, vtype):
    # We don't have any query to process, so just check what type the user
    # expects, and return the IP address in a correct format
    if v:
        if vtype == 'address':
            return str(v.ip)
        elif vtype == 'network':
            return str(v)

def _6to4_query(v, vtype, value):
    # 6to4 (RFC 3056): for a public IPv4 address derive its 2002::/48 prefix;
    # for IPv6, pass the value through only when it lies inside 2002::/16.
    if v.version == 4:

        if v.size == 1:
            ipconv = str(v.ip)
        elif v.size > 1:
            if v.ip != v.network:
                ipconv = str(v.ip)
            else:
                ipconv = False

        if ipaddr(ipconv, 'public'):
            numbers = list(map(int, ipconv.split('.')))

            try:
                return '2002:{:02x}{:02x}:{:02x}{:02x}::1/48'.format(*numbers)
            except:
                return False

    elif v.version == 6:
        if vtype == 'address':
            if ipaddr(str(v), '2002::/16'):
                return value
        elif vtype == 'network':
            if v.ip != v.network:
                if ipaddr(str(v.ip), '2002::/16'):
                    return value
            else:
                return False

def _ip_query(v):
    # Bare IP of the input; for networks, only when it is a host within the
    # network (ip differs from the network address).
    if v.size == 1:
        return str(v.ip)
    if v.size > 1:
        if v.ip != v.network:
            return str(v.ip)

def _gateway_query(v):
    # Host address with prefix ("address/prefix"); only meaningful for a host
    # inside a network.
    if v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)

def _bool_ipaddr_query(v):
    # Truthiness of the parsed value.
    if v:
        return True

def _broadcast_query(v):
    # Broadcast address; undefined for single hosts.
    if v.size > 1:
        return str(v.broadcast)

def _cidr_query(v):
    # Canonical CIDR string of the whole value.
    return str(v)

def _cidr_lookup_query(v, iplist, value):
    # Membership test of v in a precomputed netaddr.IPSet (see ipaddr()).
    try:
        if v in iplist:
            return value
    except:
        return False

def _host_query(v):
    # Host form: plain for single addresses, address/prefix for hosts in nets.
    if v.size == 1:
        return str(v)
    elif v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)

def _hostmask_query(v):
    # Inverted netmask (wildcard mask).
    return str(v.hostmask)

def _int_query(v, vtype):
    # Integer representation: bare int for addresses, "int/prefix" for nets.
    if vtype == 'address':
        return int(v.ip)
    elif vtype == 'network':
        return str(int(v.ip)) + '/' + str(int(v.prefixlen))
+
def _ipv4_query(v, value):
    # Map an IPv6 value to IPv4 when netaddr can (e.g. v4-mapped); pass IPv4
    # through unchanged.
    if v.version == 6:
        try:
            return str(v.ipv4())
        except:
            return False
    else:
        return value

def _ipv6_query(v, value):
    # Map an IPv4 value to its IPv6 form; pass IPv6 through unchanged.
    if v.version == 4:
        return str(v.ipv6())
    else:
        return value

def _link_local_query(v, value):
    # Link-local ranges: 169.254.0.0/24 for v4, fe80::/10 for v6.
    # NOTE(review): IPv4 link-local is usually written 169.254.0.0/16 — the
    # /24 here looks narrower than the RFC 3927 range; confirm intent.
    v_ip = netaddr.IPAddress(str(v.ip))
    if v.version == 4:
        if ipaddr(str(v_ip), '169.254.0.0/24'):
            return value

    elif v.version == 6:
        if ipaddr(str(v_ip), 'fe80::/10'):
            return value

def _loopback_query(v, value):
    # Loopback test delegated to netaddr.
    v_ip = netaddr.IPAddress(str(v.ip))
    if v_ip.is_loopback():
        return value

def _multicast_query(v, value):
    if v.is_multicast():
        return value

def _net_query(v):
    # Network in CIDR form, but only when the value IS the network address.
    if v.size > 1:
        if v.ip == v.network:
            return str(v.network) + '/' + str(v.prefixlen)

def _netmask_query(v):
    if v.size > 1:
        return str(v.netmask)

def _network_query(v):
    # Bare network address without prefix.
    if v.size > 1:
        return str(v.network)

def _prefix_query(v):
    return int(v.prefixlen)

def _private_query(v, value):
    if v.is_private():
        return value

def _public_query(v, value):
    # Public = unicast and not private/loopback/netmask/hostmask.
    v_ip = netaddr.IPAddress(str(v.ip))
    if v_ip.is_unicast() and not v_ip.is_private() and \
        not v_ip.is_loopback() and not v_ip.is_netmask() and \
        not v_ip.is_hostmask():
        return value

def _revdns_query(v):
    # Reverse-DNS name (in-addr.arpa / ip6.arpa) from netaddr.
    v_ip = netaddr.IPAddress(str(v.ip))
    return v_ip.reverse_dns

def _size_query(v):
    # Number of addresses covered by the value.
    return v.size

def _subnet_query(v):
    return str(v.cidr)

def _type_query(v):
    # Classify the value: a lone IP or a host-in-net is an 'address', a value
    # equal to its network address is a 'network'.
    if v.size == 1:
        return 'address'
    if v.size > 1:
        if v.ip != v.network:
            return 'address'
        else:
            return 'network'

def _unicast_query(v, value):
    if v.is_unicast():
        return value

def _version_query(v):
    # IP protocol version (4 or 6).
    return v.version

def _wrap_query(v, vtype, value):
    # Bracket IPv6 literals ([addr] / [addr]/prefix); other values unchanged.
    if v.version == 6:
        if vtype == 'address':
            return '[' + str(v.ip) + ']'
        elif vtype == 'network':
            return '[' + str(v.ip) + ']/' + str(v.prefixlen)
    else:
        return value
+
+
+# ---- HWaddr query helpers ----
# Each helper receives a netaddr.EUI object 'v' and mutates its dialect to
# control the string formatting of the MAC address.

def _bare_query(v):
    # No separators at all.
    v.dialect = netaddr.mac_bare
    return str(v)

def _bool_hwaddr_query(v):
    if v:
        return True

def _cisco_query(v):
    # Cisco triple-group dotted format.
    v.dialect = netaddr.mac_cisco
    return str(v)

def _empty_hwaddr_query(v, value):
    # Default (no query): pass the original value through when it parsed.
    if v:
        return value

def _linux_query(v):
    # Lowercase colon-separated format (see mac_linux above).
    v.dialect = mac_linux
    return str(v)

def _postgresql_query(v):
    v.dialect = netaddr.mac_pgsql
    return str(v)

def _unix_query(v):
    v.dialect = netaddr.mac_unix
    return str(v)

def _win_query(v):
    # Windows/EUI-48 dashed format.
    v.dialect = netaddr.mac_eui48
    return str(v)
+
+
+# ---- IP address and network filters ----
+
def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
    ''' Check if string is an IP address or network and filter it.

    value may be a string, int, or list/tuple thereof; query selects one of
    the named sub-queries below (or a subnet string for membership lookup, or
    a numeric index into a network). version restricts to 4 or 6; alias is
    only used in error messages so ipv4/ipv6/ipwrap report their own name.
    Returns False when the value does not parse or does not satisfy the query.
    '''

    # Maps query name -> names of extra locals passed to the handler.
    query_func_extra_args = {
        '': ('vtype',),
        '6to4': ('vtype', 'value'),
        'cidr_lookup': ('iplist', 'value'),
        'int': ('vtype',),
        'ipv4': ('value',),
        'ipv6': ('value',),
        'link-local': ('value',),
        'loopback': ('value',),
        'lo': ('value',),
        'multicast': ('value',),
        'private': ('value',),
        'public': ('value',),
        'unicast': ('value',),
        'wrap': ('vtype', 'value'),
    }
    # Maps query name (and its aliases) -> handler function.
    query_func_map = {
        '': _empty_ipaddr_query,
        '6to4': _6to4_query,
        'address': _ip_query,
        'address/prefix': _gateway_query,
        'bool': _bool_ipaddr_query,
        'broadcast': _broadcast_query,
        'cidr': _cidr_query,
        'cidr_lookup': _cidr_lookup_query,
        'gateway': _gateway_query,
        'gw': _gateway_query,
        'host': _host_query,
        'host/prefix': _gateway_query,
        'hostmask': _hostmask_query,
        'hostnet': _gateway_query,
        'int': _int_query,
        'ip': _ip_query,
        'ipv4': _ipv4_query,
        'ipv6': _ipv6_query,
        'link-local': _link_local_query,
        'lo': _loopback_query,
        'loopback': _loopback_query,
        'multicast': _multicast_query,
        'net': _net_query,
        'netmask': _netmask_query,
        'network': _network_query,
        'prefix': _prefix_query,
        'private': _private_query,
        'public': _public_query,
        'revdns': _revdns_query,
        'router': _gateway_query,
        'size': _size_query,
        'subnet': _subnet_query,
        'type': _type_query,
        'unicast': _unicast_query,
        'v4': _ipv4_query,
        'v6': _ipv6_query,
        'version': _version_query,
        'wrap': _wrap_query,
    }

    vtype = None

    if not value:
        return False

    elif value == True:
        return False

    # Check if value is a list and parse each element
    elif isinstance(value, (list, tuple)):

        _ret = []
        for element in value:
            if ipaddr(element, str(query), version):
                _ret.append(ipaddr(element, str(query), version))

        if _ret:
            return _ret
        else:
            return list()

    # Check if value is a number and convert it to an IP address
    elif str(value).isdigit():

        # We don't know what IP version to assume, so let's check IPv4 first,
        # then IPv6
        try:
            if ((not version) or (version and version == 4)):
                v = netaddr.IPNetwork('0.0.0.0/0')
                v.value = int(value)
                v.prefixlen = 32
            elif version and version == 6:
                v = netaddr.IPNetwork('::/0')
                v.value = int(value)
                v.prefixlen = 128

        # IPv4 didn't work the first time, so it definitely has to be IPv6
        except:
            try:
                v = netaddr.IPNetwork('::/0')
                v.value = int(value)
                v.prefixlen = 128

            # The value is too big for IPv6. Are you a nanobot?
            except:
                return False

        # We got an IP address, let's mark it as such
        value = str(v)
        vtype = 'address'

    # value has not been recognized, check if it's a valid IP string
    else:
        try:
            v = netaddr.IPNetwork(value)

            # value is a valid IP string, check if user specified
            # CIDR prefix or just an IP address, this will indicate default
            # output format
            try:
                address, prefix = value.split('/')
                vtype = 'network'
            except:
                vtype = 'address'

        # value hasn't been recognized, maybe it's a numerical CIDR?
        except:
            try:
                address, prefix = value.split('/')
                address.isdigit()
                address = int(address)
                prefix.isdigit()
                prefix = int(prefix)

            # It's not numerical CIDR, give up
            except:
                return False

            # It is something, so let's try and build a CIDR from the parts
            try:
                v = netaddr.IPNetwork('0.0.0.0/0')
                v.value = address
                v.prefixlen = prefix

            # It's not a valid IPv4 CIDR
            except:
                try:
                    v = netaddr.IPNetwork('::/0')
                    v.value = address
                    v.prefixlen = prefix

                # It's not a valid IPv6 CIDR. Give up.
                except:
                    return False

            # We have a valid CIDR, so let's write it in correct format
            value = str(v)
            vtype = 'network'

    # We have a query string but it's not in the known query types. Check if
    # that string is a valid subnet, if so, we can check later if given IP
    # address/network is inside that specific subnet
    try:
        ### ?? 6to4 and link-local were True here before. Should they still?
        if query and (query not in query_func_map or query == 'cidr_lookup') and ipaddr(query, 'network'):
            iplist = netaddr.IPSet([netaddr.IPNetwork(query)])
            query = 'cidr_lookup'
    except:
        pass

    # This code checks if value maches the IP version the user wants, ie. if
    # it's any version ("ipaddr()"), IPv4 ("ipv4()") or IPv6 ("ipv6()")
    # If version does not match, return False
    if version and v.version != version:
        return False

    # Collect the extra locals (vtype/value/iplist) the handler expects.
    extras = []
    for arg in query_func_extra_args.get(query, tuple()):
        extras.append(locals()[arg])
    try:
        return query_func_map[query](v, *extras)
    except KeyError:
        # Unknown query name: a numeric query indexes into the network
        # (v[query]); anything else is an error.
        try:
            float(query)
            if v.size == 1:
                if vtype == 'address':
                    return str(v.ip)
                elif vtype == 'network':
                    return str(v)

            elif v.size > 1:
                try:
                    return str(v[query]) + '/' + str(v.prefixlen)
                except:
                    return False

            else:
                return value

        except:
            raise errors.AnsibleFilterError(alias + ': unknown filter type: %s' % query)

    return False
+
+
def ipwrap(value, query=''):
    """Apply the ipaddr() 'wrap' query to value (or to each element of a
    list/tuple value), leaving anything that is not a valid address alone.

    On any unexpected error the input is returned unchanged.
    """
    try:
        if isinstance(value, (list, tuple)):
            # Element-wise: wrap only the entries ipaddr() recognizes,
            # pass everything else through untouched.
            wrapped = []
            for item in value:
                if ipaddr(item, query, version=False, alias='ipwrap'):
                    wrapped.append(ipaddr(item, 'wrap'))
                else:
                    wrapped.append(item)
            return wrapped

        checked = ipaddr(value, query, version=False, alias='ipwrap')
        return ipaddr(checked, 'wrap') if checked else value

    except:
        return value
+
+
def ipv4(value, query=''):
    """IPv4-only variant of the ipaddr() filter."""
    return ipaddr(value, query, version=4, alias='ipv4')
+
+
def ipv6(value, query=''):
    """IPv6-only variant of the ipaddr() filter."""
    return ipaddr(value, query, version=6, alias='ipv6')
+
+
+# Split given subnet into smaller subnets or find out the biggest subnet of
+# a given IP address with given CIDR prefix
+# Usage:
+#
+# - address or address/prefix | ipsubnet
+# returns CIDR subnet of a given input
+#
+# - address/prefix | ipsubnet(cidr)
+# returns number of possible subnets for given CIDR prefix
+#
+# - address/prefix | ipsubnet(cidr, index)
+# returns new subnet with given CIDR prefix
+#
+# - address | ipsubnet(cidr)
+# returns biggest subnet with given CIDR prefix that address belongs to
+#
+# - address | ipsubnet(cidr, index)
+# returns next indexed subnet which contains given address
def ipsubnet(value, query = '', index = 'x'):
    ''' Manipulate IPv4/IPv6 subnets '''

    # Normalize the input to a CIDR string via the ipaddr() filter, then
    # parse it with netaddr. Anything unparseable -- including a vtype that
    # is neither 'address' nor 'network', which leaves v undefined -- lands
    # in the bare except and yields False.
    try:
        vtype = ipaddr(value, 'type')
        if vtype == 'address':
            v = ipaddr(value, 'cidr')
        elif vtype == 'network':
            v = ipaddr(value, 'subnet')

        value = netaddr.IPNetwork(v)
    except:
        return False

    # No query: just echo the normalized subnet back as a string.
    if not query:
        return str(value)

    elif str(query).isdigit():
        # query is a numeric CIDR prefix; vsize tells us whether the input
        # was a whole network (size > 1) or a single host (size == 1).
        vsize = ipaddr(v, 'size')
        query = int(query)

        # index defaults to the sentinel string 'x'; float(index) raising
        # is how we branch between the indexed and non-indexed forms below.
        try:
            float(index)
            index = int(index)

            # Indexed form: return the index-th subnet (of a network) or
            # the index-th supernet (of a single address).
            if vsize > 1:
                try:
                    return str(list(value.subnet(query))[index])
                except:
                    return False

            elif vsize == 1:
                try:
                    return str(value.supernet(query)[index])
                except:
                    return False

        except:
            # Non-indexed form: count the possible subnets, or return the
            # biggest supernet with the given prefix for a single address.
            if vsize > 1:
                try:
                    return str(len(list(value.subnet(query))))
                except:
                    return False

            elif vsize == 1:
                try:
                    return str(value.supernet(query)[0])
                except:
                    return False

    return False
+
+# Returns the nth host within a network described by value.
+# Usage:
+#
+# - address or address/prefix | nthhost(nth)
+# returns the nth host within the given network
def nthhost(value, query=''):
    ''' Get the nth host within a given network

    value is an address or CIDR network accepted by ipaddr(); query is the
    (integer) host index. Returns the netaddr address object for that host,
    or False when the input or index is invalid / out of range.
    '''
    # Normalize the input to a CIDR network; anything unparseable (including
    # a vtype that is neither 'address' nor 'network', leaving v undefined)
    # lands in the bare except.
    try:
        vtype = ipaddr(value, 'type')
        if vtype == 'address':
            v = ipaddr(value, 'cidr')
        elif vtype == 'network':
            v = ipaddr(value, 'subnet')

        value = netaddr.IPNetwork(v)
    except:
        return False

    if not query:
        return False

    try:
        # The original also computed ipaddr(v, 'size') here, but never used
        # it; the dead call has been removed.
        nth = int(query)
        # Only index into the network when the requested host exists.
        if value.size > nth:
            return value[nth]

    except ValueError:
        # query was not an integer
        return False

    return False
+
+
+# ---- HWaddr / MAC address filters ----
+
def hwaddr(value, query='', alias='hwaddr'):
    ''' Check if string is a HW/MAC address and filter it

    query selects the output format (e.g. 'cisco', 'linux', 'win'); the
    empty query and 'bool' report on validity instead of reformatting.
    Raises AnsibleFilterError for an invalid address (unless only a
    validity check was requested) or for an unknown query.
    '''

    # Extra positional arguments some query handlers need.
    query_func_extra_args = {
        '': ('value',),
    }
    # Map query names to the helper implementing them (helpers are defined
    # elsewhere in this module).
    query_func_map = {
        '': _empty_hwaddr_query,
        'bare': _bare_query,
        'bool': _bool_hwaddr_query,
        'cisco': _cisco_query,
        'eui48': _win_query,
        'linux': _linux_query,
        'pgsql': _postgresql_query,
        'postgresql': _postgresql_query,
        'psql': _postgresql_query,
        'unix': _unix_query,
        'win': _win_query,
    }

    try:
        v = netaddr.EUI(value)
    except:
        if query and query != 'bool':
            raise errors.AnsibleFilterError(alias + ': not a hardware address: %s' % value)
        # Not a parseable HW address and the caller only asked whether it is
        # one (or asked nothing): answer False. Previously execution fell
        # through here with 'v' undefined and raised NameError instead.
        return False

    extras = []
    for arg in query_func_extra_args.get(query, tuple()):
        extras.append(locals()[arg])
    try:
        return query_func_map[query](v, *extras)
    except KeyError:
        raise errors.AnsibleFilterError(alias + ': unknown filter type: %s' % query)
+
def macaddr(value, query=''):
    """Alias for hwaddr() that reports errors under the 'macaddr' name."""
    return hwaddr(value, query, alias='macaddr')
+
def _need_netaddr(f_name, *args, **kwargs):
    """Stub installed in place of *f_name* when python-netaddr is missing;
    always raises AnsibleFilterError."""
    msg = ('The {0} filter requires python-netaddr be'
           ' installed on the ansible controller').format(f_name)
    raise errors.AnsibleFilterError(msg)
+
+# ---- Ansible filters ----
+
class FilterModule(object):
    ''' IP address and network manipulation filters '''
    # Maps Jinja2 filter names to the callables defined above. Kept as a
    # class attribute so filters() can build error-raising stubs from the
    # same set of names when python-netaddr is unavailable.
    filter_map = {
        # IP addresses and networks
        'ipaddr': ipaddr,
        'ipwrap': ipwrap,
        'ipv4': ipv4,
        'ipv6': ipv6,
        'ipsubnet': ipsubnet,
        'nthhost': nthhost,

        # MAC / HW addresses
        'hwaddr': hwaddr,
        'macaddr': macaddr
    }

    def filters(self):
        # netaddr here is the module-level name set by the import guard at
        # the top of this file (outside this view) -- presumably the module
        # object on success and a falsy value on ImportError; confirm there.
        if netaddr:
            return self.filter_map
        else:
            # Need to install python-netaddr for these filters to work
            return dict((f, partial(_need_netaddr, f)) for f in self.filter_map)
diff --git a/lib/ansible/plugins/filter/mathstuff.py b/lib/ansible/plugins/filter/mathstuff.py
new file mode 100644
index 0000000000..c6a49485a4
--- /dev/null
+++ b/lib/ansible/plugins/filter/mathstuff.py
@@ -0,0 +1,126 @@
+# (c) 2014, Brian Coca
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+from __future__ import absolute_import
+
+import math
+import collections
+from ansible import errors
+
def unique(a):
    """Return the distinct elements of *a*.

    A hashable input (string, tuple, ...) is deduplicated with set() and a
    set is returned; an unhashable sequence (typically a list) is scanned
    element by element, preserving first-seen order, and a list is returned.
    """
    # Probe hashability directly: isinstance(a, collections.Hashable) breaks
    # on Python 3.10+, where the alias was removed (it lives in
    # collections.abc there).
    try:
        hash(a)
    except TypeError:
        c = []
        for x in a:
            if x not in c:
                c.append(x)
    else:
        c = set(a)
    return c
+
def intersect(a, b):
    """Return the elements present in both *a* and *b*.

    Hashable inputs use set intersection (returns a set); otherwise the
    elements of *a* that also occur in *b* are kept, deduplicated in *a*'s
    order (returns a list).
    """
    # hash() probe instead of isinstance(x, collections.Hashable), which
    # breaks on Python 3.10+.
    try:
        hash(a)
        hash(b)
    except TypeError:
        # List comprehension rather than filter(): on Python 3 filter()
        # returns a (hashable!) iterator, which would make unique() take
        # the set branch and lose the ordered-list behavior.
        c = unique([x for x in a if x in b])
    else:
        c = set(a) & set(b)
    return c
+
def difference(a, b):
    """Return the elements of *a* that are not in *b*.

    Hashable inputs use set difference (returns a set); otherwise the
    surviving elements of *a* are kept deduplicated in order (returns a
    list).
    """
    # hash() probe instead of isinstance(x, collections.Hashable), which
    # breaks on Python 3.10+.
    try:
        hash(a)
        hash(b)
    except TypeError:
        # List comprehension rather than filter(): a py3 filter iterator is
        # itself hashable and would send unique() down the wrong branch.
        c = unique([x for x in a if x not in b])
    else:
        c = set(a) - set(b)
    return c
+
def symmetric_difference(a, b):
    """Return the elements that appear in exactly one of *a* and *b*."""
    # hash() probe instead of isinstance(x, collections.Hashable), which
    # breaks on Python 3.10+.
    try:
        hash(a)
        hash(b)
    except TypeError:
        # Compute the intersection once up front; the original recomputed
        # intersect(a, b) inside the filter predicate for every element.
        common = intersect(a, b)
        c = unique([x for x in union(a, b) if x not in common])
    else:
        c = set(a) ^ set(b)
    return c
+
def union(a, b):
    """Return all elements of *a* and *b* combined.

    Hashable inputs use set union (returns a set); otherwise the
    concatenation is deduplicated in order (returns a list).
    """
    # hash() probe instead of isinstance(x, collections.Hashable), which
    # breaks on Python 3.10+.
    try:
        hash(a)
        hash(b)
    except TypeError:
        c = unique(a + b)
    else:
        c = set(a) | set(b)
    return c
+
def min(a):
    """Jinja2 'min' filter: return the smallest element of *a*.

    This function shadows the builtin min, so the real builtin is fetched
    from the builtins module. The old ``__builtins__.get('min')`` only
    worked when ``__builtins__`` happened to be a dict; in ``__main__`` it
    is a module and the lookup crashed with AttributeError.
    """
    try:
        import __builtin__ as _builtins  # Python 2
    except ImportError:
        import builtins as _builtins  # Python 3
    return _builtins.min(a)
+
def max(a):
    """Jinja2 'max' filter: return the largest element of *a*.

    Shadows the builtin max, so the real builtin is fetched from the
    builtins module; ``__builtins__.get('max')`` only worked when
    ``__builtins__`` was a dict, not a module.
    """
    try:
        import __builtin__ as _builtins  # Python 2
    except ImportError:
        import builtins as _builtins  # Python 3
    return _builtins.max(a)
+
def isnotanumber(x):
    """Return True when *x* is a NaN; False for any other value, including
    values math.isnan() cannot handle at all (those raise TypeError)."""
    try:
        result = math.isnan(x)
    except TypeError:
        result = False
    return result
+
+
def logarithm(x, base=math.e):
    """Return the logarithm of *x* in *base* (natural log by default).

    Raises AnsibleFilterError when the arguments are not numbers.
    """
    try:
        # math.log10 is more precise than math.log(x, 10) for exact powers.
        if base == 10:
            return math.log10(x)
        else:
            return math.log(x, base)
    except TypeError as e:  # 'except TypeError, e' is a syntax error on py3
        raise errors.AnsibleFilterError('log() can only be used on numbers: %s' % str(e))
+
+
def power(x, y):
    """Return x raised to the power y, as a float.

    Raises AnsibleFilterError when the arguments are not numbers.
    """
    try:
        return math.pow(x, y)
    except TypeError as e:  # py3-compatible except syntax
        raise errors.AnsibleFilterError('pow() can only be used on numbers: %s' % str(e))
+
+
def inversepower(x, base=2):
    """Return the base-th root of *x* (square root by default).

    Raises AnsibleFilterError when the arguments are not numbers.
    """
    try:
        if base == 2:
            # math.sqrt is more accurate than pow(x, 0.5)
            return math.sqrt(x)
        else:
            return math.pow(x, 1.0 / float(base))
    except TypeError as e:  # py3-compatible except syntax
        raise errors.AnsibleFilterError('root() can only be used on numbers: %s' % str(e))
+
+
class FilterModule(object):
    ''' Ansible math jinja2 filters '''

    def filters(self):
        """Expose the math filters under their Jinja2 names."""
        filter_map = {
            # general math
            'isnan': isnotanumber,
            'min': min,
            'max': max,

            # exponents and logarithms
            'log': logarithm,
            'pow': power,
            'root': inversepower,

            # set theory
            'unique': unique,
            'intersect': intersect,
            'difference': difference,
            'symmetric_difference': symmetric_difference,
            'union': union,
        }
        return filter_map
From 1194195b1aa797f9a1e2d2b74990d233fccc9b3c Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Mon, 4 May 2015 11:38:28 -0400
Subject: [PATCH 028/971] smoother commands with less quotes for pbrun
---
lib/ansible/executor/connection_info.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/executor/connection_info.py b/lib/ansible/executor/connection_info.py
index 1c168a8e26..9e91cd09ea 100644
--- a/lib/ansible/executor/connection_info.py
+++ b/lib/ansible/executor/connection_info.py
@@ -226,7 +226,7 @@ class ConnectionInformation:
elif self.become_method == 'pbrun':
exe = become_settings.get('pbrun_exe', 'pbrun')
flags = become_settings.get('pbrun_flags', '')
- becomecmd = '%s -b -l %s -u %s "%s"' % (exe, flags, self.become_user, success_cmd)
+ becomecmd = '%s -b -l %s -u %s %s' % (exe, flags, self.become_user, success_cmd)
elif self.become_method == 'pfexec':
exe = become_settings.get('pfexec_exe', 'pbrun')
From fdb059187721779590d38646a215d4668cbc3f3a Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Mon, 4 May 2015 12:06:02 -0700
Subject: [PATCH 029/971] Update module pointers
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 0341ddd35e..85c8a892c8 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 0341ddd35ed5ff477ad5de2488d947255ce86259
+Subproject commit 85c8a892c80b92730831d95fa654ef6d35b0eca0
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 495ad450e5..70ea058563 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 495ad450e53feb1cd26218dc68056cc34d1ea9ff
+Subproject commit 70ea05856356ad36f48b4bb7267d637efc56d292
From 124a0d3519dac7d774c2cc5710a69b10a4ec4c92 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Mon, 4 May 2015 16:44:54 -0400
Subject: [PATCH 030/971] now properly inherits from ansible.cfg sudo/su ask
pass fixes #10891
---
lib/ansible/cli/__init__.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 0b0494e032..4a7f5bbacc 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -245,9 +245,9 @@ class CLI(object):
if runas_opts:
# priv user defaults to root later on to enable detecting when this option was given here
- parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
+ parser.add_option('-K', '--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
help='ask for sudo password (deprecated, use become)')
- parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
+ parser.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
help='ask for su password (deprecated, use become)')
parser.add_option("-s", "--sudo", default=C.DEFAULT_SUDO, action="store_true", dest='sudo',
help="run operations with sudo (nopasswd) (deprecated, use become)")
From 013c4631e3a65035471d85aabd9227c0fa701e10 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Mon, 4 May 2015 18:37:38 -0400
Subject: [PATCH 031/971] hack to prevent template/copy errors on vagrant
 synced folders that incorrectly report errno 26 fixes #9526
---
lib/ansible/module_utils/basic.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 54a1a9cfff..fd0108c98b 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -1356,8 +1356,9 @@ class AnsibleModule(object):
# Optimistically try a rename, solves some corner cases and can avoid useless work, throws exception if not atomic.
os.rename(src, dest)
except (IOError,OSError), e:
- # only try workarounds for errno 18 (cross device), 1 (not permitted) and 13 (permission denied)
- if e.errno != errno.EPERM and e.errno != errno.EXDEV and e.errno != errno.EACCES:
+ # only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied)
+ # and 26 (text file busy) which happens on vagrant synced folders
+ if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY]
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
dest_dir = os.path.dirname(dest)
From 483c61414e67a1b6c9f7ace406298cb2db08bf1d Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Mon, 4 May 2015 18:42:44 -0400
Subject: [PATCH 032/971] added missing :
---
lib/ansible/module_utils/basic.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index fd0108c98b..0c42a2315a 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -1358,7 +1358,7 @@ class AnsibleModule(object):
except (IOError,OSError), e:
# only try workarounds for errno 18 (cross device), 1 (not permitted), 13 (permission denied)
# and 26 (text file busy) which happens on vagrant synced folders
- if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY]
+ if e.errno not in [errno.EPERM, errno.EXDEV, errno.EACCES, errno.ETXTBSY]:
self.fail_json(msg='Could not replace file: %s to %s: %s' % (src, dest, e))
dest_dir = os.path.dirname(dest)
From fba5588028def5463f9b281fe69f117b76c3845b Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 5 May 2015 13:17:04 -0500
Subject: [PATCH 033/971] Handle empty role definitions in YAML (v2)
---
lib/ansible/playbook/play.py | 3 +++
lib/ansible/playbook/role/metadata.py | 3 +++
2 files changed, 6 insertions(+)
diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py
index b99c01fdf7..b247503d9c 100644
--- a/lib/ansible/playbook/play.py
+++ b/lib/ansible/playbook/play.py
@@ -174,6 +174,9 @@ class Play(Base, Taggable, Become):
list of role definitions and creates the Role from those objects
'''
+ if ds is None:
+ ds = []
+
role_includes = load_list_of_roles(ds, variable_manager=self._variable_manager, loader=self._loader)
roles = []
diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py
index 461a9a4a62..61e92ce9b5 100644
--- a/lib/ansible/playbook/role/metadata.py
+++ b/lib/ansible/playbook/role/metadata.py
@@ -65,6 +65,9 @@ class RoleMetadata(Base):
which returns a list of RoleInclude objects
'''
+ if ds is None:
+ ds = []
+
current_role_path = None
if self._owner:
current_role_path = os.path.dirname(self._owner._role_path)
From 8fae2abed4c12a55ae0c98b374b9bfd2fb4d287e Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 5 May 2015 13:41:32 -0500
Subject: [PATCH 034/971] Properly fail out of the task loop in the linear
strategy on failures (v2)
---
lib/ansible/executor/playbook_executor.py | 8 +++++++-
lib/ansible/plugins/strategies/linear.py | 3 +++
samples/test_play_failure.yml | 9 +++++++++
3 files changed, 19 insertions(+), 1 deletion(-)
create mode 100644 samples/test_play_failure.yml
diff --git a/lib/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py
index 2d5958697b..5d72ef15bd 100644
--- a/lib/ansible/executor/playbook_executor.py
+++ b/lib/ansible/executor/playbook_executor.py
@@ -117,15 +117,17 @@ class PlaybookExecutor:
if len(batch) == 0:
self._tqm.send_callback('v2_playbook_on_play_start', new_play)
self._tqm.send_callback('v2_playbook_on_no_hosts_matched')
- result = 0
+ result = 1
break
# restrict the inventory to the hosts in the serialized batch
self._inventory.restrict_to_hosts(batch)
# and run it...
result = self._tqm.run(play=play)
+ # if the last result wasn't zero, break out of the serial batch loop
if result != 0:
break
+ # if the last result wasn't zero, break out of the play loop
if result != 0:
break
@@ -134,6 +136,10 @@ class PlaybookExecutor:
if entry:
entrylist.append(entry) # per playbook
+ # if the last result wasn't zero, break out of the playbook file name loop
+ if result != 0:
+ break
+
if entrylist:
return entrylist
diff --git a/lib/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
index 95ecac1451..bd510dc557 100644
--- a/lib/ansible/plugins/strategies/linear.py
+++ b/lib/ansible/plugins/strategies/linear.py
@@ -226,6 +226,9 @@ class StrategyModule(StrategyBase):
# FIXME: this should also be moved to the base class in a method
included_files = []
for res in host_results:
+ if res.is_failed():
+ return 1
+
if res._task.action == 'include':
if res._task.loop:
include_results = res._result['results']
diff --git a/samples/test_play_failure.yml b/samples/test_play_failure.yml
new file mode 100644
index 0000000000..b33fc2e757
--- /dev/null
+++ b/samples/test_play_failure.yml
@@ -0,0 +1,9 @@
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - fail:
+
+- hosts: localhost
+ gather_facts: no
+ tasks:
+ - debug: msg="you should not see me..."
From d34b586eb6bf162c6c168a3065b3471f0522abf8 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 5 May 2015 16:40:11 -0500
Subject: [PATCH 035/971] Add ability for connection plugins to set attributes
based on host variables (v2)
---
lib/ansible/executor/task_executor.py | 4 +++-
lib/ansible/plugins/connections/__init__.py | 11 +++++++++++
2 files changed, 14 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 7fa2134948..6d62eea68b 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -209,7 +209,9 @@ class TaskExecutor:
# get the connection and the handler for this execution
self._connection = self._get_connection(variables)
- self._handler = self._get_action_handler(connection=self._connection, templar=templar)
+ self._connection.set_host_overrides(host=self._host)
+
+ self._handler = self._get_action_handler(connection=self._connection, templar=templar)
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
diff --git a/lib/ansible/plugins/connections/__init__.py b/lib/ansible/plugins/connections/__init__.py
index d11f365182..5558f5ba86 100644
--- a/lib/ansible/plugins/connections/__init__.py
+++ b/lib/ansible/plugins/connections/__init__.py
@@ -64,6 +64,17 @@ class ConnectionBase:
raise AnsibleError("Internal Error: this connection module does not support running commands via %s" % become_method)
+ def set_host_overrides(self, host):
+ '''
+ An optional method, which can be used to set connection plugin parameters
+ from variables set on the host (or groups to which the host belongs)
+
+ Any connection plugin using this should first initialize its attributes in
+ an overridden `def __init__(self):`, and then use `host.get_vars()` to find
+ variables which may be used to set those attributes in this method.
+ '''
+ pass
+
@abstractproperty
def transport(self):
"""String used to identify this Connection class from other classes"""
From 8214ac78410868d809e20fb260db2f7bb7ddc660 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Wed, 6 May 2015 01:31:02 -0500
Subject: [PATCH 036/971] Add serializer/deserializer to plugin base object
(v2)
Fixes #10923
---
lib/ansible/plugins/__init__.py | 30 ++++++++++++++++++++++++++++++
1 file changed, 30 insertions(+)
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index 5791677bd2..ad18bfe09b 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -77,6 +77,36 @@ class PluginLoader:
self._extra_dirs = []
self._searched_paths = set()
+ def __setstate__(self, data):
+ '''
+ Deserializer.
+ '''
+
+ class_name = data.get('class_name')
+ package = data.get('package')
+ config = data.get('config')
+ subdir = data.get('subdir')
+ aliases = data.get('aliases')
+
+ self.__init__(class_name, package, config, subdir, aliases)
+ self._extra_dirs = data.get('_extra_dirs', [])
+ self._searched_paths = data.get('_searched_paths', set())
+
+ def __getstate__(self):
+ '''
+ Serializer.
+ '''
+
+ return dict(
+ class_name = self.class_name,
+ package = self.package,
+ config = self.config,
+ subdir = self.subdir,
+ aliases = self.aliases,
+ _extra_dirs = self._extra_dirs,
+ _searched_paths = self._searched_paths,
+ )
+
def print_paths(self):
''' Returns a string suitable for printing of the search path '''
From 50542db0bed0f5be4fd06d11fea489ccbc2b8902 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Wed, 6 May 2015 02:56:52 -0500
Subject: [PATCH 037/971] Make the default playbook name an empty string (v2)
---
lib/ansible/playbook/play.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py
index b247503d9c..c891571a98 100644
--- a/lib/ansible/playbook/play.py
+++ b/lib/ansible/playbook/play.py
@@ -58,7 +58,7 @@ class Play(Base, Taggable, Become):
# Connection
_gather_facts = FieldAttribute(isa='string', default='smart')
_hosts = FieldAttribute(isa='list', default=[], required=True)
- _name = FieldAttribute(isa='string', default='')
+ _name = FieldAttribute(isa='string', default='')
# Variable Attributes
_vars_files = FieldAttribute(isa='list', default=[])
From 5489d172de95a94bb92e63090202e519b2204c39 Mon Sep 17 00:00:00 2001
From: gimoh
Date: Wed, 6 May 2015 11:57:25 +0100
Subject: [PATCH 038/971] Use same interpreter for test-module and module it
runs
Default python interpreter to the same interpreter the test-module
script is executed with. This is so that the interpreter doesn't have
to be specified twice in the command when using non-default python
(e.g. ``/path/to/python ./hacking/test-module -I python=/path/to/python ...``)
---
hacking/test-module | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/hacking/test-module b/hacking/test-module
index c226f32e88..44b49b06b9 100755
--- a/hacking/test-module
+++ b/hacking/test-module
@@ -59,7 +59,8 @@ def parse():
help="path to python debugger (e.g. /usr/bin/pdb)")
parser.add_option('-I', '--interpreter', dest='interpreter',
help="path to interpreter to use for this module (e.g. ansible_python_interpreter=/usr/bin/python)",
- metavar='INTERPRETER_TYPE=INTERPRETER_PATH')
+ metavar='INTERPRETER_TYPE=INTERPRETER_PATH',
+ default='python={}'.format(sys.executable))
parser.add_option('-c', '--check', dest='check', action='store_true',
help="run the module in check mode")
options, args = parser.parse_args()
From 79fe1901f6642e9178d2ae778613f7be888d246d Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 6 May 2015 06:05:44 -0700
Subject: [PATCH 039/971] Update module pointers for v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 85c8a892c8..aedcd37ff6 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 85c8a892c80b92730831d95fa654ef6d35b0eca0
+Subproject commit aedcd37ff69e074f702ef592096f2a02448c4936
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 2690f096a4..3d00e1c5d1 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 2690f096a47646cd17db135648def88afc40d92c
+Subproject commit 3d00e1c5d10dde12146d52dbc493cad6454756b5
From 7733dc7bb51dd1632babfbdf90e6c305cc5764a7 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 6 May 2015 06:41:16 -0700
Subject: [PATCH 040/971] Fix for new octal syntax
---
lib/ansible/plugins/connections/paramiko_ssh.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/plugins/connections/paramiko_ssh.py b/lib/ansible/plugins/connections/paramiko_ssh.py
index 01e95451b8..797eeea9e0 100644
--- a/lib/ansible/plugins/connections/paramiko_ssh.py
+++ b/lib/ansible/plugins/connections/paramiko_ssh.py
@@ -370,7 +370,7 @@ class Connection(ConnectionBase):
# the file will be moved into place rather than cleaned up.
tmp_keyfile = tempfile.NamedTemporaryFile(dir=key_dir, delete=False)
- os.chmod(tmp_keyfile.name, key_stat.st_mode & 07777)
+ os.chmod(tmp_keyfile.name, key_stat.st_mode & 0o7777)
os.chown(tmp_keyfile.name, key_stat.st_uid, key_stat.st_gid)
self._save_ssh_host_keys(tmp_keyfile.name)
From 4f28a814ae97eb81c16a90a7d217b5a301041627 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Wed, 6 May 2015 08:46:33 -0500
Subject: [PATCH 041/971] Return a list instead of tuple when password is
specified to ssh connection plugin (v2)
---
lib/ansible/plugins/connections/ssh.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index 49e1e3b966..7c95cc3c0f 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -144,7 +144,7 @@ class Connection(ConnectionBase):
except OSError:
raise AnsibleError("to use the 'ssh' connection type with passwords, you must install the sshpass program")
(self.rfd, self.wfd) = os.pipe()
- return ("sshpass", "-d{0}".format(self.rfd))
+ return ["sshpass", "-d{0}".format(self.rfd)]
return []
def _send_password(self):
From 1152c7327af74b4fbd57b47a83833e8647295b50 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Wed, 6 May 2015 15:18:37 -0500
Subject: [PATCH 042/971] Fix serialization bug for plugins (v2)
---
lib/ansible/plugins/__init__.py | 30 ++++++++++++++++++++----------
1 file changed, 20 insertions(+), 10 deletions(-)
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index ad18bfe09b..36b5c3d033 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -88,6 +88,9 @@ class PluginLoader:
subdir = data.get('subdir')
aliases = data.get('aliases')
+ PATH_CACHE[class_name] = data.get('PATH_CACHE')
+ PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
+
self.__init__(class_name, package, config, subdir, aliases)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
@@ -98,13 +101,15 @@ class PluginLoader:
'''
return dict(
- class_name = self.class_name,
- package = self.package,
- config = self.config,
- subdir = self.subdir,
- aliases = self.aliases,
- _extra_dirs = self._extra_dirs,
- _searched_paths = self._searched_paths,
+ class_name = self.class_name,
+ package = self.package,
+ config = self.config,
+ subdir = self.subdir,
+ aliases = self.aliases,
+ _extra_dirs = self._extra_dirs,
+ _searched_paths = self._searched_paths,
+ PATH_CACHE = PATH_CACHE[self.class_name],
+ PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
)
def print_paths(self):
@@ -258,12 +263,14 @@ class PluginLoader:
path = self.find_plugin(name)
if path is None:
return None
- elif kwargs.get('class_only', False):
- return getattr(self._module_cache[path], self.class_name)
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
- return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
+
+ if kwargs.get('class_only', False):
+ return getattr(self._module_cache[path], self.class_name)
+ else:
+ return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
@@ -275,12 +282,15 @@ class PluginLoader:
name, ext = os.path.splitext(os.path.basename(path))
if name.startswith("_"):
continue
+
if path not in self._module_cache:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
+
if kwargs.get('class_only', False):
obj = getattr(self._module_cache[path], self.class_name)
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
+
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj
From cee7cd5d3b979f7481e0c7c3e42aa040193d14a7 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 7 May 2015 08:29:04 -0700
Subject: [PATCH 043/971] Update v2 module refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index aedcd37ff6..31b6f75570 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit aedcd37ff69e074f702ef592096f2a02448c4936
+Subproject commit 31b6f75570de2d9c321c596e659fd5daf42e786d
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 3d00e1c5d1..66a96ad6e2 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 3d00e1c5d10dde12146d52dbc493cad6454756b5
+Subproject commit 66a96ad6e2a93f7ed786c630cf81e996b9a50403
From 4f4df29cb0bddde5c88c9357f78c24c1ef0a0ac7 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Wed, 6 May 2015 17:06:43 -0500
Subject: [PATCH 044/971] Add ability to specify using ssh_args in synchronize
for v2
---
lib/ansible/plugins/action/synchronize.py | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index 1bc64ff4d5..171bcef6e0 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -22,6 +22,8 @@ import os.path
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
+from ansible import constants
+
class ActionModule(ActionBase):
@@ -81,6 +83,7 @@ class ActionModule(ActionBase):
src = self._task.args.get('src', None)
dest = self._task.args.get('dest', None)
+ use_ssh_args = self._task.args.pop('use_ssh_args', None)
# FIXME: this doesn't appear to be used anywhere?
local_rsync_path = task_vars.get('ansible_rsync_path')
@@ -162,6 +165,9 @@ class ActionModule(ActionBase):
if rsync_path:
self._task.args['rsync_path'] = '"%s"' % rsync_path
+ if use_ssh_args:
+ self._task.args['ssh_args'] = constants.ANSIBLE_SSH_ARGS
+
# run the module and store the result
result = self._execute_module('synchronize')
From 88e8ecb620e99948f162b920354366851d79f94f Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Thu, 7 May 2015 12:20:11 -0500
Subject: [PATCH 045/971] Actually get the synchronize action plugin to work
---
lib/ansible/plugins/action/synchronize.py | 22 +++++++++++++---------
1 file changed, 13 insertions(+), 9 deletions(-)
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index 171bcef6e0..c1b2f60e7f 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -51,7 +51,7 @@ class ActionModule(ActionBase):
path = self._get_absolute_path(path=path)
return path
- def _process_remote(self, host, task, path, user):
+ def _process_remote(self, host, path, user):
transport = self._connection_info.connection
return_data = None
if not host in ['127.0.0.1', 'localhost'] or transport != "local":
@@ -71,7 +71,7 @@ class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
''' generates params and passes them on to the rsync module '''
- original_transport = task_vars.get('ansible_connection', self._connection_info.connection)
+ original_transport = task_vars.get('ansible_connection') or self._connection_info.connection
transport_overridden = False
if task_vars.get('delegate_to') is None:
task_vars['delegate_to'] = '127.0.0.1'
@@ -79,7 +79,7 @@ class ActionModule(ActionBase):
if original_transport != 'local':
task_vars['ansible_connection'] = 'local'
transport_overridden = True
- self.runner.sudo = False
+ self._connection_info.become = False
src = self._task.args.get('src', None)
dest = self._task.args.get('dest', None)
@@ -90,14 +90,14 @@ class ActionModule(ActionBase):
# from the perspective of the rsync call the delegate is the localhost
src_host = '127.0.0.1'
- dest_host = task_vars.get('ansible_ssh_host', task_vars.get('inventory_hostname'))
+ dest_host = task_vars.get('ansible_ssh_host') or task_vars.get('inventory_hostname')
# allow ansible_ssh_host to be templated
dest_is_local = dest_host in ['127.0.0.1', 'localhost']
# CHECK FOR NON-DEFAULT SSH PORT
dest_port = self._task.args.get('dest_port')
- inv_port = task_vars.get('ansible_ssh_port', task_vars.get('inventory_hostname'))
+ inv_port = task_vars.get('ansible_ssh_port') or task_vars.get('inventory_hostname')
if inv_port != dest_port and inv_port != task_vars.get('inventory_hostname'):
dest_port = inv_port
@@ -133,17 +133,18 @@ class ActionModule(ActionBase):
user = task_vars['hostvars'][conn.delegate].get('ansible_ssh_user')
if not use_delegate or not user:
- user = task_vars.get('ansible_ssh_user', self.runner.remote_user)
+ user = task_vars.get('ansible_ssh_user') or self._connection_info.remote_user
if use_delegate:
# FIXME
- private_key = task_vars.get('ansible_ssh_private_key_file', self.runner.private_key_file)
+ private_key = task_vars.get('ansible_ssh_private_key_file') or self._connection_info.private_key_file
else:
- private_key = task_vars.get('ansible_ssh_private_key_file', self.runner.private_key_file)
+ private_key = task_vars.get('ansible_ssh_private_key_file') or self._connection_info.private_key_file
if private_key is not None:
private_key = os.path.expanduser(private_key)
-
+ self._task.args['private_key'] = private_key
+
# use the mode to define src and dest's url
if self._task.args.get('mode', 'push') == 'pull':
# src is a remote path: @, dest is a local path
@@ -154,6 +155,9 @@ class ActionModule(ActionBase):
src = self._process_origin(src_host, src, user)
dest = self._process_remote(dest_host, dest, user)
+ self._task.args['src'] = src
+ self._task.args['dest'] = dest
+
# Allow custom rsync path argument.
rsync_path = self._task.args.get('rsync_path', None)
From 8db21f99b74c4c483bf53df599db20d9257ff55f Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 7 May 2015 12:53:22 -0500
Subject: [PATCH 046/971] Set the inventory on the variable manager for the
adhoc cli usage (v2)
---
lib/ansible/cli/adhoc.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index 16c2dc9e42..f7692a1335 100644
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -93,6 +93,7 @@ class AdHocCLI(CLI):
variable_manager = VariableManager()
inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)
+ variable_manager.set_inventory(inventory)
hosts = inventory.list_hosts(pattern)
if len(hosts) == 0:
From 198476e34545a356aeddb405ddd73ae309b9e109 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 7 May 2015 13:06:51 -0500
Subject: [PATCH 047/971] Cleaning up some portions of synchronize action
plugin (v2)
---
lib/ansible/plugins/action/synchronize.py | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index c1b2f60e7f..219a982cb1 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -96,10 +96,7 @@ class ActionModule(ActionBase):
dest_is_local = dest_host in ['127.0.0.1', 'localhost']
# CHECK FOR NON-DEFAULT SSH PORT
- dest_port = self._task.args.get('dest_port')
- inv_port = task_vars.get('ansible_ssh_port') or task_vars.get('inventory_hostname')
- if inv_port != dest_port and inv_port != task_vars.get('inventory_hostname'):
- dest_port = inv_port
+ dest_port = task_vars.get('ansible_ssh_port') or self._task.args.get('dest_port') or 22
# edge case: explicit delegate and dest_host are the same
if dest_host == task_vars.get('delegate_to'):
From 0d3e015dd105d32395995c3e583ee8e9f8fb18f1 Mon Sep 17 00:00:00 2001
From: Aleksey Zhukov
Date: Thu, 7 May 2015 22:53:10 +0300
Subject: [PATCH 048/971] Update DigitalOcean dynamic inventory to API v2
---
plugins/inventory/digital_ocean.py | 299 +++++++----------------------
1 file changed, 74 insertions(+), 225 deletions(-)
diff --git a/plugins/inventory/digital_ocean.py b/plugins/inventory/digital_ocean.py
index 1c3eccd21e..29c4856efb 100755
--- a/plugins/inventory/digital_ocean.py
+++ b/plugins/inventory/digital_ocean.py
@@ -68,10 +68,7 @@ When run against a specific host, this script returns the following variables:
usage: digital_ocean.py [-h] [--list] [--host HOST] [--all]
[--droplets] [--regions] [--images] [--sizes]
[--ssh-keys] [--domains] [--pretty]
- [--cache-path CACHE_PATH]
- [--cache-max_age CACHE_MAX_AGE]
- [--refresh-cache] [--client-id CLIENT_ID]
- [--api-key API_KEY]
+ [--api-token API_TOKEN]
Produce an Ansible Inventory file based on DigitalOcean credentials
@@ -89,16 +86,8 @@ optional arguments:
--ssh-keys List SSH keys as JSON
--domains List Domains as JSON
--pretty, -p Pretty-print results
- --cache-path CACHE_PATH
- Path to the cache files (default: .)
- --cache-max_age CACHE_MAX_AGE
- Maximum age of the cached items (default: 0)
- --refresh-cache Force refresh of cache by making API requests to
- DigitalOcean (default: False - use cache files)
- --client-id CLIENT_ID, -c CLIENT_ID
- DigitalOcean Client ID
- --api-key API_KEY, -a API_KEY
- DigitalOcean API Key
+ --api-token API_TOKEN, -a API_TOKEN
+ DigitalOcean API Token
```
'''
@@ -157,11 +146,6 @@ class DigitalOceanInventory(object):
# DigitalOceanInventory data
self.data = {} # All DigitalOcean data
self.inventory = {} # Ansible Inventory
- self.index = {} # Various indices of Droplet metadata
-
- # Define defaults
- self.cache_path = '.'
- self.cache_max_age = 0
# Read settings, environment variables, and CLI arguments
self.read_settings()
@@ -169,49 +153,40 @@ class DigitalOceanInventory(object):
self.read_cli_args()
# Verify credentials were set
- if not hasattr(self, 'client_id') or not hasattr(self, 'api_key'):
- print '''Could not find values for DigitalOcean client_id and api_key.
-They must be specified via either ini file, command line argument (--client-id and --api-key),
-or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
+ if not hasattr(self, 'api_token'):
+ print '''Could not find values for DigitalOcean api_token.
+They must be specified via either ini file, command line argument (--api-token),
+or environment variables (DO_API_TOKEN)'''
sys.exit(-1)
# env command, show DigitalOcean credentials
if self.args.env:
- print "DO_CLIENT_ID=%s DO_API_KEY=%s" % (self.client_id, self.api_key)
+ print "DO_API_TOKEN=%s" % self.api_token
sys.exit(0)
- # Manage cache
- self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
- self.cache_refreshed = False
-
- if not self.args.force_cache and self.args.refresh_cache or not self.is_cache_valid():
- self.load_all_data_from_digital_ocean()
- else:
- self.load_from_cache()
- if len(self.data) == 0:
- if self.args.force_cache:
- print '''Cache is empty and --force-cache was specified'''
- sys.exit(-1)
- self.load_all_data_from_digital_ocean()
- else:
- # We always get fresh droplets for --list, --host, --all, and --droplets
- # unless --force-cache is specified
- if not self.args.force_cache and (
- self.args.list or self.args.host or self.args.all or self.args.droplets):
- self.load_droplets_from_digital_ocean()
+ self.manager = DoManager(None, self.api_token, api_version=2)
# Pick the json_data to print based on the CLI command
- if self.args.droplets: json_data = { 'droplets': self.data['droplets'] }
- elif self.args.regions: json_data = { 'regions': self.data['regions'] }
- elif self.args.images: json_data = { 'images': self.data['images'] }
- elif self.args.sizes: json_data = { 'sizes': self.data['sizes'] }
- elif self.args.ssh_keys: json_data = { 'ssh_keys': self.data['ssh_keys'] }
- elif self.args.domains: json_data = { 'domains': self.data['domains'] }
- elif self.args.all: json_data = self.data
-
- elif self.args.host: json_data = self.load_droplet_variables_for_host()
+ if self.args.droplets:
+ json_data = self.load_from_digital_ocean('droplets')
+ elif self.args.regions:
+ json_data = self.load_from_digital_ocean('regions')
+ elif self.args.images:
+ json_data = self.load_from_digital_ocean('images')
+ elif self.args.sizes:
+ json_data = self.load_from_digital_ocean('sizes')
+ elif self.args.ssh_keys:
+ json_data = self.load_from_digital_ocean('ssh_keys')
+ elif self.args.domains:
+ json_data = self.load_from_digital_ocean('domains')
+ elif self.args.all:
+ json_data = self.load_from_digital_ocean()
+ elif self.args.host:
+ json_data = self.load_droplet_variables_for_host()
else: # '--list' this is last to make it default
- json_data = self.inventory
+ self.data = self.load_from_digital_ocean('droplets')
+ self.build_inventory()
+ json_data = self.inventory
if self.args.pretty:
print json.dumps(json_data, sort_keys=True, indent=2)
@@ -230,10 +205,8 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')
# Credentials
- if config.has_option('digital_ocean', 'client_id'):
- self.client_id = config.get('digital_ocean', 'client_id')
- if config.has_option('digital_ocean', 'api_key'):
- self.api_key = config.get('digital_ocean', 'api_key')
+ if config.has_option('digital_ocean', 'api_token'):
+ self.api_token = config.get('digital_ocean', 'api_token')
# Cache related
if config.has_option('digital_ocean', 'cache_path'):
@@ -245,8 +218,10 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
def read_environment(self):
''' Reads the settings from environment variables '''
# Setup credentials
- if os.getenv("DO_CLIENT_ID"): self.client_id = os.getenv("DO_CLIENT_ID")
- if os.getenv("DO_API_KEY"): self.api_key = os.getenv("DO_API_KEY")
+ if os.getenv("DO_API_TOKEN"):
+ self.api_token = os.getenv("DO_API_TOKEN")
+ if os.getenv("DO_API_KEY"):
+ self.api_token = os.getenv("DO_API_KEY")
def read_cli_args(self):
@@ -266,73 +241,42 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results')
- parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
- parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
- parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
- parser.add_argument('--refresh-cache','-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
-
- parser.add_argument('--env','-e', action='store_true', help='Display DO_CLIENT_ID and DO_API_KEY')
- parser.add_argument('--client-id','-c', action='store', help='DigitalOcean Client ID')
- parser.add_argument('--api-key','-a', action='store', help='DigitalOcean API Key')
+ parser.add_argument('--env','-e', action='store_true', help='Display DO_API_TOKEN')
+ parser.add_argument('--api-token','-a', action='store', help='DigitalOcean API Token')
self.args = parser.parse_args()
- if self.args.client_id: self.client_id = self.args.client_id
- if self.args.api_key: self.api_key = self.args.api_key
- if self.args.cache_path: self.cache_path = self.args.cache_path
- if self.args.cache_max_age: self.cache_max_age = self.args.cache_max_age
+ if self.args.api_token:
+ self.api_token = self.args.api_token
# Make --list default if none of the other commands are specified
- if (not self.args.droplets and not self.args.regions and not self.args.images and
- not self.args.sizes and not self.args.ssh_keys and not self.args.domains and
- not self.args.all and not self.args.host):
- self.args.list = True
+ if (not self.args.droplets and not self.args.regions and
+ not self.args.images and not self.args.sizes and
+ not self.args.ssh_keys and not self.args.domains and
+ not self.args.all and not self.args.host):
+ self.args.list = True
###########################################################################
# Data Management
###########################################################################
- def load_all_data_from_digital_ocean(self):
- ''' Use dopy to get all the information from DigitalOcean and save data in cache files '''
- manager = DoManager(self.client_id, self.api_key)
-
- self.data = {}
- self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
- self.data['regions'] = self.sanitize_list(manager.all_regions())
- self.data['images'] = self.sanitize_list(manager.all_images(filter=None))
- self.data['sizes'] = self.sanitize_list(manager.sizes())
- self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys())
- self.data['domains'] = self.sanitize_list(manager.all_domains())
-
- self.index = {}
- self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name')
- self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name')
- self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name')
- self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution')
- self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
-
- self.build_inventory()
-
- self.write_to_cache()
-
-
- def load_droplets_from_digital_ocean(self):
- ''' Use dopy to get droplet information from DigitalOcean and save data in cache files '''
- manager = DoManager(self.client_id, self.api_key)
- self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
- self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
- self.build_inventory()
- self.write_to_cache()
-
-
- def build_index(self, source_seq, key_from, key_to, use_slug=True):
- dest_dict = {}
- for item in source_seq:
- name = (use_slug and item.has_key('slug')) and item['slug'] or item[key_to]
- key = item[key_from]
- dest_dict[key] = name
- return dest_dict
+ def load_from_digital_ocean(self, resource=None):
+ '''Get JSON from DigitalOcean API'''
+ json_data = {}
+ if resource == 'droplets' or resource is None:
+ json_data['droplets'] = self.manager.all_active_droplets()
+ if resource == 'regions' or resource is None:
+ json_data['regions'] = self.manager.all_regions()
+ if resource == 'images' or resource is None:
+ json_data['images'] = self.manager.all_images(filter=None)
+ if resource == 'sizes' or resource is None:
+ json_data['sizes'] = self.manager.sizes()
+ if resource == 'ssh_keys' or resource is None:
+ json_data['ssh_keys'] = self.manager.all_ssh_keys()
+ if resource == 'domains' or resource is None:
+ json_data['domains'] = self.manager.all_domains()
+ return json_data
def build_inventory(self):
@@ -345,107 +289,27 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
self.inventory[droplet['id']] = [dest]
self.push(self.inventory, droplet['name'], dest)
- self.push(self.inventory, 'region_'+droplet['region_id'], dest)
- self.push(self.inventory, 'image_' +droplet['image_id'], dest)
- self.push(self.inventory, 'size_' +droplet['size_id'], dest)
- self.push(self.inventory, 'status_'+droplet['status'], dest)
+ self.push(self.inventory, 'region_' + droplet['region']['slug'], dest)
+ self.push(self.inventory, 'image_' + str(droplet['image']['id']), dest)
+ self.push(self.inventory, 'size_' + droplet['size']['slug'], dest)
- region_name = self.index['region_to_name'].get(droplet['region_id'])
- if region_name:
- self.push(self.inventory, 'region_'+region_name, dest)
+ image_slug = droplet['image']['slug']
+ if image_slug:
+ self.push(self.inventory, 'image_' + self.to_safe(image_slug), dest)
+ else:
+ image_name = droplet['image']['name']
+ if image_name:
+ self.push(self.inventory, 'image_' + self.to_safe(image_name), dest)
- size_name = self.index['size_to_name'].get(droplet['size_id'])
- if size_name:
- self.push(self.inventory, 'size_'+size_name, dest)
-
- image_name = self.index['image_to_name'].get(droplet['image_id'])
- if image_name:
- self.push(self.inventory, 'image_'+image_name, dest)
-
- distro_name = self.index['image_to_distro'].get(droplet['image_id'])
- if distro_name:
- self.push(self.inventory, 'distro_'+distro_name, dest)
+ self.push(self.inventory, 'distro_' + self.to_safe(droplet['image']['distribution']), dest)
+ self.push(self.inventory, 'status_' + droplet['status'], dest)
def load_droplet_variables_for_host(self):
'''Generate a JSON response to a --host call'''
- host = self.to_safe(str(self.args.host))
+ host = int(self.args.host)
- if not host in self.index['host_to_droplet']:
- # try updating cache
- if not self.args.force_cache:
- self.load_all_data_from_digital_ocean()
- if not host in self.index['host_to_droplet']:
- # host might not exist anymore
- return {}
-
- droplet = None
- if self.cache_refreshed:
- for drop in self.data['droplets']:
- if drop['ip_address'] == host:
- droplet = self.sanitize_dict(drop)
- break
- else:
- # Cache wasn't refreshed this run, so hit DigitalOcean API
- manager = DoManager(self.client_id, self.api_key)
- droplet_id = self.index['host_to_droplet'][host]
- droplet = self.sanitize_dict(manager.show_droplet(droplet_id))
-
- if not droplet:
- return {}
-
- # Put all the information in a 'do_' namespace
- info = {}
- for k, v in droplet.items():
- info['do_'+k] = v
-
- # Generate user-friendly variables (i.e. not the ID's)
- if droplet.has_key('region_id'):
- info['do_region'] = self.index['region_to_name'].get(droplet['region_id'])
- if droplet.has_key('size_id'):
- info['do_size'] = self.index['size_to_name'].get(droplet['size_id'])
- if droplet.has_key('image_id'):
- info['do_image'] = self.index['image_to_name'].get(droplet['image_id'])
- info['do_distro'] = self.index['image_to_distro'].get(droplet['image_id'])
-
- return info
-
-
-
- ###########################################################################
- # Cache Management
- ###########################################################################
-
- def is_cache_valid(self):
- ''' Determines if the cache files have expired, or if it is still valid '''
- if os.path.isfile(self.cache_filename):
- mod_time = os.path.getmtime(self.cache_filename)
- current_time = time()
- if (mod_time + self.cache_max_age) > current_time:
- return True
- return False
-
-
- def load_from_cache(self):
- ''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
- cache = open(self.cache_filename, 'r')
- json_data = cache.read()
- cache.close()
- data = json.loads(json_data)
-
- self.data = data['data']
- self.inventory = data['inventory']
- self.index = data['index']
-
-
- def write_to_cache(self):
- ''' Writes data in JSON format to a file '''
- data = { 'data': self.data, 'index': self.index, 'inventory': self.inventory }
- json_data = json.dumps(data, sort_keys=True, indent=2)
-
- cache = open(self.cache_filename, 'w')
- cache.write(json_data)
- cache.close()
+ return self.manager.show_droplet(host)
@@ -456,7 +320,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
def push(self, my_dict, key, element):
''' Pushed an element onto an array that may not have been defined in the dict '''
if key in my_dict:
- my_dict[key].append(element);
+ my_dict[key].append(element)
else:
my_dict[key] = [element]
@@ -466,21 +330,6 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
return re.sub("[^A-Za-z0-9\-\.]", "_", word)
- def sanitize_dict(self, d):
- new_dict = {}
- for k, v in d.items():
- if v != None:
- new_dict[self.to_safe(str(k))] = self.to_safe(str(v))
- return new_dict
-
-
- def sanitize_list(self, seq):
- new_seq = []
- for d in seq:
- new_seq.append(self.sanitize_dict(d))
- return new_seq
-
-
###########################################################################
# Run the script
From cd6d1f9221ce1b437cbe92b20b4f8fa3f5926562 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 7 May 2015 21:14:16 -0500
Subject: [PATCH 049/971] Fix pickling errors with cache plugins (v2)
Fixes #10945
---
lib/ansible/plugins/cache/base.py | 4 ++++
lib/ansible/plugins/cache/memcached.py | 2 ++
lib/ansible/plugins/cache/memory.py | 2 ++
lib/ansible/plugins/cache/redis.py | 3 +++
4 files changed, 11 insertions(+)
diff --git a/lib/ansible/plugins/cache/base.py b/lib/ansible/plugins/cache/base.py
index 051f02d0b0..1f85aa6174 100644
--- a/lib/ansible/plugins/cache/base.py
+++ b/lib/ansible/plugins/cache/base.py
@@ -26,6 +26,9 @@ from six import add_metaclass
@add_metaclass(ABCMeta)
class BaseCacheModule:
+ def __init__(self):
+ self.__getstate__ = self.copy
+
@abstractmethod
def get(self, key):
pass
@@ -53,3 +56,4 @@ class BaseCacheModule:
@abstractmethod
def copy(self):
pass
+
diff --git a/lib/ansible/plugins/cache/memcached.py b/lib/ansible/plugins/cache/memcached.py
index e7321a5a6b..519ca776e0 100644
--- a/lib/ansible/plugins/cache/memcached.py
+++ b/lib/ansible/plugins/cache/memcached.py
@@ -113,6 +113,8 @@ class CacheModuleKeys(collections.MutableSet):
self._cache = cache
self._keyset = dict(*args, **kwargs)
+ super(CacheModule, self).__init__()
+
def __contains__(self, key):
return key in self._keyset
diff --git a/lib/ansible/plugins/cache/memory.py b/lib/ansible/plugins/cache/memory.py
index 1562836151..19591a40cf 100644
--- a/lib/ansible/plugins/cache/memory.py
+++ b/lib/ansible/plugins/cache/memory.py
@@ -24,6 +24,8 @@ class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
self._cache = {}
+ super(CacheModule, self).__init__()
+
def get(self, key):
return self._cache.get(key)
diff --git a/lib/ansible/plugins/cache/redis.py b/lib/ansible/plugins/cache/redis.py
index 287c14bd2a..b7a624520a 100644
--- a/lib/ansible/plugins/cache/redis.py
+++ b/lib/ansible/plugins/cache/redis.py
@@ -51,6 +51,8 @@ class CacheModule(BaseCacheModule):
self._cache = StrictRedis(*connection)
self._keys_set = 'ansible_cache_keys'
+ super(CacheModule, self).__init__()
+
def _make_key(self, key):
return self._prefix + key
@@ -100,3 +102,4 @@ class CacheModule(BaseCacheModule):
for key in self.keys():
ret[key] = self.get(key)
return ret
+
From 0f1eb3cfc2b6eb6652d13aa4cc1055b7d726f4fb Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 7 May 2015 23:56:33 -0500
Subject: [PATCH 050/971] Better fix for serializing/deserializing cache
plugins (v2)
---
lib/ansible/plugins/cache/base.py | 3 ---
lib/ansible/plugins/cache/memcached.py | 8 ++++++--
lib/ansible/plugins/cache/memory.py | 8 ++++++--
lib/ansible/plugins/cache/redis.py | 7 +++++--
4 files changed, 17 insertions(+), 9 deletions(-)
diff --git a/lib/ansible/plugins/cache/base.py b/lib/ansible/plugins/cache/base.py
index 1f85aa6174..767964b281 100644
--- a/lib/ansible/plugins/cache/base.py
+++ b/lib/ansible/plugins/cache/base.py
@@ -26,9 +26,6 @@ from six import add_metaclass
@add_metaclass(ABCMeta)
class BaseCacheModule:
- def __init__(self):
- self.__getstate__ = self.copy
-
@abstractmethod
def get(self, key):
pass
diff --git a/lib/ansible/plugins/cache/memcached.py b/lib/ansible/plugins/cache/memcached.py
index 519ca776e0..a34855bafc 100644
--- a/lib/ansible/plugins/cache/memcached.py
+++ b/lib/ansible/plugins/cache/memcached.py
@@ -113,8 +113,6 @@ class CacheModuleKeys(collections.MutableSet):
self._cache = cache
self._keyset = dict(*args, **kwargs)
- super(CacheModule, self).__init__()
-
def __contains__(self, key):
return key in self._keyset
@@ -193,3 +191,9 @@ class CacheModule(BaseCacheModule):
def copy(self):
return self._keys.copy()
+
+ def __getstate__(self):
+ return dict()
+
+ def __setstate__(self, data):
+ self.__init__()
diff --git a/lib/ansible/plugins/cache/memory.py b/lib/ansible/plugins/cache/memory.py
index 19591a40cf..417ef20e0e 100644
--- a/lib/ansible/plugins/cache/memory.py
+++ b/lib/ansible/plugins/cache/memory.py
@@ -24,8 +24,6 @@ class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
self._cache = {}
- super(CacheModule, self).__init__()
-
def get(self, key):
return self._cache.get(key)
@@ -46,3 +44,9 @@ class CacheModule(BaseCacheModule):
def copy(self):
return self._cache.copy()
+
+ def __getstate__(self):
+ return self.copy()
+
+ def __setstate__(self, data):
+ self._cache = data
diff --git a/lib/ansible/plugins/cache/redis.py b/lib/ansible/plugins/cache/redis.py
index b7a624520a..6c97f3eab8 100644
--- a/lib/ansible/plugins/cache/redis.py
+++ b/lib/ansible/plugins/cache/redis.py
@@ -51,8 +51,6 @@ class CacheModule(BaseCacheModule):
self._cache = StrictRedis(*connection)
self._keys_set = 'ansible_cache_keys'
- super(CacheModule, self).__init__()
-
def _make_key(self, key):
return self._prefix + key
@@ -103,3 +101,8 @@ class CacheModule(BaseCacheModule):
ret[key] = self.get(key)
return ret
+ def __getstate__(self):
+ return dict()
+
+ def __setstate__(self, data):
+ self.__init__()
From 56c9614e74668dc4cfc2b1de3372d6bd24a96769 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Fri, 8 May 2015 14:25:31 -0400
Subject: [PATCH 051/971] made playbook include taggable, removed unused
conditional import
---
lib/ansible/playbook/playbook_include.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py
index 5c91dd14ad..075e6dcbdf 100644
--- a/lib/ansible/playbook/playbook_include.py
+++ b/lib/ansible/playbook/playbook_include.py
@@ -25,11 +25,10 @@ from ansible.parsing.splitter import split_args, parse_kv
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
-from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.errors import AnsibleParserError
-class PlaybookInclude(Base):
+class PlaybookInclude(Base, Taggable):
_name = FieldAttribute(isa='string')
_include = FieldAttribute(isa='string')
From a0fc8bb0bd834e29a652ed7face4ca360dc6cc56 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Fri, 8 May 2015 11:34:19 -0500
Subject: [PATCH 052/971] Testing additions and fixes
* Fix import pathing for units.mock
* Add some additional requirements
* Use compileall to test compatibility with different python versions
---
.travis.yml | 6 ++++
setup.py | 2 +-
test-requirements.txt | 1 +
test/units/executor/test_play_iterator.py | 2 +-
test/units/playbook/test_play.py | 2 +-
test/units/playbook/test_playbook.py | 2 +-
test/units/playbook/test_role.py | 2 +-
test/units/vars/test_variable_manager.py | 2 +-
tox.ini | 36 ++++++++++++++---------
9 files changed, 35 insertions(+), 20 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 6e18e06050..e53b870597 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,6 +3,12 @@ language: python
env:
- TOXENV=py26
- TOXENV=py27
+addons:
+ apt:
+ sources:
+ - deadsnakes
+ packages:
+ - python2.4
install:
- pip install tox
script:
diff --git a/setup.py b/setup.py
index 3752741406..1f73836cbd 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ setup(name='ansible',
author_email='michael@ansible.com',
url='http://ansible.com/',
license='GPLv3',
- install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'],
+ install_requires=['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6', 'six'],
package_dir={ '': 'lib' },
packages=find_packages('lib'),
package_data={
diff --git a/test-requirements.txt b/test-requirements.txt
index abb61ed1e9..fe65457f37 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -7,3 +7,4 @@ mock
passlib
coverage
coveralls
+unittest2
diff --git a/test/units/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py
index 47c0352b25..2fa32c7119 100644
--- a/test/units/executor/test_play_iterator.py
+++ b/test/units/executor/test_play_iterator.py
@@ -26,7 +26,7 @@ from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook import Playbook
-from test.mock.loader import DictDataLoader
+from units.mock.loader import DictDataLoader
class TestPlayIterator(unittest.TestCase):
diff --git a/test/units/playbook/test_play.py b/test/units/playbook/test_play.py
index 22486f4129..637b6dbbe1 100644
--- a/test/units/playbook/test_play.py
+++ b/test/units/playbook/test_play.py
@@ -27,7 +27,7 @@ from ansible.playbook.play import Play
from ansible.playbook.role import Role
from ansible.playbook.task import Task
-from test.mock.loader import DictDataLoader
+from units.mock.loader import DictDataLoader
class TestPlay(unittest.TestCase):
diff --git a/test/units/playbook/test_playbook.py b/test/units/playbook/test_playbook.py
index dfb52dc7b1..97307c4b27 100644
--- a/test/units/playbook/test_playbook.py
+++ b/test/units/playbook/test_playbook.py
@@ -26,7 +26,7 @@ from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook import Playbook
from ansible.vars import VariableManager
-from test.mock.loader import DictDataLoader
+from units.mock.loader import DictDataLoader
class TestPlaybook(unittest.TestCase):
diff --git a/test/units/playbook/test_role.py b/test/units/playbook/test_role.py
index d0f3708898..7aab5133da 100644
--- a/test/units/playbook/test_role.py
+++ b/test/units/playbook/test_role.py
@@ -28,7 +28,7 @@ from ansible.playbook.role import Role
from ansible.playbook.role.include import RoleInclude
from ansible.playbook.task import Task
-from test.mock.loader import DictDataLoader
+from units.mock.loader import DictDataLoader
class TestRole(unittest.TestCase):
diff --git a/test/units/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
index f8d815eb6f..173ba1370d 100644
--- a/test/units/vars/test_variable_manager.py
+++ b/test/units/vars/test_variable_manager.py
@@ -24,7 +24,7 @@ from ansible.compat.tests.mock import patch, MagicMock
from ansible.vars import VariableManager
-from test.mock.loader import DictDataLoader
+from units.mock.loader import DictDataLoader
class TestVariableManager(unittest.TestCase):
diff --git a/tox.ini b/tox.ini
index 5440a5825c..26d80ff7d3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,23 +1,31 @@
[tox]
-envlist = {py26,py27}-v{1}
+envlist = {py26,py27}
[testenv]
commands = make tests
deps = -r{toxinidir}/test-requirements.txt
whitelist_externals = make
-[testenv:py26-v1]
+[testenv:py26]
+commands =
+ python -m compileall -fq -x 'test|samples' .
+ python2.4 -m compileall -fq -x 'module_utils/(a10|rax|openstack|ec2|gce).py' lib/ansible/module_utils
+ make tests
+deps = -r{toxinidir}/test-requirements.txt
+whitelist_externals =
+ make
+ python2.4
-[testenv:py27-v1]
+[testenv:py27]
+commands =
+ python -m compileall -fq -x 'test|samples' .
+ make tests
+deps = -r{toxinidir}/test-requirements.txt
+whitelist_externals = make
-[testenv:py26-v2]
-deps = -r{toxinidir}/v2/test-requirements.txt
-commands = make newtests
-
-[testenv:py27-v2]
-deps = -r{toxinidir}/v2/test-requirements.txt
-commands = make newtests
-
-[testenv:py34-v2]
-deps = -r{toxinidir}/v2/test-requirements.txt
-commands = make newtests
+[testenv:py34]
+commands =
+ python -m compileall -fq -x 'lib/ansible/module_utils' lib
+ make tests
+deps = -r{toxinidir}/test-requirements.txt
+whitelist_externals = make
From 3a87b2727d5cf5cbedef0d68eb95a81d4f54a69d Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Fri, 8 May 2015 13:10:40 -0700
Subject: [PATCH 053/971] Fix format strings for python2.6
---
lib/ansible/parsing/vault/__init__.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py
index e45fddc197..40d02d3d59 100644
--- a/lib/ansible/parsing/vault/__init__.py
+++ b/lib/ansible/parsing/vault/__init__.py
@@ -102,7 +102,7 @@ class VaultLib(object):
cipher = globals()['Vault' + self.cipher_name]
this_cipher = cipher()
else:
- raise errors.AnsibleError("{} cipher could not be found".format(self.cipher_name))
+ raise errors.AnsibleError("{0} cipher could not be found".format(self.cipher_name))
"""
# combine sha + data
@@ -135,7 +135,7 @@ class VaultLib(object):
cipher = globals()['Vault' + ciphername]
this_cipher = cipher()
else:
- raise errors.AnsibleError("{} cipher could not be found".format(ciphername))
+ raise errors.AnsibleError("{0} cipher could not be found".format(ciphername))
# try to unencrypt data
data = this_cipher.decrypt(data, self.password)
@@ -379,7 +379,7 @@ class VaultAES(object):
d = d_i = b''
while len(d) < key_length + iv_length:
- text = "{}{}{}".format(d_i, password, salt)
+ text = "{0}{1}{2}".format(d_i, password, salt)
d_i = md5(to_bytes(text)).digest()
d += d_i
From 7f21f270d9ea51b352c6918a3d70a522367b7cd1 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Fri, 8 May 2015 13:18:19 -0700
Subject: [PATCH 054/971] Be more lenient in instance check: MutableMapping is
more general than dict
---
lib/ansible/vars/__init__.py | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
index f30d52b7a3..040c224448 100644
--- a/lib/ansible/vars/__init__.py
+++ b/lib/ansible/vars/__init__.py
@@ -22,6 +22,7 @@ __metaclass__ = type
import os
from collections import defaultdict
+from collections import MutableMapping
try:
from hashlib import sha1
@@ -73,7 +74,7 @@ class VariableManager:
def set_extra_vars(self, value):
''' ensures a clean copy of the extra_vars are used to set the value '''
- assert isinstance(value, dict)
+ assert isinstance(value, MutableMapping)
self._extra_vars = value.copy()
def set_inventory(self, inventory):
@@ -83,7 +84,7 @@ class VariableManager:
'''
Validates that both arguments are dictionaries, or an error is raised.
'''
- if not (isinstance(a, dict) and isinstance(b, dict)):
+ if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
raise AnsibleError("failed to combine variables, expected dicts but got a '%s' and a '%s'" % (type(a).__name__, type(b).__name__))
def _combine_vars(self, a, b):
From f9f8af06fc241659468c8c1663dfa4aaff7f1eb8 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Fri, 8 May 2015 13:49:10 -0700
Subject: [PATCH 055/971] Change asserts to assertIsInstance for better error
messages
---
test/units/playbook/test_block.py | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/test/units/playbook/test_block.py b/test/units/playbook/test_block.py
index 348681527b..2c20200226 100644
--- a/test/units/playbook/test_block.py
+++ b/test/units/playbook/test_block.py
@@ -60,18 +60,18 @@ class TestBlock(unittest.TestCase):
)
b = Block.load(ds)
self.assertEqual(len(b.block), 1)
- assert isinstance(b.block[0], Task)
+ self.assertIsInstance(b.block[0], Task)
self.assertEqual(len(b.rescue), 1)
- assert isinstance(b.rescue[0], Task)
+ self.assertIsInstance(b.rescue[0], Task)
self.assertEqual(len(b.always), 1)
- assert isinstance(b.always[0], Task)
+ self.assertIsInstance(b.always[0], Task)
# not currently used
#self.assertEqual(len(b.otherwise), 1)
- #assert isinstance(b.otherwise[0], Task)
+ #self.assertIsInstance(b.otherwise[0], Task)
def test_load_implicit_block(self):
ds = [dict(action='foo')]
b = Block.load(ds)
self.assertEqual(len(b.block), 1)
- assert isinstance(b.block[0], Task)
+ self.assertIsInstance(b.block[0], Task)
From d1977dad23fb3d9ae4095066c03ede44ed11d656 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Fri, 8 May 2015 19:19:03 -0400
Subject: [PATCH 056/971] started implementing syntax check
---
lib/ansible/cli/playbook.py | 2 +-
lib/ansible/executor/playbook_executor.py | 9 ++++++++-
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index eb60bacbd2..69e411dc87 100644
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -87,7 +87,7 @@ class PlaybookCLI(CLI):
passwords = {}
# don't deal with privilege escalation or passwords when we don't need to
- if not self.options.listhosts and not self.options.listtasks and not self.options.listtags:
+ if not self.options.listhosts and not self.options.listtasks and not self.options.listtags and not self.options.syntax:
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
diff --git a/lib/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py
index 5d72ef15bd..5e339e4031 100644
--- a/lib/ansible/executor/playbook_executor.py
+++ b/lib/ansible/executor/playbook_executor.py
@@ -46,7 +46,7 @@ class PlaybookExecutor:
self._options = options
self.passwords = passwords
- if options.listhosts or options.listtasks or options.listtags:
+ if options.listhosts or options.listtasks or options.listtags or options.syntax:
self._tqm = None
else:
self._tqm = TaskQueueManager(inventory=inventory, variable_manager=variable_manager, loader=loader, display=display, options=options, passwords=self.passwords)
@@ -85,6 +85,9 @@ class PlaybookExecutor:
new_play = play.copy()
new_play.post_validate(templar)
+ if self._options.syntax:
+ continue
+
if self._tqm is None:
# we are just doing a listing
@@ -147,6 +150,10 @@ class PlaybookExecutor:
if self._tqm is not None:
self._cleanup()
+ if self._options.syntax:
+ self.display.display("No issues encountered")
+ return result
+
# FIXME: this stat summary stuff should be cleaned up and moved
# to a new method, if it even belongs here...
self._display.banner("PLAY RECAP")
From d2782f0d84c4e344c18f647b1ac3bfd903d75366 Mon Sep 17 00:00:00 2001
From: Monty Taylor
Date: Mon, 11 May 2015 08:06:21 -0400
Subject: [PATCH 057/971] Remove unneeded required_one_of for openstack
We're being too strict - there is a third possibility, which is that a
user will have defined the OS_* environment variables and expect them to
pass through.
---
lib/ansible/module_utils/openstack.py | 6 +-----
lib/ansible/utils/module_docs_fragments/openstack.py | 7 +++++--
v2/ansible/module_utils/openstack.py | 6 +-----
3 files changed, 7 insertions(+), 12 deletions(-)
diff --git a/lib/ansible/module_utils/openstack.py b/lib/ansible/module_utils/openstack.py
index b58cc53428..4069449144 100644
--- a/lib/ansible/module_utils/openstack.py
+++ b/lib/ansible/module_utils/openstack.py
@@ -93,11 +93,7 @@ def openstack_full_argument_spec(**kwargs):
def openstack_module_kwargs(**kwargs):
- ret = dict(
- required_one_of=[
- ['cloud', 'auth'],
- ],
- )
+ ret = {}
for key in ('mutually_exclusive', 'required_together', 'required_one_of'):
if key in kwargs:
if key in ret:
diff --git a/lib/ansible/utils/module_docs_fragments/openstack.py b/lib/ansible/utils/module_docs_fragments/openstack.py
index 7e42841d6d..3dff423772 100644
--- a/lib/ansible/utils/module_docs_fragments/openstack.py
+++ b/lib/ansible/utils/module_docs_fragments/openstack.py
@@ -23,7 +23,9 @@ class ModuleDocFragment(object):
options:
cloud:
description:
- - Named cloud to operate against. Provides default values for I(auth) and I(auth_plugin)
+ - Named cloud to operate against. Provides default values for I(auth) and
+ I(auth_type). This parameter is not needed if I(auth) is provided or if
+ OpenStack OS_* environment variables are present.
required: false
auth:
description:
@@ -32,7 +34,8 @@ options:
I(auth_url), I(username), I(password), I(project_name) and any
information about domains if the cloud supports them. For other plugins,
this param will need to contain whatever parameters that auth plugin
- requires. This parameter is not needed if a named cloud is provided.
+ requires. This parameter is not needed if a named cloud is provided or
+ OpenStack OS_* environment variables are present.
required: false
auth_type:
description:
diff --git a/v2/ansible/module_utils/openstack.py b/v2/ansible/module_utils/openstack.py
index b58cc53428..4069449144 100644
--- a/v2/ansible/module_utils/openstack.py
+++ b/v2/ansible/module_utils/openstack.py
@@ -93,11 +93,7 @@ def openstack_full_argument_spec(**kwargs):
def openstack_module_kwargs(**kwargs):
- ret = dict(
- required_one_of=[
- ['cloud', 'auth'],
- ],
- )
+ ret = {}
for key in ('mutually_exclusive', 'required_together', 'required_one_of'):
if key in kwargs:
if key in ret:
From cd14d73be8ae29ade22a9e7bad9bef1fccd1c67b Mon Sep 17 00:00:00 2001
From: Monty Taylor
Date: Mon, 11 May 2015 08:10:37 -0400
Subject: [PATCH 058/971] Add defaults and a link to os-client-config docs
---
lib/ansible/utils/module_docs_fragments/openstack.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/utils/module_docs_fragments/openstack.py b/lib/ansible/utils/module_docs_fragments/openstack.py
index 3dff423772..99897eee6d 100644
--- a/lib/ansible/utils/module_docs_fragments/openstack.py
+++ b/lib/ansible/utils/module_docs_fragments/openstack.py
@@ -80,14 +80,17 @@ options:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
required: false
+ default: None
cert:
description:
- A path to a client certificate to use as part of the SSL transaction
required: false
+ default: None
key:
description:
- A path to a client key to use as part of the SSL transaction
required: false
+ default: None
endpoint_type:
description:
- Endpoint URL type to fetch from the service catalog.
@@ -103,5 +106,6 @@ notes:
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
- plays.
+ plays. More information can be found at
+ U(http://docs.openstack.org/developer/os-client-config)
'''
From f141ec967141972e43849458419a39177daecc40 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Mon, 11 May 2015 09:28:19 -0700
Subject: [PATCH 059/971] Update v2 module refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 31b6f75570..42abf85be7 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 31b6f75570de2d9c321c596e659fd5daf42e786d
+Subproject commit 42abf85be7acbd95f6904a313c34a9495e99ca14
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 66a96ad6e2..6bf4558df8 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 66a96ad6e2a93f7ed786c630cf81e996b9a50403
+Subproject commit 6bf4558df8c61ae457dc7e5be58855d2931b607f
From daf533c80e934b219a40373042b513cd00aac695 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 11 May 2015 11:22:41 -0500
Subject: [PATCH 060/971] V2 fixes
* PluginLoader class will now be more selective about loading some
plugin classes, if a required base class is specified (used to avoid
loading v1 plugins that have changed significantly in their apis)
* Added ability for the connection info class to read values from a
given hosts variables, to support "magic" variables
* Added some more magic variables to the VariableManager output
* Fixed a bug in the ActionBase class, where the module configuration
code was not correctly handling unicode
---
lib/ansible/executor/connection_info.py | 27 +++++++++++++++++-
lib/ansible/executor/process/worker.py | 2 +-
lib/ansible/plugins/__init__.py | 38 +++++++++++++++++--------
lib/ansible/plugins/action/__init__.py | 34 ++++++++++------------
lib/ansible/vars/__init__.py | 10 +++++--
5 files changed, 75 insertions(+), 36 deletions(-)
diff --git a/lib/ansible/executor/connection_info.py b/lib/ansible/executor/connection_info.py
index 9e91cd09ea..bf78cf63a5 100644
--- a/lib/ansible/executor/connection_info.py
+++ b/lib/ansible/executor/connection_info.py
@@ -29,6 +29,20 @@ from ansible.errors import AnsibleError
__all__ = ['ConnectionInformation']
+# the magic variable mapping dictionary below is used to translate
+# host/inventory variables to fields in the ConnectionInformation
+# object. The dictionary values are tuples, to account for aliases
+# in variable names.
+
+MAGIC_VARIABLE_MAPPING = dict(
+ connection = ('ansible_connection',),
+ remote_addr = ('ansible_ssh_host', 'ansible_host'),
+ remote_user = ('ansible_ssh_user', 'ansible_user'),
+ port = ('ansible_ssh_port', 'ansible_port'),
+ password = ('ansible_ssh_pass', 'ansible_password'),
+ private_key_file = ('ansible_ssh_private_key_file', 'ansible_private_key_file'),
+ shell = ('ansible_shell_type',),
+)
class ConnectionInformation:
@@ -51,6 +65,7 @@ class ConnectionInformation:
self.port = None
self.private_key_file = C.DEFAULT_PRIVATE_KEY_FILE
self.timeout = C.DEFAULT_TIMEOUT
+ self.shell = None
# privilege escalation
self.become = None
@@ -170,7 +185,7 @@ class ConnectionInformation:
else:
setattr(self, field, value)
- def set_task_override(self, task):
+ def set_task_and_host_override(self, task, host):
'''
Sets attributes from the task if they are set, which will override
those from the play.
@@ -179,12 +194,22 @@ class ConnectionInformation:
new_info = ConnectionInformation()
new_info.copy(self)
+ # loop through a subset of attributes on the task object and set
+ # connection fields based on their values
for attr in ('connection', 'remote_user', 'become', 'become_user', 'become_pass', 'become_method', 'environment', 'no_log'):
if hasattr(task, attr):
attr_val = getattr(task, attr)
if attr_val:
setattr(new_info, attr, attr_val)
+ # finally, use the MAGIC_VARIABLE_MAPPING dictionary to update this
+ # connection info object with 'magic' variables from inventory
+ variables = host.get_vars()
+ for (attr, variable_names) in MAGIC_VARIABLE_MAPPING.iteritems():
+ for variable_name in variable_names:
+ if variable_name in variables:
+ setattr(new_info, attr, variables[variable_name])
+
return new_info
def make_become_cmd(self, cmd, executable, become_settings=None):
diff --git a/lib/ansible/executor/process/worker.py b/lib/ansible/executor/process/worker.py
index d8e8960fe4..e1488ebcb1 100644
--- a/lib/ansible/executor/process/worker.py
+++ b/lib/ansible/executor/process/worker.py
@@ -111,7 +111,7 @@ class WorkerProcess(multiprocessing.Process):
# apply the given task's information to the connection info,
# which may override some fields already set by the play or
# the options specified on the command line
- new_connection_info = connection_info.set_task_override(task)
+ new_connection_info = connection_info.set_task_and_host_override(task=task, host=host)
# execute the task and build a TaskResult from the result
debug("running TaskExecutor() for %s/%s" % (host, task))
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index 36b5c3d033..8d23ae796c 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -55,9 +55,10 @@ class PluginLoader:
The first match is used.
'''
- def __init__(self, class_name, package, config, subdir, aliases={}):
+ def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
self.class_name = class_name
+ self.base_class = required_base_class
self.package = package
self.config = config
self.subdir = subdir
@@ -87,11 +88,12 @@ class PluginLoader:
config = data.get('config')
subdir = data.get('subdir')
aliases = data.get('aliases')
+ base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
- self.__init__(class_name, package, config, subdir, aliases)
+ self.__init__(class_name, package, config, subdir, aliases, base_class)
self._extra_dirs = data.get('_extra_dirs', [])
self._searched_paths = data.get('_searched_paths', set())
@@ -102,6 +104,7 @@ class PluginLoader:
return dict(
class_name = self.class_name,
+ base_class = self.base_class,
package = self.package,
config = self.config,
subdir = self.subdir,
@@ -268,9 +271,13 @@ class PluginLoader:
self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
if kwargs.get('class_only', False):
- return getattr(self._module_cache[path], self.class_name)
+ obj = getattr(self._module_cache[path], self.class_name)
else:
- return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
+ obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
+ if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
+ return None
+
+ return obj
def all(self, *args, **kwargs):
''' instantiates all plugins with the same arguments '''
@@ -291,6 +298,9 @@ class PluginLoader:
else:
obj = getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
+ if self.base_class and self.base_class not in [base.__name__ for base in obj.__class__.__bases__]:
+ continue
+
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
yield obj
@@ -299,21 +309,22 @@ action_loader = PluginLoader(
'ActionModule',
'ansible.plugins.action',
C.DEFAULT_ACTION_PLUGIN_PATH,
- 'action_plugins'
+ 'action_plugins',
+ required_base_class='ActionBase',
)
cache_loader = PluginLoader(
'CacheModule',
'ansible.plugins.cache',
C.DEFAULT_CACHE_PLUGIN_PATH,
- 'cache_plugins'
+ 'cache_plugins',
)
callback_loader = PluginLoader(
'CallbackModule',
'ansible.plugins.callback',
C.DEFAULT_CALLBACK_PLUGIN_PATH,
- 'callback_plugins'
+ 'callback_plugins',
)
connection_loader = PluginLoader(
@@ -321,7 +332,8 @@ connection_loader = PluginLoader(
'ansible.plugins.connections',
C.DEFAULT_CONNECTION_PLUGIN_PATH,
'connection_plugins',
- aliases={'paramiko': 'paramiko_ssh'}
+ aliases={'paramiko': 'paramiko_ssh'},
+ required_base_class='ConnectionBase',
)
shell_loader = PluginLoader(
@@ -335,28 +347,29 @@ module_loader = PluginLoader(
'',
'ansible.modules',
C.DEFAULT_MODULE_PATH,
- 'library'
+ 'library',
)
lookup_loader = PluginLoader(
'LookupModule',
'ansible.plugins.lookup',
C.DEFAULT_LOOKUP_PLUGIN_PATH,
- 'lookup_plugins'
+ 'lookup_plugins',
+ required_base_class='LookupBase',
)
vars_loader = PluginLoader(
'VarsModule',
'ansible.plugins.vars',
C.DEFAULT_VARS_PLUGIN_PATH,
- 'vars_plugins'
+ 'vars_plugins',
)
filter_loader = PluginLoader(
'FilterModule',
'ansible.plugins.filter',
C.DEFAULT_FILTER_PLUGIN_PATH,
- 'filter_plugins'
+ 'filter_plugins',
)
fragment_loader = PluginLoader(
@@ -371,4 +384,5 @@ strategy_loader = PluginLoader(
'ansible.plugins.strategies',
None,
'strategy_plugins',
+ required_base_class='StrategyBase',
)
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index 83c129687e..d6861118b2 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -34,6 +34,7 @@ from ansible.parsing.utils.jsonify import jsonify
from ansible.plugins import shell_loader
from ansible.utils.debug import debug
+from ansible.utils.unicode import to_bytes
class ActionBase:
@@ -51,21 +52,21 @@ class ActionBase:
self._loader = loader
self._templar = templar
self._shared_loader_obj = shared_loader_obj
- self._shell = self.get_shell()
+
+ # load the shell plugin for this action/connection
+ if self._connection_info.shell:
+ shell_type = self._connection_info.shell
+ elif hasattr(connection, '_shell'):
+ shell_type = getattr(connection, '_shell')
+ else:
+ shell_type = os.path.basename(C.DEFAULT_EXECUTABLE)
+
+ self._shell = shell_loader.get(shell_type)
+ if not self._shell:
+ raise AnsibleError("Invalid shell type specified (%s), or the plugin for that shell type is missing." % shell_type)
self._supports_check_mode = True
- def get_shell(self):
-
- if hasattr(self._connection, '_shell'):
- shell_plugin = getattr(self._connection, '_shell', '')
- else:
- shell_plugin = shell_loader.get(os.path.basename(C.DEFAULT_EXECUTABLE))
- if shell_plugin is None:
- shell_plugin = shell_loader.get('sh')
-
- return shell_plugin
-
def _configure_module(self, module_name, module_args):
'''
Handles the loading and templating of the module code through the
@@ -201,18 +202,13 @@ class ActionBase:
Copies the module data out to the temporary module path.
'''
- if type(data) == dict:
+ if isinstance(data, dict):
data = jsonify(data)
afd, afile = tempfile.mkstemp()
afo = os.fdopen(afd, 'w')
try:
- # FIXME: is this still necessary?
- #if not isinstance(data, unicode):
- # #ensure the data is valid UTF-8
- # data = data.decode('utf-8')
- #else:
- # data = data.encode('utf-8')
+ data = to_bytes(data, errors='strict')
afo.write(data)
except Exception as e:
#raise AnsibleError("failure encoding into utf-8: %s" % str(e))
diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
index 040c224448..4cf10709b9 100644
--- a/lib/ansible/vars/__init__.py
+++ b/lib/ansible/vars/__init__.py
@@ -212,9 +212,13 @@ class VariableManager:
# FIXME: make sure all special vars are here
# Finally, we create special vars
- if host and self._inventory is not None:
- hostvars = HostVars(vars_manager=self, inventory=self._inventory, loader=loader)
- all_vars['hostvars'] = hostvars
+
+ if host:
+ all_vars['groups'] = [group.name for group in host.get_groups()]
+
+ if self._inventory is not None:
+ hostvars = HostVars(vars_manager=self, inventory=self._inventory, loader=loader)
+ all_vars['hostvars'] = hostvars
if self._inventory is not None:
all_vars['inventory_dir'] = self._inventory.basedir()
From 7b1c6fbab906eba6056f6c573f4b54f8e099d9f2 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 11 May 2015 12:48:03 -0500
Subject: [PATCH 061/971] Fix playbook includes so tags are obeyed (v2)
---
lib/ansible/playbook/playbook_include.py | 5 +++--
samples/included_playbook.yml | 6 ++++++
samples/test_playbook.include | 2 ++
3 files changed, 11 insertions(+), 2 deletions(-)
create mode 100644 samples/included_playbook.yml
create mode 100644 samples/test_playbook.include
diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py
index 075e6dcbdf..1f4bddd4a3 100644
--- a/lib/ansible/playbook/playbook_include.py
+++ b/lib/ansible/playbook/playbook_include.py
@@ -61,10 +61,11 @@ class PlaybookInclude(Base, Taggable):
pb._load_playbook_data(file_name=file_name, variable_manager=variable_manager)
- # finally, playbook includes can specify a list of variables, which are simply
- # used to update the vars of each play in the playbook
+ # finally, update each loaded playbook entry with any variables specified
+ # on the included playbook and/or any tags which may have been set
for entry in pb._entries:
entry.vars.update(new_obj.vars)
+ entry.tags = list(set(entry.tags).union(new_obj.tags))
return pb
diff --git a/samples/included_playbook.yml b/samples/included_playbook.yml
new file mode 100644
index 0000000000..d56e9c68f7
--- /dev/null
+++ b/samples/included_playbook.yml
@@ -0,0 +1,6 @@
+- hosts: localhost
+ gather_facts: no
+ tags:
+ - included
+ tasks:
+ - debug: msg="included playbook, variable is {{a}}"
diff --git a/samples/test_playbook.include b/samples/test_playbook.include
new file mode 100644
index 0000000000..95c1a82147
--- /dev/null
+++ b/samples/test_playbook.include
@@ -0,0 +1,2 @@
+- include: included_playbook.yml a=1
+ tags: include
From fd321355d69cf2450549f44bfe1572d6f75a0dac Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 11 May 2015 14:04:17 -0500
Subject: [PATCH 062/971] Adding 'role_path' to VariableManager "magic"
variables (v2)
---
lib/ansible/vars/__init__.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
index 4cf10709b9..736b9529ef 100644
--- a/lib/ansible/vars/__init__.py
+++ b/lib/ansible/vars/__init__.py
@@ -212,7 +212,6 @@ class VariableManager:
# FIXME: make sure all special vars are here
# Finally, we create special vars
-
if host:
all_vars['groups'] = [group.name for group in host.get_groups()]
@@ -220,6 +219,10 @@ class VariableManager:
hostvars = HostVars(vars_manager=self, inventory=self._inventory, loader=loader)
all_vars['hostvars'] = hostvars
+ if task:
+ if task._role:
+ all_vars['role_path'] = task._role._role_path
+
if self._inventory is not None:
all_vars['inventory_dir'] = self._inventory.basedir()
From 8fdf9ae59b5c760c72451b0e863ec7c35a7c01cf Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 12 May 2015 12:18:55 -0400
Subject: [PATCH 063/971] moved module_doc_fragments to v2
---
{v1 => lib}/ansible/utils/module_docs_fragments/__init__.py | 0
{v1 => lib}/ansible/utils/module_docs_fragments/aws.py | 0
{v1 => lib}/ansible/utils/module_docs_fragments/cloudstack.py | 0
{v1 => lib}/ansible/utils/module_docs_fragments/files.py | 0
{v1 => lib}/ansible/utils/module_docs_fragments/openstack.py | 0
{v1 => lib}/ansible/utils/module_docs_fragments/rackspace.py | 0
{lib => v1}/ansible/utils/module_docs_fragments | 0
7 files changed, 0 insertions(+), 0 deletions(-)
rename {v1 => lib}/ansible/utils/module_docs_fragments/__init__.py (100%)
rename {v1 => lib}/ansible/utils/module_docs_fragments/aws.py (100%)
rename {v1 => lib}/ansible/utils/module_docs_fragments/cloudstack.py (100%)
rename {v1 => lib}/ansible/utils/module_docs_fragments/files.py (100%)
rename {v1 => lib}/ansible/utils/module_docs_fragments/openstack.py (100%)
rename {v1 => lib}/ansible/utils/module_docs_fragments/rackspace.py (100%)
rename {lib => v1}/ansible/utils/module_docs_fragments (100%)
diff --git a/v1/ansible/utils/module_docs_fragments/__init__.py b/lib/ansible/utils/module_docs_fragments/__init__.py
similarity index 100%
rename from v1/ansible/utils/module_docs_fragments/__init__.py
rename to lib/ansible/utils/module_docs_fragments/__init__.py
diff --git a/v1/ansible/utils/module_docs_fragments/aws.py b/lib/ansible/utils/module_docs_fragments/aws.py
similarity index 100%
rename from v1/ansible/utils/module_docs_fragments/aws.py
rename to lib/ansible/utils/module_docs_fragments/aws.py
diff --git a/v1/ansible/utils/module_docs_fragments/cloudstack.py b/lib/ansible/utils/module_docs_fragments/cloudstack.py
similarity index 100%
rename from v1/ansible/utils/module_docs_fragments/cloudstack.py
rename to lib/ansible/utils/module_docs_fragments/cloudstack.py
diff --git a/v1/ansible/utils/module_docs_fragments/files.py b/lib/ansible/utils/module_docs_fragments/files.py
similarity index 100%
rename from v1/ansible/utils/module_docs_fragments/files.py
rename to lib/ansible/utils/module_docs_fragments/files.py
diff --git a/v1/ansible/utils/module_docs_fragments/openstack.py b/lib/ansible/utils/module_docs_fragments/openstack.py
similarity index 100%
rename from v1/ansible/utils/module_docs_fragments/openstack.py
rename to lib/ansible/utils/module_docs_fragments/openstack.py
diff --git a/v1/ansible/utils/module_docs_fragments/rackspace.py b/lib/ansible/utils/module_docs_fragments/rackspace.py
similarity index 100%
rename from v1/ansible/utils/module_docs_fragments/rackspace.py
rename to lib/ansible/utils/module_docs_fragments/rackspace.py
diff --git a/lib/ansible/utils/module_docs_fragments b/v1/ansible/utils/module_docs_fragments
similarity index 100%
rename from lib/ansible/utils/module_docs_fragments
rename to v1/ansible/utils/module_docs_fragments
From 12a800c0e7586f98d91ebc5e41d7cf7eadd6bb69 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 12 May 2015 12:24:32 -0400
Subject: [PATCH 064/971] fixed less opts issue
---
lib/ansible/cli/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 4a7f5bbacc..98b524b44a 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -426,7 +426,7 @@ class CLI(object):
def pager_pipe(text, cmd):
''' pipe text through a pager '''
if 'LESS' not in os.environ:
- os.environ['LESS'] = LESS_OPTS
+ os.environ['LESS'] = self.LESS_OPTS
try:
cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
cmd.communicate(input=text)
From 09605706d9b8495f76b346616cf7bc4568e3e01f Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 12 May 2015 12:26:20 -0400
Subject: [PATCH 065/971] really fix it this time
---
lib/ansible/cli/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 98b524b44a..1e997f58d3 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -426,7 +426,7 @@ class CLI(object):
def pager_pipe(text, cmd):
''' pipe text through a pager '''
if 'LESS' not in os.environ:
- os.environ['LESS'] = self.LESS_OPTS
+ os.environ['LESS'] = CLI.LESS_OPTS
try:
cmd = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, stdout=sys.stdout)
cmd.communicate(input=text)
From 3697d6582fef5d01e3f2c5da8b3aa35ad5f35500 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 12 May 2015 11:10:22 -0500
Subject: [PATCH 066/971] Connection plugins no longer auto-connect (v2)
Also fixed a bug in ssh.py where an attribute was being inappropriately
initialized in the _connect() method instead of __init__()
---
lib/ansible/plugins/connections/__init__.py | 2 --
lib/ansible/plugins/connections/ssh.py | 2 +-
2 files changed, 1 insertion(+), 3 deletions(-)
diff --git a/lib/ansible/plugins/connections/__init__.py b/lib/ansible/plugins/connections/__init__.py
index 5558f5ba86..70807b08f6 100644
--- a/lib/ansible/plugins/connections/__init__.py
+++ b/lib/ansible/plugins/connections/__init__.py
@@ -54,8 +54,6 @@ class ConnectionBase:
if not hasattr(self, '_connected'):
self._connected = False
- self._connect()
-
def _become_method_supported(self, become_method):
''' Checks if the current class supports this privilege escalation method '''
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index 7c95cc3c0f..426dc6b49d 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -41,6 +41,7 @@ class Connection(ConnectionBase):
def __init__(self, *args, **kwargs):
# SSH connection specific init stuff
+ self._common_args = []
self.HASHED_KEY_MAGIC = "|1|"
self._has_pipelining = True
@@ -65,7 +66,6 @@ class Connection(ConnectionBase):
if self._connected:
return self
- self._common_args = []
extra_args = C.ANSIBLE_SSH_ARGS
if extra_args is not None:
# make sure there is no empty string added as this can produce weird errors
From 361eb291467258f4fbc29569510916bf7b253bc2 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 12 May 2015 11:30:08 -0500
Subject: [PATCH 067/971] Also make task_executor connect explicitly (v2)
---
lib/ansible/executor/task_executor.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 6d62eea68b..9bc875b02a 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -210,6 +210,7 @@ class TaskExecutor:
# get the connection and the handler for this execution
self._connection = self._get_connection(variables)
self._connection.set_host_overrides(host=self._host)
+ self._connection._connect()
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
From 1ca8cb8553c07dab5baf5c95646316970d29006b Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 12 May 2015 12:24:57 -0500
Subject: [PATCH 068/971] Fixing up v2 unit tests
---
lib/ansible/playbook/block.py | 2 +-
test/units/executor/test_play_iterator.py | 22 ++++++++++++++++------
test/units/playbook/test_play.py | 6 +++---
test/units/vars/test_variable_manager.py | 1 +
4 files changed, 21 insertions(+), 10 deletions(-)
diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py
index d65f787127..1bbc06183f 100644
--- a/lib/ansible/playbook/block.py
+++ b/lib/ansible/playbook/block.py
@@ -66,7 +66,7 @@ class Block(Base, Become, Conditional, Taggable):
return all_vars
@staticmethod
- def load(data, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
+ def load(data, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
b = Block(play=play, parent_block=parent_block, role=role, task_include=task_include, use_handlers=use_handlers)
return b.load_data(data, variable_manager=variable_manager, loader=loader)
diff --git a/test/units/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py
index 2fa32c7119..7f8ed4d681 100644
--- a/test/units/executor/test_play_iterator.py
+++ b/test/units/executor/test_play_iterator.py
@@ -23,6 +23,7 @@ from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.executor.connection_info import ConnectionInformation
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook import Playbook
@@ -67,19 +68,28 @@ class TestPlayIterator(unittest.TestCase):
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
- itr = PlayIterator(inventory, p._entries[0])
- task = itr.get_next_task_for_host(hosts[0])
+ connection_info = ConnectionInformation(play=p._entries[0])
+
+ itr = PlayIterator(
+ inventory=inventory,
+ play=p._entries[0],
+ connection_info=connection_info,
+ all_vars=dict(),
+ )
+
+ (host_state, task) = itr.get_next_task_for_host(hosts[0])
print(task)
self.assertIsNotNone(task)
- task = itr.get_next_task_for_host(hosts[0])
+ (host_state, task) = itr.get_next_task_for_host(hosts[0])
print(task)
self.assertIsNotNone(task)
- task = itr.get_next_task_for_host(hosts[0])
+ (host_state, task) = itr.get_next_task_for_host(hosts[0])
print(task)
self.assertIsNotNone(task)
- task = itr.get_next_task_for_host(hosts[0])
+ (host_state, task) = itr.get_next_task_for_host(hosts[0])
print(task)
self.assertIsNotNone(task)
- task = itr.get_next_task_for_host(hosts[0])
+ (host_state, task) = itr.get_next_task_for_host(hosts[0])
print(task)
self.assertIsNone(task)
+
diff --git a/test/units/playbook/test_play.py b/test/units/playbook/test_play.py
index 637b6dbbe1..561da36272 100644
--- a/test/units/playbook/test_play.py
+++ b/test/units/playbook/test_play.py
@@ -23,9 +23,9 @@ from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.playbook.block import Block
from ansible.playbook.play import Play
from ansible.playbook.role import Role
-from ansible.playbook.task import Task
from units.mock.loader import DictDataLoader
@@ -39,7 +39,7 @@ class TestPlay(unittest.TestCase):
def test_empty_play(self):
p = Play.load(dict())
- self.assertEqual(str(p), "PLAY: ")
+ self.assertEqual(str(p), "PLAY: ")
def test_basic_play(self):
p = Play.load(dict(
@@ -129,4 +129,4 @@ class TestPlay(unittest.TestCase):
tasks = p.compile()
self.assertEqual(len(tasks), 1)
- self.assertIsInstance(tasks[0], Task)
+ self.assertIsInstance(tasks[0], Block)
diff --git a/test/units/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
index 173ba1370d..9abed8f948 100644
--- a/test/units/vars/test_variable_manager.py
+++ b/test/units/vars/test_variable_manager.py
@@ -137,6 +137,7 @@ class TestVariableManager(unittest.TestCase):
fake_loader = DictDataLoader({})
mock_task = MagicMock()
+ mock_task._role = None
mock_task.get_vars.return_value = dict(foo="bar")
v = VariableManager()
From 9b646dea41e68c3b68c2b16d87c604b38990bfd4 Mon Sep 17 00:00:00 2001
From: Serge van Ginderachter
Date: Tue, 12 May 2015 12:51:35 -0500
Subject: [PATCH 069/971] Add optional 'skip_missing' flag to subelements
---
docsite/rst/playbooks_loops.rst | 33 ++++++++-
lib/ansible/plugins/lookup/subelements.py | 72 +++++++++++++++----
.../roles/test_iterators/tasks/main.yml | 35 ++++++++-
.../roles/test_iterators/vars/main.yml | 34 +++++++++
4 files changed, 157 insertions(+), 17 deletions(-)
diff --git a/docsite/rst/playbooks_loops.rst b/docsite/rst/playbooks_loops.rst
index e71c81cefc..5456791f61 100644
--- a/docsite/rst/playbooks_loops.rst
+++ b/docsite/rst/playbooks_loops.rst
@@ -147,9 +147,26 @@ How might that be accomplished? Let's assume you had the following defined and
authorized:
- /tmp/alice/onekey.pub
- /tmp/alice/twokey.pub
+ mysql:
+ password: mysql-password
+ hosts:
+ - "%"
+ - "127.0.0.1"
+ - "::1"
+ - "localhost"
+ privs:
+ - "*.*:SELECT"
+ - "DB1.*:ALL"
- name: bob
authorized:
- /tmp/bob/id_rsa.pub
+ mysql:
+ password: other-mysql-password
+ hosts:
+ - "db1"
+ privs:
+ - "*.*:SELECT"
+ - "DB2.*:ALL"
It might happen like so::
@@ -161,9 +178,23 @@ It might happen like so::
- users
- authorized
-Subelements walks a list of hashes (aka dictionaries) and then traverses a list with a given key inside of those
+Given the mysql hosts and privs subkey lists, you can also iterate over a list in a nested subkey::
+
+ - name: Setup MySQL users
+ mysql_user: name={{ item.0.user }} password={{ item.0.mysql.password }} host={{ item.1 }} priv={{ item.0.mysql.privs | join('/') }}
+ with_subelements:
+ - users
+ - mysql.hosts
+
+Subelements walks a list of hashes (aka dictionaries) and then traverses a list with a given (nested sub-)key inside of those
records.
+Optionally, you can add a third element to the subelements list, that holds a
+dictionary of flags. Currently you can add the 'skip_missing' flag. If set to
+True, the lookup plugin will skip the list items that do not contain the given
+subkey. Without this flag, or if that flag is set to False, the plugin will
+raise an error and complain about the missing subkey.
+
The authorized_key pattern is exactly where it comes up most.
.. _looping_over_integer_sequences:
diff --git a/lib/ansible/plugins/lookup/subelements.py b/lib/ansible/plugins/lookup/subelements.py
index 09a2ca306a..0636387be6 100644
--- a/lib/ansible/plugins/lookup/subelements.py
+++ b/lib/ansible/plugins/lookup/subelements.py
@@ -20,40 +20,82 @@ __metaclass__ = type
from ansible.errors import *
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
+from ansible.utils.boolean import boolean
+
+FLAGS = ('skip_missing',)
+
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
- terms[0] = listify_lookup_plugin_terms(terms[0], variables, loader=self._loader)
+ def _raise_terms_error(msg=""):
+ raise errors.AnsibleError(
+ "subelements lookup expects a list of two or three items, "
+ + msg)
+ terms = listify_lookup_plugin_terms(terms, self.basedir, inject)
+ terms[0] = listify_lookup_plugin_terms(terms[0], self.basedir, inject)
- if not isinstance(terms, list) or not len(terms) == 2:
- raise AnsibleError("subelements lookup expects a list of two items, first a dict or a list, and second a string")
+ # check lookup terms - check number of terms
+ if not isinstance(terms, list) or not 2 <= len(terms) <= 3:
+ _raise_terms_error()
- if isinstance(terms[0], dict): # convert to list:
- if terms[0].get('skipped',False) != False:
+ # first term should be a list (or dict), second a string holding the subkey
+ if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], basestring):
+ _raise_terms_error("first a dict or a list, second a string pointing to the subkey")
+ subelements = terms[1].split(".")
+
+ if isinstance(terms[0], dict): # convert to list:
+ if terms[0].get('skipped', False) is not False:
# the registered result was completely skipped
return []
elementlist = []
for key in terms[0].iterkeys():
elementlist.append(terms[0][key])
- else:
+ else:
elementlist = terms[0]
- subelement = terms[1]
+ # check for optional flags in third term
+ flags = {}
+ if len(terms) == 3:
+ flags = terms[2]
+ if not isinstance(flags, dict) and not all([isinstance(key, basestring) and key in FLAGS for key in flags]):
+ _raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS)
+ # build_items
ret = []
for item0 in elementlist:
if not isinstance(item0, dict):
- raise AnsibleError("subelements lookup expects a dictionary, got '%s'" %item0)
- if item0.get('skipped', False) != False:
+ raise errors.AnsibleError("subelements lookup expects a dictionary, got '%s'" % item0)
+ if item0.get('skipped', False) is not False:
# this particular item is to be skipped
- continue
- if not subelement in item0:
- raise AnsibleError("could not find '%s' key in iterated item '%s'" % (subelement, item0))
- if not isinstance(item0[subelement], list):
- raise AnsibleError("the key %s should point to a list, got '%s'" % (subelement, item0[subelement]))
- sublist = item0.pop(subelement, [])
+ continue
+
+ skip_missing = boolean(flags.get('skip_missing', False))
+ subvalue = item0
+ lastsubkey = False
+ sublist = []
+ for subkey in subelements:
+ if subkey == subelements[-1]:
+ lastsubkey = True
+ if not subkey in subvalue:
+ if skip_missing:
+ continue
+ else:
+ raise errors.AnsibleError("could not find '%s' key in iterated item '%s'" % (subkey, subvalue))
+ if not lastsubkey:
+ if not isinstance(subvalue[subkey], dict):
+ if skip_missing:
+ continue
+ else:
+ raise errors.AnsibleError("the key %s should point to a dictionary, got '%s'" % (subkey, subvalue[subkey]))
+ else:
+ subvalue = subvalue[subkey]
+ else: # lastsubkey
+ if not isinstance(subvalue[subkey], list):
+ raise errors.AnsibleError("the key %s should point to a list, got '%s'" % (subkey, subvalue[subkey]))
+ else:
+ sublist = subvalue.pop(subkey, [])
for item1 in sublist:
ret.append((item0, item1))
diff --git a/test/integration/roles/test_iterators/tasks/main.yml b/test/integration/roles/test_iterators/tasks/main.yml
index c95eaff3da..931e304582 100644
--- a/test/integration/roles/test_iterators/tasks/main.yml
+++ b/test/integration/roles/test_iterators/tasks/main.yml
@@ -39,7 +39,7 @@
set_fact: "{{ item.0 + item.1 }}=x"
with_nested:
- [ 'a', 'b' ]
- - [ 'c', 'd' ]
+ - [ 'c', 'd' ]
- debug: var=ac
- debug: var=ad
@@ -97,6 +97,39 @@
- "_ye == 'e'"
- "_yf == 'f'"
+- name: test with_subelements in subkeys
+ set_fact: "{{ '_'+ item.0.id + item.1 }}={{ item.1 }}"
+ with_subelements:
+ - element_data
+ - the.sub.key.list
+
+- name: verify with_subelements in subkeys results
+ assert:
+ that:
+ - "_xq == 'q'"
+ - "_xr == 'r'"
+ - "_yi == 'i'"
+ - "_yo == 'o'"
+
+- name: test with_subelements with missing key or subkey
+ set_fact: "{{ '_'+ item.0.id + item.1 }}={{ item.1 }}"
+ with_subelements:
+ - element_data_missing
+ - the.sub.key.list
+ - skip_missing: yes
+ register: _subelements_missing_subkeys
+
+- debug: var=_subelements_missing_subkeys.skipped
+- debug: var=_subelements_missing_subkeys.results|length
+- name: verify with_subelements in subkeys results
+ assert:
+ that:
+ - _subelements_missing_subkeys.skipped is not defined
+ - _subelements_missing_subkeys.results|length == 2
+ - "_xk == 'k'"
+ - "_xl == 'l'"
+
+
# WITH_TOGETHER
- name: test with_together
diff --git a/test/integration/roles/test_iterators/vars/main.yml b/test/integration/roles/test_iterators/vars/main.yml
index cd0078c9a9..f7ef50f57a 100644
--- a/test/integration/roles/test_iterators/vars/main.yml
+++ b/test/integration/roles/test_iterators/vars/main.yml
@@ -3,7 +3,41 @@ element_data:
the_list:
- "f"
- "d"
+ the:
+ sub:
+ key:
+ list:
+ - "q"
+ - "r"
- id: y
the_list:
- "e"
- "f"
+ the:
+ sub:
+ key:
+ list:
+ - "i"
+ - "o"
+element_data_missing:
+ - id: x
+ the_list:
+ - "f"
+ - "d"
+ the:
+ sub:
+ key:
+ list:
+ - "k"
+ - "l"
+ - id: y
+ the_list:
+ - "f"
+ - "d"
+ - id: z
+ the_list:
+ - "e"
+ - "f"
+ the:
+ sub:
+ key:
From d0d0e9933f7a515bbb2c951ef106e3006fc29bb7 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Tue, 12 May 2015 11:03:11 -0700
Subject: [PATCH 070/971] Update module refs in v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 42abf85be7..2a6a79c367 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 42abf85be7acbd95f6904a313c34a9495e99ca14
+Subproject commit 2a6a79c3675b56bf3a171feb1f310689c01e894e
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 6bf4558df8..8afc822d0c 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 6bf4558df8c61ae457dc7e5be58855d2931b607f
+Subproject commit 8afc822d0c6b89eee710cf989612a3d2c137cb3c
From b03b7892f8ca3f62371863da22542b38fdb5d3be Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 12 May 2015 13:08:46 -0500
Subject: [PATCH 071/971] Fix method of exiting task loop (v2)
---
lib/ansible/plugins/strategies/linear.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
index bd510dc557..f1efadd547 100644
--- a/lib/ansible/plugins/strategies/linear.py
+++ b/lib/ansible/plugins/strategies/linear.py
@@ -226,7 +226,7 @@ class StrategyModule(StrategyBase):
# FIXME: this should also be moved to the base class in a method
included_files = []
for res in host_results:
- if res.is_failed():
+ if res._host in self._tqm._failed_hosts:
return 1
if res._task.action == 'include':
From dcb54d9657882638a1ccd661d83d8400d9d47499 Mon Sep 17 00:00:00 2001
From: Jan Losinski
Date: Tue, 12 May 2015 18:43:16 +0200
Subject: [PATCH 072/971] Add integration test to verify #10073
In issue #10073 a misbehaviour in literal handling for inline lookup
arguments that can cause unexpected behaviur was reported. This
integration testcase reproduce the problem.
After applying pull request #10991 the issue is fixed and the test
passes.
Signed-off-by: Jan Losinski
---
.../roles/test_lookups/tasks/main.yml | 23 +++++++++++++++++++
1 file changed, 23 insertions(+)
diff --git a/test/integration/roles/test_lookups/tasks/main.yml b/test/integration/roles/test_lookups/tasks/main.yml
index 8440ff5772..f9970f70a2 100644
--- a/test/integration/roles/test_lookups/tasks/main.yml
+++ b/test/integration/roles/test_lookups/tasks/main.yml
@@ -129,3 +129,26 @@
debug: msg={{item}}
with_items: things2
+
+# BUG #10073 nested template handling
+
+- name: set variable that clashes
+ set_fact:
+ LOGNAME: foobar
+
+
+- name: get LOGNAME environment var value
+ shell: echo {{ '$LOGNAME' }}
+ register: known_var_value
+
+- name: do the lookup for env LOGNAME
+ set_fact:
+ test_val: "{{ lookup('env', 'LOGNAME') }}"
+
+- debug: var=test_val
+
+- name: compare values
+ assert:
+ that:
+ - "test_val == known_var_value.stdout"
+
From 4d999f8fe014e3fd11f9fe2146f3c99f1e355e48 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 12 May 2015 15:08:35 -0500
Subject: [PATCH 073/971] Fix logic error in parent attribute retrieval for
blocks/roles (v2)
---
lib/ansible/playbook/block.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py
index 1bbc06183f..a82aae1e67 100644
--- a/lib/ansible/playbook/block.py
+++ b/lib/ansible/playbook/block.py
@@ -274,15 +274,20 @@ class Block(Base, Become, Conditional, Taggable):
value = parent_value
if self._role and (not value or extend):
parent_value = getattr(self._role, attr)
+ if extend:
+ value = self._extend_value(value, parent_value)
+ else:
+ value = parent_value
+
if len(self._dep_chain) and (not value or extend):
reverse_dep_chain = self._dep_chain[:]
reverse_dep_chain.reverse()
for dep in reverse_dep_chain:
dep_value = getattr(dep, attr)
if extend:
- value = self._extend_value(value, parent_value)
+ value = self._extend_value(value, dep_value)
else:
- value = parent_value
+ value = dep_value
if value and not extend:
break
From 830225d9c14b002babb9b8d10a3e1d7be31a97bd Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 12 May 2015 15:09:03 -0500
Subject: [PATCH 074/971] Fix errors in subelements lookup plugin and
associated tests (v2)
---
lib/ansible/plugins/lookup/subelements.py | 4 ++--
test/integration/roles/test_iterators/tasks/main.yml | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/lib/ansible/plugins/lookup/subelements.py b/lib/ansible/plugins/lookup/subelements.py
index 0636387be6..b934a053eb 100644
--- a/lib/ansible/plugins/lookup/subelements.py
+++ b/lib/ansible/plugins/lookup/subelements.py
@@ -33,8 +33,8 @@ class LookupModule(LookupBase):
raise errors.AnsibleError(
"subelements lookup expects a list of two or three items, "
+ msg)
- terms = listify_lookup_plugin_terms(terms, self.basedir, inject)
- terms[0] = listify_lookup_plugin_terms(terms[0], self.basedir, inject)
+ terms = listify_lookup_plugin_terms(terms, variables, loader=self._loader)
+ terms[0] = listify_lookup_plugin_terms(terms[0], variables, loader=self._loader)
# check lookup terms - check number of terms
if not isinstance(terms, list) or not 2 <= len(terms) <= 3:
diff --git a/test/integration/roles/test_iterators/tasks/main.yml b/test/integration/roles/test_iterators/tasks/main.yml
index 931e304582..539ac2a4e7 100644
--- a/test/integration/roles/test_iterators/tasks/main.yml
+++ b/test/integration/roles/test_iterators/tasks/main.yml
@@ -119,7 +119,7 @@
- skip_missing: yes
register: _subelements_missing_subkeys
-- debug: var=_subelements_missing_subkeys.skipped
+- debug: var=_subelements_missing_subkeys
- debug: var=_subelements_missing_subkeys.results|length
- name: verify with_subelements in subkeys results
assert:
From 079fca27a20aefef17d3b572f6934c3d1d4e0040 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 13 May 2015 06:57:04 -0700
Subject: [PATCH 075/971] Update module refs for v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 2a6a79c367..46a5531893 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 2a6a79c3675b56bf3a171feb1f310689c01e894e
+Subproject commit 46a553189331dcbe2017aa47345c1c10640263bc
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 8afc822d0c..aa86c5ff90 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 8afc822d0c6b89eee710cf989612a3d2c137cb3c
+Subproject commit aa86c5ff9010a5201c8ee5ffd2b0045abfaba899
From 3861597696e3504c78eb4f08172682c4816eca7d Mon Sep 17 00:00:00 2001
From: Aleksey Zhukov
Date: Wed, 13 May 2015 18:12:48 +0300
Subject: [PATCH 076/971] Bring back cache
---
plugins/inventory/digital_ocean.ini | 9 +-
plugins/inventory/digital_ocean.py | 156 ++++++++++++++++++++++------
2 files changed, 129 insertions(+), 36 deletions(-)
diff --git a/plugins/inventory/digital_ocean.ini b/plugins/inventory/digital_ocean.ini
index c4e3fe2141..021899731c 100644
--- a/plugins/inventory/digital_ocean.ini
+++ b/plugins/inventory/digital_ocean.ini
@@ -3,12 +3,11 @@
[digital_ocean]
-# The module needs your DigitalOcean Client ID and API Key.
-# These may also be specified on the command line via --client-id and --api-key
-# or via the environment variables DO_CLIENT_ID and DO_API_KEY
+# The module needs your DigitalOcean API Token.
+# It may also be specified on the command line via --api-token
+# or via the environment variables DO_API_TOKEN or DO_API_KEY
#
-#client_id = abcdefg123456
-#api_key = 123456abcdefg
+#api_token = 123456abcdefg
# API calls to DigitalOcean may be slow. For this reason, we cache the results
diff --git a/plugins/inventory/digital_ocean.py b/plugins/inventory/digital_ocean.py
index 29c4856efb..9bfb184d57 100755
--- a/plugins/inventory/digital_ocean.py
+++ b/plugins/inventory/digital_ocean.py
@@ -24,12 +24,12 @@ found. You can force this script to use the cache with --force-cache.
Configuration is read from `digital_ocean.ini`, then from environment variables,
then and command-line arguments.
-Most notably, the DigitalOcean Client ID and API Key must be specified. They
-can be specified in the INI file or with the following environment variables:
- export DO_CLIENT_ID='DO123' DO_API_KEY='abc123'
+Most notably, the DigitalOcean API Token must be specified. It can be specified
+in the INI file or with the following environment variables:
+ export DO_API_TOKEN='abc123' or
+ export DO_API_KEY='abc123'
-Alternatively, they can be passed on the command-line with --client-id and
---api-key.
+Alternatively, it can be passed on the command-line with --api-token.
If you specify DigitalOcean credentials in the INI file, a handy way to
get them into your environment (e.g., to use the digital_ocean module)
@@ -43,31 +43,40 @@ The following groups are generated from --list:
- image_ID
- image_NAME
- distro_NAME (distribution NAME from image)
- - region_ID
- region_NAME
- - size_ID
- size_NAME
- status_STATUS
When run against a specific host, this script returns the following variables:
+ - do_backup_ids
- do_created_at
- - do_distroy
+ - do_disk
+ - do_features - list
- do_id
- - do_image
- - do_image_id
+ - do_image - object
- do_ip_address
+ - do_kernel - object
+ - do_locked
+ - do_memory
- do_name
- - do_region
- - do_region_id
- - do_size
- - do_size_id
+ - do_networks - object
+ - do_next_backup_window
+ - do_region - object
+ - do_size - object
+ - do_size_slug
+ - do_snapshot_ids - list
- do_status
+ - do_vcpus
-----
```
usage: digital_ocean.py [-h] [--list] [--host HOST] [--all]
[--droplets] [--regions] [--images] [--sizes]
[--ssh-keys] [--domains] [--pretty]
+ [--cache-path CACHE_PATH]
+ [--cache-max_age CACHE_MAX_AGE]
+ [--force-cache]
+ [--refresh-cache]
[--api-token API_TOKEN]
Produce an Ansible Inventory file based on DigitalOcean credentials
@@ -86,6 +95,13 @@ optional arguments:
--ssh-keys List SSH keys as JSON
--domains List Domains as JSON
--pretty, -p Pretty-print results
+ --cache-path CACHE_PATH
+ Path to the cache files (default: .)
+ --cache-max_age CACHE_MAX_AGE
+ Maximum age of the cached items (default: 0)
+ --force-cache Only use data from the cache
+ --refresh-cache Force refresh of cache by making API requests to
+ DigitalOcean (default: False - use cache files)
--api-token API_TOKEN, -a API_TOKEN
DigitalOcean API Token
```
@@ -147,6 +163,10 @@ class DigitalOceanInventory(object):
self.data = {} # All DigitalOcean data
self.inventory = {} # Ansible Inventory
+ # Define defaults
+ self.cache_path = '.'
+ self.cache_max_age = 0
+
# Read settings, environment variables, and CLI arguments
self.read_settings()
self.read_environment()
@@ -164,27 +184,45 @@ or environment variables (DO_API_TOKEN)'''
print "DO_API_TOKEN=%s" % self.api_token
sys.exit(0)
+ # Manage cache
+ self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
+ self.cache_refreshed = False
+
+ if self.is_cache_valid:
+ self.load_from_cache()
+ if len(self.data) == 0:
+ if self.args.force_cache:
+ print '''Cache is empty and --force-cache was specified'''
+ sys.exit(-1)
+
self.manager = DoManager(None, self.api_token, api_version=2)
# Pick the json_data to print based on the CLI command
if self.args.droplets:
- json_data = self.load_from_digital_ocean('droplets')
+ self.load_from_digital_ocean('droplets')
+ json_data = {'droplets': self.data['droplets']}
elif self.args.regions:
- json_data = self.load_from_digital_ocean('regions')
+ self.load_from_digital_ocean('regions')
+ json_data = {'regions': self.data['regions']}
elif self.args.images:
- json_data = self.load_from_digital_ocean('images')
+ self.load_from_digital_ocean('images')
+ json_data = {'images': self.data['images']}
elif self.args.sizes:
- json_data = self.load_from_digital_ocean('sizes')
+ self.load_from_digital_ocean('sizes')
+ json_data = {'sizes': self.data['sizes']}
elif self.args.ssh_keys:
- json_data = self.load_from_digital_ocean('ssh_keys')
+ self.load_from_digital_ocean('ssh_keys')
+ json_data = {'ssh_keys': self.data['ssh_keys']}
elif self.args.domains:
- json_data = self.load_from_digital_ocean('domains')
+ self.load_from_digital_ocean('domains')
+ json_data = {'domains': self.data['domains']}
elif self.args.all:
- json_data = self.load_from_digital_ocean()
+ self.load_from_digital_ocean()
+ json_data = self.data
elif self.args.host:
json_data = self.load_droplet_variables_for_host()
else: # '--list' this is last to make it default
- self.data = self.load_from_digital_ocean('droplets')
+ self.load_from_digital_ocean('droplets')
self.build_inventory()
json_data = self.inventory
@@ -241,6 +279,12 @@ or environment variables (DO_API_TOKEN)'''
parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results')
+ parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
+ parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
+ parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
+ parser.add_argument('--refresh-cache','-r', action='store_true', default=False,
+ help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
+
parser.add_argument('--env','-e', action='store_true', help='Display DO_API_TOKEN')
parser.add_argument('--api-token','-a', action='store', help='DigitalOcean API Token')
@@ -263,20 +307,25 @@ or environment variables (DO_API_TOKEN)'''
def load_from_digital_ocean(self, resource=None):
'''Get JSON from DigitalOcean API'''
- json_data = {}
+ if self.args.force_cache:
+ return
+ if self.args.refresh_cache:
+ resource=None
+
if resource == 'droplets' or resource is None:
- json_data['droplets'] = self.manager.all_active_droplets()
+ self.data['droplets'] = self.manager.all_active_droplets()
if resource == 'regions' or resource is None:
- json_data['regions'] = self.manager.all_regions()
+ self.data['regions'] = self.manager.all_regions()
if resource == 'images' or resource is None:
- json_data['images'] = self.manager.all_images(filter=None)
+ self.data['images'] = self.manager.all_images(filter=None)
if resource == 'sizes' or resource is None:
- json_data['sizes'] = self.manager.sizes()
+ self.data['sizes'] = self.manager.sizes()
if resource == 'ssh_keys' or resource is None:
- json_data['ssh_keys'] = self.manager.all_ssh_keys()
+ self.data['ssh_keys'] = self.manager.all_ssh_keys()
if resource == 'domains' or resource is None:
- json_data['domains'] = self.manager.all_domains()
- return json_data
+ self.data['domains'] = self.manager.all_domains()
+
+ self.write_to_cache()
def build_inventory(self):
@@ -309,8 +358,53 @@ or environment variables (DO_API_TOKEN)'''
'''Generate a JSON response to a --host call'''
host = int(self.args.host)
- return self.manager.show_droplet(host)
+ droplet = self.manager.show_droplet(host)
+ # Put all the information in a 'do_' namespace
+ info = {}
+ for k, v in droplet.items():
+ info['do_'+k] = v
+
+ return {'droplet': info}
+
+
+
+ ###########################################################################
+ # Cache Management
+ ###########################################################################
+
+ def is_cache_valid(self):
+ ''' Determines if the cache files have expired, or if it is still valid '''
+ if os.path.isfile(self.cache_filename):
+ mod_time = os.path.getmtime(self.cache_filename)
+ current_time = time()
+ if (mod_time + self.cache_max_age) > current_time:
+ return True
+ return False
+
+
+ def load_from_cache(self):
+ ''' Reads the data from the cache file and assigns it to member variables as Python Objects'''
+ try:
+ cache = open(self.cache_filename, 'r')
+ json_data = cache.read()
+ cache.close()
+ data = json.loads(json_data)
+ except IOError:
+ data = {'data': {}, 'inventory': {}}
+
+ self.data = data['data']
+ self.inventory = data['inventory']
+
+
+ def write_to_cache(self):
+ ''' Writes data in JSON format to a file '''
+ data = { 'data': self.data, 'inventory': self.inventory }
+ json_data = json.dumps(data, sort_keys=True, indent=2)
+
+ cache = open(self.cache_filename, 'w')
+ cache.write(json_data)
+ cache.close()
###########################################################################
From b85ce3883451e20c7869dce39d795ba6cf62ed08 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Wed, 13 May 2015 11:15:04 -0400
Subject: [PATCH 077/971] slight changes to error handling to align with v1
---
bin/ansible | 18 ++++++++++++++----
lib/ansible/cli/adhoc.py | 2 +-
2 files changed, 15 insertions(+), 5 deletions(-)
diff --git a/bin/ansible b/bin/ansible
index 467dd505a2..12ad89fcff 100755
--- a/bin/ansible
+++ b/bin/ansible
@@ -35,7 +35,7 @@ except Exception:
import os
import sys
-from ansible.errors import AnsibleError, AnsibleOptionsError
+from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.utils.display import Display
########################################################
@@ -70,10 +70,20 @@ if __name__ == '__main__':
except AnsibleOptionsError as e:
cli.parser.print_help()
display.display(str(e), stderr=True, color='red')
- sys.exit(1)
+ sys.exit(5)
+ except AnsibleParserError as e:
+ display.display(str(e), stderr=True, color='red')
+ sys.exit(4)
+# TQM takes care of these, but leaving comment to reserve the exit codes
+# except AnsibleHostUnreachable as e:
+# display.display(str(e), stderr=True, color='red')
+# sys.exit(3)
+# except AnsibleHostFailed as e:
+# display.display(str(e), stderr=True, color='red')
+# sys.exit(2)
except AnsibleError as e:
display.display(str(e), stderr=True, color='red')
- sys.exit(2)
+ sys.exit(1)
except KeyboardInterrupt:
display.error("interrupted")
- sys.exit(4)
+ sys.exit(99)
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index f7692a1335..9a055e5e62 100644
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -105,7 +105,7 @@ class AdHocCLI(CLI):
return 0
if self.options.module_name in C.MODULE_REQUIRE_ARGS and not self.options.module_args:
- raise AnsibleError("No argument passed to %s module" % self.options.module_name)
+ raise AnsibleOptionsError("No argument passed to %s module" % self.options.module_name)
#TODO: implement async support
#if self.options.seconds:
From b94e2a1f4ee1631d311f6943f6653c391d5022de Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Wed, 13 May 2015 11:27:12 -0500
Subject: [PATCH 078/971] Fixing bugs related to parsing and fixing up parsing
integration tests (v2)
---
lib/ansible/parsing/mod_args.py | 18 +++++++++++++----
lib/ansible/plugins/strategies/__init__.py | 2 +-
test/integration/Makefile | 10 +++++-----
.../roles/test_good_parsing/tasks/main.yml | 20 +++++++++----------
4 files changed, 30 insertions(+), 20 deletions(-)
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index ed527f1b08..87b3813d8f 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -264,13 +264,23 @@ class ModuleArgsParser:
thing = value
action, args = self._normalize_parameters(value, action=action, additional_args=additional_args)
+ # FIXME: this should probably be somewhere else
+ RAW_PARAM_MODULES = (
+ 'command',
+ 'shell',
+ 'script',
+ 'include',
+ 'include_vars',
+ 'add_host',
+ 'group_by',
+ 'set_fact',
+ 'meta',
+ )
# if we didn't see any module in the task at all, it's not a task really
if action is None:
raise AnsibleParserError("no action detected in task", obj=self._task_ds)
- # FIXME: disabled for now, as there are other places besides the shell/script modules where
- # having variables as the sole param for the module is valid (include_vars, add_host, and group_by?)
- #elif args.get('_raw_params', '') != '' and action not in ('command', 'shell', 'script', 'include_vars'):
- # raise AnsibleParserError("this task has extra params, which is only allowed in the command, shell or script module.", obj=self._task_ds)
+ elif args.get('_raw_params', '') != '' and action not in RAW_PARAM_MODULES:
+ raise AnsibleParserError("this task '%s' has extra params, which is only allowed in the following modules: %s" % (action, ", ".join(RAW_PARAM_MODULES)), obj=self._task_ds)
# shell modules require special handling
(action, args) = self._handle_shell_weirdness(action, args)
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index f610334371..a3668ba089 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -335,7 +335,7 @@ class StrategyBase:
# set the vars for this task from those specified as params to the include
for b in block_list:
- b._vars = included_file._args.copy()
+ b.vars = included_file._args.copy()
return block_list
diff --git a/test/integration/Makefile b/test/integration/Makefile
index 28de76c7cd..3ee38b0ab7 100644
--- a/test/integration/Makefile
+++ b/test/integration/Makefile
@@ -24,11 +24,11 @@ CONSUL_RUNNING := $(shell python consul_running.py)
all: parsing test_var_precedence unicode test_templating_settings non_destructive destructive includes check_mode test_hash test_handlers test_group_by test_vault test_tags
parsing:
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario1; [ $$? -eq 3 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario2; [ $$? -eq 3 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario3; [ $$? -eq 3 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario4; [ $$? -eq 3 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5; [ $$? -eq 3 ]
+ ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario1; [ $$? -eq 4 ]
+ ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario2; [ $$? -eq 4 ]
+ ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario3; [ $$? -eq 4 ]
+ ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario4; [ $$? -eq 4 ]
+ ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5; [ $$? -eq 4 ]
ansible-playbook good_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
includes:
diff --git a/test/integration/roles/test_good_parsing/tasks/main.yml b/test/integration/roles/test_good_parsing/tasks/main.yml
index 27475ce0f5..482d0efac5 100644
--- a/test/integration/roles/test_good_parsing/tasks/main.yml
+++ b/test/integration/roles/test_good_parsing/tasks/main.yml
@@ -152,17 +152,17 @@
that:
- complex_param == "this is a param in a complex arg with double quotes"
-- name: test variable module name
- action: "{{ variable_module_name }} msg='this should be debugged'"
- register: result
+#- name: test variable module name
+# action: "{{ variable_module_name }} msg='this should be debugged'"
+# register: result
+#
+#- debug: var=result
-- debug: var=result
-
-- name: assert the task with variable module name ran
- assert:
- that:
- - result.invocation.module_name == "debug"
- - result.msg == "this should be debugged"
+#- name: assert the task with variable module name ran
+# assert:
+# that:
+# - result.invocation.module_name == "debug"
+# - result.msg == "this should be debugged"
- name: test conditional includes
include: test_include_conditional.yml
From bbda86ad0a43183236e58c44a63db93b9631deac Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 13 May 2015 11:04:12 -0700
Subject: [PATCH 079/971] Fix parsing tests so that they all run
---
.../roles/test_bad_parsing/tasks/main.yml | 20 ++++++++-----------
.../test_bad_parsing/tasks/scenario1.yml | 5 +++++
.../test_bad_parsing/tasks/scenario2.yml | 5 +++++
.../test_bad_parsing/tasks/scenario3.yml | 5 +++++
.../test_bad_parsing/tasks/scenario4.yml | 5 +++++
5 files changed, 28 insertions(+), 12 deletions(-)
create mode 100644 test/integration/roles/test_bad_parsing/tasks/scenario1.yml
create mode 100644 test/integration/roles/test_bad_parsing/tasks/scenario2.yml
create mode 100644 test/integration/roles/test_bad_parsing/tasks/scenario3.yml
create mode 100644 test/integration/roles/test_bad_parsing/tasks/scenario4.yml
diff --git a/test/integration/roles/test_bad_parsing/tasks/main.yml b/test/integration/roles/test_bad_parsing/tasks/main.yml
index 3899821de6..4636383d9e 100644
--- a/test/integration/roles/test_bad_parsing/tasks/main.yml
+++ b/test/integration/roles/test_bad_parsing/tasks/main.yml
@@ -29,24 +29,20 @@
- file: name={{test_file}} state=touch
tags: common
-- name: test that we cannot insert arguments
- file: path={{ test_file }} {{ test_input }}
- failed_when: False # ignore the module, just test the parser
+- name: include test that we cannot insert arguments
+ include: scenario1.yml
tags: scenario1
-- name: test that we cannot duplicate arguments
- file: path={{ test_file }} owner=test2 {{ test_input }}
- failed_when: False # ignore the module, just test the parser
+- name: include test that we cannot duplicate arguments
+ include: scenario2.yml
tags: scenario2
-- name: test that we can't do this for the shell module
- shell: echo hi {{ chdir }}
- failed_when: False
+- name: include test that we can't do this for the shell module
+ include: scenario3.yml
tags: scenario3
-- name: test that we can't go all Little Bobby Droptables on a quoted var to add more
- file: "name={{ bad_var }}"
- failed_when: False
+- name: include test that we can't go all Little Bobby Droptables on a quoted var to add more
+ include: scenario4.yml
tags: scenario4
- name: test that a missing/malformed jinja2 filter fails
diff --git a/test/integration/roles/test_bad_parsing/tasks/scenario1.yml b/test/integration/roles/test_bad_parsing/tasks/scenario1.yml
new file mode 100644
index 0000000000..dab20be749
--- /dev/null
+++ b/test/integration/roles/test_bad_parsing/tasks/scenario1.yml
@@ -0,0 +1,5 @@
+- name: test that we cannot insert arguments
+ file: path={{ test_file }} {{ test_input }}
+ failed_when: False # ignore the module, just test the parser
+ tags: scenario1
+
diff --git a/test/integration/roles/test_bad_parsing/tasks/scenario2.yml b/test/integration/roles/test_bad_parsing/tasks/scenario2.yml
new file mode 100644
index 0000000000..4f14f81b23
--- /dev/null
+++ b/test/integration/roles/test_bad_parsing/tasks/scenario2.yml
@@ -0,0 +1,5 @@
+- name: test that we cannot duplicate arguments
+ file: path={{ test_file }} owner=test2 {{ test_input }}
+ failed_when: False # ignore the module, just test the parser
+ tags: scenario2
+
diff --git a/test/integration/roles/test_bad_parsing/tasks/scenario3.yml b/test/integration/roles/test_bad_parsing/tasks/scenario3.yml
new file mode 100644
index 0000000000..cd4da7baba
--- /dev/null
+++ b/test/integration/roles/test_bad_parsing/tasks/scenario3.yml
@@ -0,0 +1,5 @@
+- name: test that we can't do this for the shell module
+ shell: echo hi {{ chdir }}
+ failed_when: False
+ tags: scenario3
+
diff --git a/test/integration/roles/test_bad_parsing/tasks/scenario4.yml b/test/integration/roles/test_bad_parsing/tasks/scenario4.yml
new file mode 100644
index 0000000000..9ed1eae0b5
--- /dev/null
+++ b/test/integration/roles/test_bad_parsing/tasks/scenario4.yml
@@ -0,0 +1,5 @@
+- name: test that we can't go all Little Bobby Droptables on a quoted var to add more
+ file: "name={{ bad_var }}"
+ failed_when: False
+ tags: scenario4
+
From b91ce29007ff24c73a786afb80b721b6d8778362 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 13 May 2015 12:52:51 -0700
Subject: [PATCH 080/971] Go to next task when we get an error in linear
---
lib/ansible/plugins/strategies/linear.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
index f1efadd547..ec829c8996 100644
--- a/lib/ansible/plugins/strategies/linear.py
+++ b/lib/ansible/plugins/strategies/linear.py
@@ -280,6 +280,7 @@ class StrategyModule(StrategyBase):
iterator.mark_host_failed(host)
# FIXME: callback here?
print(e)
+ continue
for new_block in new_blocks:
noop_block = Block(parent_block=task._block)
From b7d644d484c11f6af4134af021b9d05037a48193 Mon Sep 17 00:00:00 2001
From: Aleksey Zhukov
Date: Thu, 14 May 2015 09:42:48 +0300
Subject: [PATCH 081/971] Fix broken cache logic
---
plugins/inventory/digital_ocean.py | 14 ++++++++++++--
1 file changed, 12 insertions(+), 2 deletions(-)
diff --git a/plugins/inventory/digital_ocean.py b/plugins/inventory/digital_ocean.py
index 9bfb184d57..1323a384ba 100755
--- a/plugins/inventory/digital_ocean.py
+++ b/plugins/inventory/digital_ocean.py
@@ -226,6 +226,9 @@ or environment variables (DO_API_TOKEN)'''
self.build_inventory()
json_data = self.inventory
+ if self.cache_refreshed:
+ self.write_to_cache()
+
if self.args.pretty:
print json.dumps(json_data, sort_keys=True, indent=2)
else:
@@ -309,23 +312,30 @@ or environment variables (DO_API_TOKEN)'''
'''Get JSON from DigitalOcean API'''
if self.args.force_cache:
return
+ # We always get fresh droplets
+ if self.is_cache_valid() and not (resource=='droplets' or resource is None):
+ return
if self.args.refresh_cache:
resource=None
if resource == 'droplets' or resource is None:
self.data['droplets'] = self.manager.all_active_droplets()
+ self.cache_refreshed = True
if resource == 'regions' or resource is None:
self.data['regions'] = self.manager.all_regions()
+ self.cache_refreshed = True
if resource == 'images' or resource is None:
self.data['images'] = self.manager.all_images(filter=None)
+ self.cache_refreshed = True
if resource == 'sizes' or resource is None:
self.data['sizes'] = self.manager.sizes()
+ self.cache_refreshed = True
if resource == 'ssh_keys' or resource is None:
self.data['ssh_keys'] = self.manager.all_ssh_keys()
+ self.cache_refreshed = True
if resource == 'domains' or resource is None:
self.data['domains'] = self.manager.all_domains()
-
- self.write_to_cache()
+ self.cache_refreshed = True
def build_inventory(self):
From 14719a6f08eb67d36d36acb2d3ce0ec3885047a3 Mon Sep 17 00:00:00 2001
From: Chen Zhidong
Date: Thu, 14 May 2015 22:02:30 +0800
Subject: [PATCH 082/971] Add judgment to fix path0 if ANSIBLE_CONFIG is set
to a dir
---
lib/ansible/constants.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 089de5b7c5..d09a8da5ca 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -65,6 +65,8 @@ def load_config_file():
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
path0 = os.path.expanduser(path0)
+ if os.path.isdir(path0):
+ path0 += "/ansible.cfg"
path1 = os.getcwd() + "/ansible.cfg"
path2 = os.path.expanduser("~/.ansible.cfg")
path3 = "/etc/ansible/ansible.cfg"
From a0509cda1ea6d05ed339a14f18697864f929ffcd Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 14 May 2015 14:31:11 -0500
Subject: [PATCH 083/971] Fix test_role unit tests to use unique role names to
avoid role caching errors
---
test/units/playbook/test_role.py | 62 ++++++++++++++++----------------
1 file changed, 31 insertions(+), 31 deletions(-)
diff --git a/test/units/playbook/test_role.py b/test/units/playbook/test_role.py
index 7aab5133da..031871ce32 100644
--- a/test/units/playbook/test_role.py
+++ b/test/units/playbook/test_role.py
@@ -41,28 +41,28 @@ class TestRole(unittest.TestCase):
def test_load_role_with_tasks(self):
fake_loader = DictDataLoader({
- "/etc/ansible/roles/foo/tasks/main.yml": """
+ "/etc/ansible/roles/foo_tasks/tasks/main.yml": """
- shell: echo 'hello world'
""",
})
- i = RoleInclude.load('foo', loader=fake_loader)
+ i = RoleInclude.load('foo_tasks', loader=fake_loader)
r = Role.load(i)
- self.assertEqual(str(r), 'foo')
+ self.assertEqual(str(r), 'foo_tasks')
self.assertEqual(len(r._task_blocks), 1)
assert isinstance(r._task_blocks[0], Block)
def test_load_role_with_handlers(self):
fake_loader = DictDataLoader({
- "/etc/ansible/roles/foo/handlers/main.yml": """
+ "/etc/ansible/roles/foo_handlers/handlers/main.yml": """
- name: test handler
shell: echo 'hello world'
""",
})
- i = RoleInclude.load('foo', loader=fake_loader)
+ i = RoleInclude.load('foo_handlers', loader=fake_loader)
r = Role.load(i)
self.assertEqual(len(r._handler_blocks), 1)
@@ -71,15 +71,15 @@ class TestRole(unittest.TestCase):
def test_load_role_with_vars(self):
fake_loader = DictDataLoader({
- "/etc/ansible/roles/foo/defaults/main.yml": """
+ "/etc/ansible/roles/foo_vars/defaults/main.yml": """
foo: bar
""",
- "/etc/ansible/roles/foo/vars/main.yml": """
+ "/etc/ansible/roles/foo_vars/vars/main.yml": """
foo: bam
""",
})
- i = RoleInclude.load('foo', loader=fake_loader)
+ i = RoleInclude.load('foo_vars', loader=fake_loader)
r = Role.load(i)
self.assertEqual(r._default_vars, dict(foo='bar'))
@@ -88,41 +88,41 @@ class TestRole(unittest.TestCase):
def test_load_role_with_metadata(self):
fake_loader = DictDataLoader({
- '/etc/ansible/roles/foo/meta/main.yml': """
+ '/etc/ansible/roles/foo_metadata/meta/main.yml': """
allow_duplicates: true
dependencies:
- - bar
+ - bar_metadata
galaxy_info:
a: 1
b: 2
c: 3
""",
- '/etc/ansible/roles/bar/meta/main.yml': """
+ '/etc/ansible/roles/bar_metadata/meta/main.yml': """
dependencies:
- - baz
+ - baz_metadata
""",
- '/etc/ansible/roles/baz/meta/main.yml': """
+ '/etc/ansible/roles/baz_metadata/meta/main.yml': """
dependencies:
- - bam
+ - bam_metadata
""",
- '/etc/ansible/roles/bam/meta/main.yml': """
+ '/etc/ansible/roles/bam_metadata/meta/main.yml': """
dependencies: []
""",
- '/etc/ansible/roles/bad1/meta/main.yml': """
+ '/etc/ansible/roles/bad1_metadata/meta/main.yml': """
1
""",
- '/etc/ansible/roles/bad2/meta/main.yml': """
+ '/etc/ansible/roles/bad2_metadata/meta/main.yml': """
foo: bar
""",
- '/etc/ansible/roles/recursive1/meta/main.yml': """
- dependencies: ['recursive2']
+ '/etc/ansible/roles/recursive1_metadata/meta/main.yml': """
+ dependencies: ['recursive2_metadata']
""",
- '/etc/ansible/roles/recursive2/meta/main.yml': """
- dependencies: ['recursive1']
+ '/etc/ansible/roles/recursive2_metadata/meta/main.yml': """
+ dependencies: ['recursive1_metadata']
""",
})
- i = RoleInclude.load('foo', loader=fake_loader)
+ i = RoleInclude.load('foo_metadata', loader=fake_loader)
r = Role.load(i)
role_deps = r.get_direct_dependencies()
@@ -136,17 +136,17 @@ class TestRole(unittest.TestCase):
all_deps = r.get_all_dependencies()
self.assertEqual(len(all_deps), 3)
- self.assertEqual(all_deps[0].get_name(), 'bar')
- self.assertEqual(all_deps[1].get_name(), 'baz')
- self.assertEqual(all_deps[2].get_name(), 'bam')
+ self.assertEqual(all_deps[0].get_name(), 'bam_metadata')
+ self.assertEqual(all_deps[1].get_name(), 'baz_metadata')
+ self.assertEqual(all_deps[2].get_name(), 'bar_metadata')
- i = RoleInclude.load('bad1', loader=fake_loader)
+ i = RoleInclude.load('bad1_metadata', loader=fake_loader)
self.assertRaises(AnsibleParserError, Role.load, i)
- i = RoleInclude.load('bad2', loader=fake_loader)
+ i = RoleInclude.load('bad2_metadata', loader=fake_loader)
self.assertRaises(AnsibleParserError, Role.load, i)
- i = RoleInclude.load('recursive1', loader=fake_loader)
+ i = RoleInclude.load('recursive1_metadata', loader=fake_loader)
self.assertRaises(AnsibleError, Role.load, i)
def test_load_role_complex(self):
@@ -155,13 +155,13 @@ class TestRole(unittest.TestCase):
# params and tags/when statements
fake_loader = DictDataLoader({
- "/etc/ansible/roles/foo/tasks/main.yml": """
+ "/etc/ansible/roles/foo_complex/tasks/main.yml": """
- shell: echo 'hello world'
""",
})
- i = RoleInclude.load(dict(role='foo'), loader=fake_loader)
+ i = RoleInclude.load(dict(role='foo_complex'), loader=fake_loader)
r = Role.load(i)
- self.assertEqual(r.get_name(), "foo")
+ self.assertEqual(r.get_name(), "foo_complex")
From 48d62fd9341dbe030380f0feab5dc7a9f9483a0f Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 14 May 2015 20:10:31 -0500
Subject: [PATCH 084/971] Cleaning up VariableManager tests (v2)
---
lib/ansible/vars/__init__.py | 9 +++---
test/units/vars/test_variable_manager.py | 41 ++++++++++++++++--------
2 files changed, 33 insertions(+), 17 deletions(-)
diff --git a/lib/ansible/vars/__init__.py b/lib/ansible/vars/__init__.py
index 736b9529ef..5a576daba7 100644
--- a/lib/ansible/vars/__init__.py
+++ b/lib/ansible/vars/__init__.py
@@ -72,7 +72,8 @@ class VariableManager:
''' ensures a clean copy of the extra_vars are made '''
return self._extra_vars.copy()
- def set_extra_vars(self, value):
+ @extra_vars.setter
+ def extra_vars(self, value):
''' ensures a clean copy of the extra_vars are used to set the value '''
assert isinstance(value, MutableMapping)
self._extra_vars = value.copy()
@@ -123,7 +124,7 @@ class VariableManager:
return result
- def get_vars(self, loader, play=None, host=None, task=None):
+ def get_vars(self, loader, play=None, host=None, task=None, use_cache=True):
'''
Returns the variables, with optional "context" given via the parameters
for the play, host, and task (which could possibly result in different
@@ -145,7 +146,7 @@ class VariableManager:
debug("in VariableManager get_vars()")
cache_entry = self._get_cache_entry(play=play, host=host, task=task)
- if cache_entry in CACHED_VARS:
+ if cache_entry in CACHED_VARS and use_cache:
debug("vars are cached, returning them now")
return CACHED_VARS[cache_entry]
@@ -229,7 +230,7 @@ class VariableManager:
# the 'omit' value alows params to be left out if the variable they are based on is undefined
all_vars['omit'] = self._omit_token
- CACHED_VARS[cache_entry] = all_vars
+ #CACHED_VARS[cache_entry] = all_vars
debug("done with get_vars()")
return all_vars
diff --git a/test/units/vars/test_variable_manager.py b/test/units/vars/test_variable_manager.py
index 9abed8f948..273f9238ed 100644
--- a/test/units/vars/test_variable_manager.py
+++ b/test/units/vars/test_variable_manager.py
@@ -38,7 +38,11 @@ class TestVariableManager(unittest.TestCase):
fake_loader = DictDataLoader({})
v = VariableManager()
- self.assertEqual(v.get_vars(loader=fake_loader), dict())
+ vars = v.get_vars(loader=fake_loader, use_cache=False)
+ if 'omit' in vars:
+ del vars['omit']
+
+ self.assertEqual(vars, dict())
self.assertEqual(
v._merge_dicts(
@@ -59,11 +63,14 @@ class TestVariableManager(unittest.TestCase):
extra_vars = dict(a=1, b=2, c=3)
v = VariableManager()
- v.set_extra_vars(extra_vars)
+ v.extra_vars = extra_vars
+
+ vars = v.get_vars(loader=fake_loader, use_cache=False)
for (key, val) in extra_vars.iteritems():
- self.assertEqual(v.get_vars(loader=fake_loader).get(key), val)
- self.assertIsNot(v.extra_vars.get(key), val)
+ self.assertEqual(vars.get(key), val)
+
+ self.assertIsNot(v.extra_vars, extra_vars)
def test_variable_manager_host_vars_file(self):
fake_loader = DictDataLoader({
@@ -82,30 +89,38 @@ class TestVariableManager(unittest.TestCase):
mock_host.get_vars.return_value = dict()
mock_host.get_groups.return_value = ()
- self.assertEqual(v.get_vars(loader=fake_loader, host=mock_host).get("foo"), "bar")
+ self.assertEqual(v.get_vars(loader=fake_loader, host=mock_host, use_cache=False).get("foo"), "bar")
def test_variable_manager_group_vars_file(self):
fake_loader = DictDataLoader({
- "group_vars/somegroup.yml": """
+ "group_vars/all.yml": """
foo: bar
+ """,
+ "group_vars/somegroup.yml": """
+ bam: baz
"""
})
v = VariableManager()
+ v.add_group_vars_file("group_vars/all.yml", loader=fake_loader)
v.add_group_vars_file("group_vars/somegroup.yml", loader=fake_loader)
self.assertIn("somegroup", v._group_vars_files)
- self.assertEqual(v._group_vars_files["somegroup"], dict(foo="bar"))
+ self.assertEqual(v._group_vars_files["all"], dict(foo="bar"))
+ self.assertEqual(v._group_vars_files["somegroup"], dict(bam="baz"))
mock_group = MagicMock()
- mock_group.name.return_value = "somegroup"
+ mock_group.name = "somegroup"
mock_group.get_ancestors.return_value = ()
+ mock_group.get_vars.return_value = dict()
mock_host = MagicMock()
mock_host.get_name.return_value = "hostname1"
mock_host.get_vars.return_value = dict()
- mock_host.get_groups.return_value = (mock_group)
+ mock_host.get_groups.return_value = (mock_group,)
- self.assertEqual(v.get_vars(loader=fake_loader, host=mock_host).get("foo"), "bar")
+ vars = v.get_vars(loader=fake_loader, host=mock_host, use_cache=False)
+ self.assertEqual(vars.get("foo"), "bar")
+ self.assertEqual(vars.get("bam"), "baz")
def test_variable_manager_play_vars(self):
fake_loader = DictDataLoader({})
@@ -116,7 +131,7 @@ class TestVariableManager(unittest.TestCase):
mock_play.get_vars_files.return_value = []
v = VariableManager()
- self.assertEqual(v.get_vars(loader=fake_loader, play=mock_play).get("foo"), "bar")
+ self.assertEqual(v.get_vars(loader=fake_loader, play=mock_play, use_cache=False).get("foo"), "bar")
def test_variable_manager_play_vars_files(self):
fake_loader = DictDataLoader({
@@ -131,7 +146,7 @@ class TestVariableManager(unittest.TestCase):
mock_play.get_vars_files.return_value = ['/path/to/somefile.yml']
v = VariableManager()
- self.assertEqual(v.get_vars(loader=fake_loader, play=mock_play).get("foo"), "bar")
+ self.assertEqual(v.get_vars(loader=fake_loader, play=mock_play, use_cache=False).get("foo"), "bar")
def test_variable_manager_task_vars(self):
fake_loader = DictDataLoader({})
@@ -141,5 +156,5 @@ class TestVariableManager(unittest.TestCase):
mock_task.get_vars.return_value = dict(foo="bar")
v = VariableManager()
- self.assertEqual(v.get_vars(loader=fake_loader, task=mock_task).get("foo"), "bar")
+ self.assertEqual(v.get_vars(loader=fake_loader, task=mock_task, use_cache=False).get("foo"), "bar")
From ac7dce4631dd073c68a8770a91bbb7dfb99ad96c Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Fri, 15 May 2015 10:45:55 -0500
Subject: [PATCH 085/971] Fixing broken set_extra_vars method after fixing unit
tests (v2)
---
lib/ansible/cli/playbook.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index 69e411dc87..97d4f0de3f 100644
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -131,7 +131,7 @@ class PlaybookCLI(CLI):
# create the variable manager, which will be shared throughout
# the code, ensuring a consistent view of global variables
variable_manager = VariableManager()
- variable_manager.set_extra_vars(extra_vars)
+ variable_manager.extra_vars = extra_vars
# create the inventory, and filter it based on the subset specified (if any)
inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)
From 2e31a67532fa889dd6e201ad14a8cbb5f6a8d3f1 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Fri, 15 May 2015 10:42:41 -0700
Subject: [PATCH 086/971] Update module refs in v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 46a5531893..b92ed6e9da 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 46a553189331dcbe2017aa47345c1c10640263bc
+Subproject commit b92ed6e9da7784743976ade2affef63c8ddfedaf
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index aa86c5ff90..8c8a0e1b8d 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit aa86c5ff9010a5201c8ee5ffd2b0045abfaba899
+Subproject commit 8c8a0e1b8dc4b51721b313fcabb9bb5bd8a6d26f
From 0913b8263ca88400efb2efd4cb681f8d883cceeb Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Thu, 14 May 2015 10:50:22 -0400
Subject: [PATCH 087/971] made special treatment of certain filesystem for
selinux configurable
---
examples/ansible.cfg | 5 +++++
lib/ansible/constants.py | 5 ++++-
lib/ansible/inventory/__init__.py | 2 +-
lib/ansible/module_utils/basic.py | 24 +++++++++++++++---------
v1/ansible/module_common.py | 9 ++++++---
5 files changed, 31 insertions(+), 14 deletions(-)
diff --git a/examples/ansible.cfg b/examples/ansible.cfg
index 4cf9d513e5..85eada17cc 100644
--- a/examples/ansible.cfg
+++ b/examples/ansible.cfg
@@ -223,3 +223,8 @@ accelerate_daemon_timeout = 30
# is "no".
#accelerate_multi_key = yes
+[selinux]
+# file systems that require special treatment when dealing with security context
+# the default behaviour that copies the existing context or uses the user default
+# needs to be changed to use the file system dependent context.
+#special_context_filesystems=nfs,vboxsf,fuse
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 456beb8bbc..d24dc311a7 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -142,7 +142,10 @@ DEFAULT_SU_FLAGS = get_config(p, DEFAULTS, 'su_flags', 'ANSIBLE_SU_FLAG
DEFAULT_SU_USER = get_config(p, DEFAULTS, 'su_user', 'ANSIBLE_SU_USER', 'root')
DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_SU_PASS', False, boolean=True)
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
-DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
+DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
+
+# selinux
+DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf', islist=True)
#TODO: get rid of ternary chain mess
BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py
index 063398f17f..45bdaf8a6f 100644
--- a/lib/ansible/inventory/__init__.py
+++ b/lib/ansible/inventory/__init__.py
@@ -61,7 +61,7 @@ class Inventory(object):
self._vars_per_host = {}
self._vars_per_group = {}
self._hosts_cache = {}
- self._groups_list = {}
+ self._groups_list = {}
self._pattern_cache = {}
# to be set by calling set_playbook_basedir by playbook code
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 8f9b03f882..1f0abb1776 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -38,6 +38,8 @@ BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
+SELINUX_SPECIAL_FS="<>"
+
# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here
# can be inserted in any module source automatically by including
@@ -529,10 +531,10 @@ class AnsibleModule(object):
path = os.path.dirname(path)
return path
- def is_nfs_path(self, path):
+ def is_special_selinux_path(self, path):
"""
- Returns a tuple containing (True, selinux_context) if the given path
- is on a NFS mount point, otherwise the return will be (False, None).
+ Returns a tuple containing (True, selinux_context) if the given path is on a
+ NFS or other 'special' fs mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
@@ -543,9 +545,13 @@ class AnsibleModule(object):
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
- if path_mount_point == mount_point and 'nfs' in fstype:
- nfs_context = self.selinux_context(path_mount_point)
- return (True, nfs_context)
+
+ if path_mount_point == mount_point:
+ for fs in SELINUX_SPECIAL_FS.split(','):
+ if fs in fstype:
+ special_context = self.selinux_context(path_mount_point)
+ return (True, special_context)
+
return (False, None)
def set_default_selinux_context(self, path, changed):
@@ -563,9 +569,9 @@ class AnsibleModule(object):
# Iterate over the current context instead of the
# argument context, which may have selevel.
- (is_nfs, nfs_context) = self.is_nfs_path(path)
- if is_nfs:
- new_context = nfs_context
+ (is_special_se, sp_context) = self.is_special_selinux_path(path)
+ if is_special_se:
+ new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
diff --git a/v1/ansible/module_common.py b/v1/ansible/module_common.py
index 118c757f8d..fba5b9137d 100644
--- a/v1/ansible/module_common.py
+++ b/v1/ansible/module_common.py
@@ -33,6 +33,8 @@ REPLACER_ARGS = "\"<>\""
REPLACER_COMPLEX = "\"<>\""
REPLACER_WINDOWS = "# POWERSHELL_COMMON"
REPLACER_VERSION = "\"<>\""
+REPLACER_SELINUX = "<>"
+
class ModuleReplacer(object):
@@ -41,14 +43,14 @@ class ModuleReplacer(object):
transfer. Rather than doing classical python imports, this allows for more
efficient transfer in a no-bootstrapping scenario by not moving extra files
over the wire, and also takes care of embedding arguments in the transferred
- modules.
+ modules.
This version is done in such a way that local imports can still be
used in the module code, so IDEs don't have to be aware of what is going on.
Example:
- from ansible.module_utils.basic import *
+ from ansible.module_utils.basic import *
... will result in the insertion basic.py into the module
@@ -94,7 +96,7 @@ class ModuleReplacer(object):
module_style = 'new'
elif 'WANT_JSON' in module_data:
module_style = 'non_native_want_json'
-
+
output = StringIO()
lines = module_data.split('\n')
snippet_names = []
@@ -167,6 +169,7 @@ class ModuleReplacer(object):
# these strings should be part of the 'basic' snippet which is required to be included
module_data = module_data.replace(REPLACER_VERSION, repr(__version__))
+ module_data = module_data.replace(REPLACER_SELINUX, ','.join(C.DEFAULT_SELINUX_SPECIAL_FS))
module_data = module_data.replace(REPLACER_ARGS, encoded_args)
module_data = module_data.replace(REPLACER_COMPLEX, encoded_complex)
From e7846343e57691f827623047b140ccbe938a13eb Mon Sep 17 00:00:00 2001
From: Till Maas
Date: Fri, 15 May 2015 22:25:20 +0200
Subject: [PATCH 088/971] facts: Add ed25519 ssh pubkey
---
lib/ansible/module_utils/facts.py | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index b223c5f5f7..b95fccdcb7 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -474,14 +474,17 @@ class Facts(object):
dsa_filename = '/etc/ssh/ssh_host_dsa_key.pub'
rsa_filename = '/etc/ssh/ssh_host_rsa_key.pub'
ecdsa_filename = '/etc/ssh/ssh_host_ecdsa_key.pub'
+ ed25519_filename = '/etc/ssh/ssh_host_ed25519_key.pub'
if self.facts['system'] == 'Darwin':
dsa_filename = '/etc/ssh_host_dsa_key.pub'
rsa_filename = '/etc/ssh_host_rsa_key.pub'
ecdsa_filename = '/etc/ssh_host_ecdsa_key.pub'
+ ed25519_filename = '/etc/ssh_host_ed25519_key.pub'
dsa = get_file_content(dsa_filename)
rsa = get_file_content(rsa_filename)
ecdsa = get_file_content(ecdsa_filename)
+ ed25519 = get_file_content(ed25519_filename)
if dsa is None:
dsa = 'NA'
else:
@@ -494,6 +497,10 @@ class Facts(object):
ecdsa = 'NA'
else:
self.facts['ssh_host_key_ecdsa_public'] = ecdsa.split()[1]
+ if ed25519 is None:
+ ed25519 = 'NA'
+ else:
+ self.facts['ssh_host_key_ed25519_public'] = ed25519.split()[1]
def get_pkg_mgr_facts(self):
self.facts['pkg_mgr'] = 'unknown'
From 02d784598fcdbfd2bfc93c91ecff782a61dafcc3 Mon Sep 17 00:00:00 2001
From: Till Maas
Date: Fri, 15 May 2015 22:36:13 +0200
Subject: [PATCH 089/971] facts: Simplify ssh key fetching
---
lib/ansible/module_utils/facts.py | 37 +++++++++----------------------
1 file changed, 10 insertions(+), 27 deletions(-)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index b95fccdcb7..6ddae5df85 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -471,36 +471,19 @@ class Facts(object):
pass
def get_public_ssh_host_keys(self):
- dsa_filename = '/etc/ssh/ssh_host_dsa_key.pub'
- rsa_filename = '/etc/ssh/ssh_host_rsa_key.pub'
- ecdsa_filename = '/etc/ssh/ssh_host_ecdsa_key.pub'
- ed25519_filename = '/etc/ssh/ssh_host_ed25519_key.pub'
+ keytypes = ('dsa', 'rsa', 'ecdsa', 'ed25519')
if self.facts['system'] == 'Darwin':
- dsa_filename = '/etc/ssh_host_dsa_key.pub'
- rsa_filename = '/etc/ssh_host_rsa_key.pub'
- ecdsa_filename = '/etc/ssh_host_ecdsa_key.pub'
- ed25519_filename = '/etc/ssh_host_ed25519_key.pub'
- dsa = get_file_content(dsa_filename)
- rsa = get_file_content(rsa_filename)
- ecdsa = get_file_content(ecdsa_filename)
- ed25519 = get_file_content(ed25519_filename)
- if dsa is None:
- dsa = 'NA'
+ keydir = '/etc'
else:
- self.facts['ssh_host_key_dsa_public'] = dsa.split()[1]
- if rsa is None:
- rsa = 'NA'
- else:
- self.facts['ssh_host_key_rsa_public'] = rsa.split()[1]
- if ecdsa is None:
- ecdsa = 'NA'
- else:
- self.facts['ssh_host_key_ecdsa_public'] = ecdsa.split()[1]
- if ed25519 is None:
- ed25519 = 'NA'
- else:
- self.facts['ssh_host_key_ed25519_public'] = ed25519.split()[1]
+ keydir = '/etc/ssh'
+
+ for type_ in keytypes:
+ key_filename = '%s/ssh_host_%s_key.pub' % (keydir, type_)
+ keydata = get_file_content(key_filename)
+ if keydata is not None:
+ factname = 'ssh_host_key_%s_public' % type_
+ self.facts[factname] = keydata.split()[1]
def get_pkg_mgr_facts(self):
self.facts['pkg_mgr'] = 'unknown'
From 23cd3294d0caaf5cf90de8d63b779d186e158abd Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sat, 16 May 2015 15:45:01 -0500
Subject: [PATCH 090/971] Starting to add v2 tests for template
---
test/units/template/__init__.py | 21 ++++++++
test/units/template/test_safe_eval.py | 21 ++++++++
test/units/template/test_templar.py | 74 +++++++++++++++++++++++++++
test/units/template/test_vars.py | 21 ++++++++
4 files changed, 137 insertions(+)
create mode 100644 test/units/template/__init__.py
create mode 100644 test/units/template/test_safe_eval.py
create mode 100644 test/units/template/test_templar.py
create mode 100644 test/units/template/test_vars.py
diff --git a/test/units/template/__init__.py b/test/units/template/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/template/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/template/test_safe_eval.py b/test/units/template/test_safe_eval.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/template/test_safe_eval.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
new file mode 100644
index 0000000000..f2f727d1c7
--- /dev/null
+++ b/test/units/template/test_templar.py
@@ -0,0 +1,74 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.compat.tests import unittest
+from ansible.compat.tests.mock import patch, MagicMock
+
+from ansible import constants as C
+from ansible.plugins import filter_loader, lookup_loader, module_loader
+from ansible.plugins.strategies import SharedPluginLoaderObj
+from ansible.template import Templar
+
+from units.mock.loader import DictDataLoader
+
+class TestTemplar(unittest.TestCase):
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def test_templar_simple(self):
+ fake_loader = DictDataLoader({})
+ shared_loader = SharedPluginLoaderObj()
+ templar = Templar(loader=fake_loader, variables=dict(foo="bar", bam="{{foo}}", num=1, var_true=True, var_false=False, var_dict=dict(a="b"), bad_dict="{a='b'", var_list=[1]))
+
+ # test some basic templating
+ self.assertEqual(templar.template("{{foo}}"), "bar")
+ self.assertEqual(templar.template("{{foo}}\n"), "bar")
+ self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=True), "bar\n")
+ self.assertEqual(templar.template("foo", convert_bare=True), "bar")
+ self.assertEqual(templar.template("{{bam}}"), "bar")
+ self.assertEqual(templar.template("{{num}}"), 1)
+ self.assertEqual(templar.template("{{var_true}}"), True)
+ self.assertEqual(templar.template("{{var_false}}"), False)
+ self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
+ self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
+ self.assertEqual(templar.template("{{var_list}}"), [1])
+
+ # test set_available_variables()
+ templar.set_available_variables(variables=dict(foo="bam"))
+ self.assertEqual(templar.template("{{foo}}"), "bam")
+ # variables must be a dict() for set_available_variables()
+ self.assertRaises(AssertionError, templar.set_available_variables, "foo=bam")
+
+ def test_template_jinja2_extensions(self):
+ fake_loader = DictDataLoader({})
+ templar = Templar(loader=fake_loader)
+
+ old_exts = C.DEFAULT_JINJA2_EXTENSIONS
+ try:
+ C.DEFAULT_JINJA2_EXTENSIONS = "foo,bar"
+ self.assertEqual(templar._get_extensions(), ['foo', 'bar'])
+ finally:
+ C.DEFAULT_JINJA2_EXTENSIONS = old_exts
+
diff --git a/test/units/template/test_vars.py b/test/units/template/test_vars.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/template/test_vars.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
From 9aa8676bdd13a0636e5e7920713197972d56946d Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sun, 17 May 2015 01:06:02 -0500
Subject: [PATCH 091/971] More template unit tests for v2
---
lib/ansible/plugins/lookup/file.py | 12 ++++++++----
lib/ansible/template/__init__.py | 2 +-
test/units/mock/loader.py | 6 ++++++
test/units/template/test_templar.py | 20 ++++++++++++++++++--
4 files changed, 33 insertions(+), 7 deletions(-)
diff --git a/lib/ansible/plugins/lookup/file.py b/lib/ansible/plugins/lookup/file.py
index efb039497d..ea53c37e03 100644
--- a/lib/ansible/plugins/lookup/file.py
+++ b/lib/ansible/plugins/lookup/file.py
@@ -42,18 +42,22 @@ class LookupModule(LookupBase):
# role/files/ directory, and finally the playbook directory
# itself (which will be relative to the current working dir)
+ if 'role_path' in variables:
+ relative_path = self._loader.path_dwim_relative(variables['role_path'], 'files', term, check=False)
+
# FIXME: the original file stuff still needs to be worked out, but the
# playbook_dir stuff should be able to be removed as it should
# be covered by the fact that the loader contains that info
- #if '_original_file' in variables:
- # relative_path = self._loader.path_dwim_relative(variables['_original_file'], 'files', term, self.basedir, check=False)
#if 'playbook_dir' in variables:
# playbook_path = os.path.join(variables['playbook_dir'], term)
for path in (basedir_path, relative_path, playbook_path):
- if path and os.path.exists(path):
- ret.append(codecs.open(path, encoding="utf8").read().rstrip())
+ try:
+ contents = self._loader._get_file_contents(path)
+ ret.append(contents.rstrip())
break
+ except AnsibleParserError:
+ continue
else:
raise AnsibleError("could not locate file in lookup: %s" % term)
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 19e091b9b2..8ad9917d60 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -218,7 +218,7 @@ class Templar:
# safely catch run failures per #5059
try:
ran = instance.run(*args, variables=self._available_variables, **kwargs)
- except AnsibleUndefinedVariable:
+ except (AnsibleUndefinedVariable, UndefinedError):
raise
except Exception, e:
if self._fail_on_lookup_errors:
diff --git a/test/units/mock/loader.py b/test/units/mock/loader.py
index cf9d7ea72d..078ca3f0e6 100644
--- a/test/units/mock/loader.py
+++ b/test/units/mock/loader.py
@@ -38,6 +38,12 @@ class DictDataLoader(DataLoader):
return self.load(self._file_mapping[path], path)
return None
+ def _get_file_contents(self, path):
+ if path in self._file_mapping:
+ return self._file_mapping[path]
+ else:
+ raise AnsibleParserError("file not found: %s" % path)
+
def path_exists(self, path):
return path in self._file_mapping or path in self._known_directories
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
index f2f727d1c7..eb634994fd 100644
--- a/test/units/template/test_templar.py
+++ b/test/units/template/test_templar.py
@@ -19,10 +19,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from jinja2.exceptions import UndefinedError
+
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible import constants as C
+from ansible.errors import *
from ansible.plugins import filter_loader, lookup_loader, module_loader
from ansible.plugins.strategies import SharedPluginLoaderObj
from ansible.template import Templar
@@ -38,9 +41,11 @@ class TestTemplar(unittest.TestCase):
pass
def test_templar_simple(self):
- fake_loader = DictDataLoader({})
+ fake_loader = DictDataLoader({
+ "/path/to/my_file.txt": "foo\n",
+ })
shared_loader = SharedPluginLoaderObj()
- templar = Templar(loader=fake_loader, variables=dict(foo="bar", bam="{{foo}}", num=1, var_true=True, var_false=False, var_dict=dict(a="b"), bad_dict="{a='b'", var_list=[1]))
+ templar = Templar(loader=fake_loader, variables=dict(foo="bar", bam="{{foo}}", num=1, var_true=True, var_false=False, var_dict=dict(a="b"), bad_dict="{a='b'", var_list=[1], recursive="{{recursive}}"))
# test some basic templating
self.assertEqual(templar.template("{{foo}}"), "bar")
@@ -54,6 +59,17 @@ class TestTemplar(unittest.TestCase):
self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
self.assertEqual(templar.template("{{var_list}}"), [1])
+ self.assertEqual(templar.template(1, convert_bare=True), 1)
+ self.assertRaises(UndefinedError, templar.template, "{{bad_var}}")
+ self.assertEqual(templar.template("{{lookup('file', '/path/to/my_file.txt')}}"), "foo")
+ self.assertRaises(UndefinedError, templar.template, "{{lookup('file', bad_var)}}")
+ self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
+ self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
+ self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")
+
+ # test with fail_on_undefined=False
+ templar = Templar(loader=fake_loader, fail_on_undefined=False)
+ self.assertEqual(templar.template("{{bad_var}}"), "{{bad_var}}")
# test set_available_variables()
templar.set_available_variables(variables=dict(foo="bam"))
From 398b1d3e60e05585e81c9a47d00ab1077391813d Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sun, 17 May 2015 01:13:22 -0500
Subject: [PATCH 092/971] Cleaning up template test syntax a bit
---
test/units/template/test_templar.py | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py
index eb634994fd..ce40c73b0d 100644
--- a/test/units/template/test_templar.py
+++ b/test/units/template/test_templar.py
@@ -45,7 +45,18 @@ class TestTemplar(unittest.TestCase):
"/path/to/my_file.txt": "foo\n",
})
shared_loader = SharedPluginLoaderObj()
- templar = Templar(loader=fake_loader, variables=dict(foo="bar", bam="{{foo}}", num=1, var_true=True, var_false=False, var_dict=dict(a="b"), bad_dict="{a='b'", var_list=[1], recursive="{{recursive}}"))
+ variables = dict(
+ foo="bar",
+ bam="{{foo}}",
+ num=1,
+ var_true=True,
+ var_false=False,
+ var_dict=dict(a="b"),
+ bad_dict="{a='b'",
+ var_list=[1],
+ recursive="{{recursive}}",
+ )
+ templar = Templar(loader=fake_loader, variables=variables)
# test some basic templating
self.assertEqual(templar.template("{{foo}}"), "bar")
From a960fcd569c0fde85b27f3c34093634b37fa2759 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sun, 17 May 2015 01:29:40 -0500
Subject: [PATCH 093/971] Adding module_utils tests from v1 to v2
---
test/units/module_utils/__init__.py | 21 ++
test/units/module_utils/test_basic.py | 355 +++++++++++++++++++++++
test/units/module_utils/test_database.py | 118 ++++++++
3 files changed, 494 insertions(+)
create mode 100644 test/units/module_utils/__init__.py
create mode 100644 test/units/module_utils/test_basic.py
create mode 100644 test/units/module_utils/test_database.py
diff --git a/test/units/module_utils/__init__.py b/test/units/module_utils/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/module_utils/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/module_utils/test_basic.py b/test/units/module_utils/test_basic.py
new file mode 100644
index 0000000000..60f501ba28
--- /dev/null
+++ b/test/units/module_utils/test_basic.py
@@ -0,0 +1,355 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+#from __future__ import (absolute_import, division, print_function)
+from __future__ import (absolute_import, division)
+__metaclass__ = type
+
+import os
+import tempfile
+
+from ansible.compat.tests import unittest
+from ansible.compat.tests.mock import patch, MagicMock
+
+from ansible.errors import *
+from ansible.executor.module_common import modify_module
+from ansible.module_utils.basic import heuristic_log_sanitize
+from ansible.utils.hashing import checksum as utils_checksum
+
+TEST_MODULE_DATA = """
+from ansible.module_utils.basic import *
+
+def get_module():
+ return AnsibleModule(
+ argument_spec = dict(),
+ supports_check_mode = True,
+ no_log = True,
+ )
+
+get_module()
+
+"""
+
+class TestModuleUtilsBasic(unittest.TestCase):
+
+ def cleanup_temp_file(self, fd, path):
+ try:
+ os.close(fd)
+ os.remove(path)
+ except:
+ pass
+
+ def cleanup_temp_dir(self, path):
+ try:
+ os.rmdir(path)
+ except:
+ pass
+
+ def setUp(self):
+ # create a temporary file for the test module
+ # we're about to generate
+ self.tmp_fd, self.tmp_path = tempfile.mkstemp()
+ os.write(self.tmp_fd, TEST_MODULE_DATA)
+
+ # template the module code and eval it
+ module_data, module_style, shebang = modify_module(self.tmp_path, {})
+
+ d = {}
+ exec(module_data, d, d)
+ self.module = d['get_module']()
+
+ # module_utils/basic.py screws with CWD, let's save it and reset
+ self.cwd = os.getcwd()
+
+ def tearDown(self):
+ self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
+ # Reset CWD back to what it was before basic.py changed it
+ os.chdir(self.cwd)
+
+ #################################################################################
+ # run_command() tests
+
+ # test run_command with a string command
+ def test_run_command_string(self):
+ (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'")
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar')
+ (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'", use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar')
+
+ # test run_command with an array of args (with both use_unsafe_shell=True|False)
+ def test_run_command_args(self):
+ (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"])
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar')
+ (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"], use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar')
+
+ # test run_command with leading environment variables
+ #@raises(SystemExit)
+ def test_run_command_string_with_env_variables(self):
+ self.assertRaises(SystemExit, self.module.run_command, 'FOO=bar /bin/echo -n "foo bar"')
+
+ #@raises(SystemExit)
+ def test_run_command_args_with_env_variables(self):
+ self.assertRaises(SystemExit, self.module.run_command, ['FOO=bar', '/bin/echo', '-n', 'foo bar'])
+
+ def test_run_command_string_unsafe_with_env_variables(self):
+ (rc, out, err) = self.module.run_command('FOO=bar /bin/echo -n "foo bar"', use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar')
+
+ # test run_command with a command pipe (with both use_unsafe_shell=True|False)
+ def test_run_command_string_unsafe_with_pipe(self):
+ (rc, out, err) = self.module.run_command('echo "foo bar" | cat', use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar\n')
+
+ # test run_command with a shell redirect in (with both use_unsafe_shell=True|False)
+ def test_run_command_string_unsafe_with_redirect_in(self):
+ (rc, out, err) = self.module.run_command('cat << EOF\nfoo bar\nEOF', use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar\n')
+
+ # test run_command with a shell redirect out (with both use_unsafe_shell=True|False)
+ def test_run_command_string_unsafe_with_redirect_out(self):
+ tmp_fd, tmp_path = tempfile.mkstemp()
+ try:
+ (rc, out, err) = self.module.run_command('echo "foo bar" > %s' % tmp_path, use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertTrue(os.path.exists(tmp_path))
+ checksum = utils_checksum(tmp_path)
+ self.assertEqual(checksum, 'd53a205a336e07cf9eac45471b3870f9489288ec')
+ except:
+ raise
+ finally:
+ self.cleanup_temp_file(tmp_fd, tmp_path)
+
+ # test run_command with a double shell redirect out (append) (with both use_unsafe_shell=True|False)
+ def test_run_command_string_unsafe_with_double_redirect_out(self):
+ tmp_fd, tmp_path = tempfile.mkstemp()
+ try:
+ (rc, out, err) = self.module.run_command('echo "foo bar" >> %s' % tmp_path, use_unsafe_shell=True)
+ self.assertEqual(rc, 0)
+ self.assertTrue(os.path.exists(tmp_path))
+ checksum = utils_checksum(tmp_path)
+ self.assertEqual(checksum, 'd53a205a336e07cf9eac45471b3870f9489288ec')
+ except:
+ raise
+ finally:
+ self.cleanup_temp_file(tmp_fd, tmp_path)
+
+ # test run_command with data
+ def test_run_command_string_with_data(self):
+ (rc, out, err) = self.module.run_command('cat', data='foo bar')
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'foo bar\n')
+
+ # test run_command with binary data
+ def test_run_command_string_with_binary_data(self):
+ (rc, out, err) = self.module.run_command('cat', data='\x41\x42\x43\x44', binary_data=True)
+ self.assertEqual(rc, 0)
+ self.assertEqual(out, 'ABCD')
+
+ # test run_command with a cwd set
+ def test_run_command_string_with_cwd(self):
+ tmp_path = tempfile.mkdtemp()
+ try:
+ (rc, out, err) = self.module.run_command('pwd', cwd=tmp_path)
+ self.assertEqual(rc, 0)
+ self.assertTrue(os.path.exists(tmp_path))
+ self.assertEqual(out.strip(), os.path.realpath(tmp_path))
+ except:
+ raise
+ finally:
+ self.cleanup_temp_dir(tmp_path)
+
+
+class TestModuleUtilsBasicHelpers(unittest.TestCase):
+ ''' Test some implementation details of AnsibleModule
+
+ Some pieces of AnsibleModule are implementation details but they have
+ potential cornercases that we need to check. Go ahead and test at
+ this level that the functions are behaving even though their API may
+ change and we'd have to rewrite these tests so that we know that we
+ need to check for those problems in any rewrite.
+
+ In the future we might want to restructure higher level code to be
+ friendlier to unittests so that we can test at the level that the public
+ is interacting with the APIs.
+ '''
+
+ MANY_RECORDS = 7000
+ URL_SECRET = 'http://username:pas:word@foo.com/data'
+ SSH_SECRET = 'username:pas:word@foo.com/data'
+
+ def cleanup_temp_file(self, fd, path):
+ try:
+ os.close(fd)
+ os.remove(path)
+ except:
+ pass
+
+ def cleanup_temp_dir(self, path):
+ try:
+ os.rmdir(path)
+ except:
+ pass
+
+ def _gen_data(self, records, per_rec, top_level, secret_text):
+ hostvars = {'hostvars': {}}
+ for i in range(1, records, 1):
+ host_facts = {'host%s' % i:
+ {'pstack':
+ {'running': '875.1',
+ 'symlinked': '880.0',
+ 'tars': [],
+ 'versions': ['885.0']},
+ }}
+
+ if per_rec:
+ host_facts['host%s' % i]['secret'] = secret_text
+ hostvars['hostvars'].update(host_facts)
+ if top_level:
+ hostvars['secret'] = secret_text
+ return hostvars
+
+ def setUp(self):
+ self.many_url = repr(self._gen_data(self.MANY_RECORDS, True, True,
+ self.URL_SECRET))
+ self.many_ssh = repr(self._gen_data(self.MANY_RECORDS, True, True,
+ self.SSH_SECRET))
+ self.one_url = repr(self._gen_data(self.MANY_RECORDS, False, True,
+ self.URL_SECRET))
+ self.one_ssh = repr(self._gen_data(self.MANY_RECORDS, False, True,
+ self.SSH_SECRET))
+ self.zero_secrets = repr(self._gen_data(self.MANY_RECORDS, False,
+ False, ''))
+ self.few_url = repr(self._gen_data(2, True, True, self.URL_SECRET))
+ self.few_ssh = repr(self._gen_data(2, True, True, self.SSH_SECRET))
+
+ # create a temporary file for the test module
+ # we're about to generate
+ self.tmp_fd, self.tmp_path = tempfile.mkstemp()
+ os.write(self.tmp_fd, TEST_MODULE_DATA)
+
+ # template the module code and eval it
+ module_data, module_style, shebang = modify_module(self.tmp_path, {})
+
+ d = {}
+ exec(module_data, d, d)
+ self.module = d['get_module']()
+
+ # module_utils/basic.py screws with CWD, let's save it and reset
+ self.cwd = os.getcwd()
+
+ def tearDown(self):
+ self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
+ # Reset CWD back to what it was before basic.py changed it
+ os.chdir(self.cwd)
+
+
+ #################################################################################
+
+ #
+ # Speed tests
+ #
+
+ # Previously, we used regexes which had some pathologically slow cases for
+ # parameters with large amounts of data with many ':' but no '@'. The
+ # present function gets slower when there are many replacements so we may
+ # want to explore regexes in the future (for the speed when substituting
+ # or flexibility). These speed tests will hopefully tell us if we're
+ # introducing code that has cases that are simply too slow.
+ #
+ # Some regex notes:
+ # * re.sub() is faster than re.match() + str.join().
+ # * We may be able to detect a large number of '@' symbols and then use
+ # a regex else use the present function.
+
+ #@timed(5)
+ #def test_log_sanitize_speed_many_url(self):
+ # heuristic_log_sanitize(self.many_url)
+
+ #@timed(5)
+ #def test_log_sanitize_speed_many_ssh(self):
+ # heuristic_log_sanitize(self.many_ssh)
+
+ #@timed(5)
+ #def test_log_sanitize_speed_one_url(self):
+ # heuristic_log_sanitize(self.one_url)
+
+ #@timed(5)
+ #def test_log_sanitize_speed_one_ssh(self):
+ # heuristic_log_sanitize(self.one_ssh)
+
+ #@timed(5)
+ #def test_log_sanitize_speed_zero_secrets(self):
+ # heuristic_log_sanitize(self.zero_secrets)
+
+ #
+ # Test that the password obfuscation sanitizes somewhat cleanly.
+ #
+
+ def test_log_sanitize_correctness(self):
+ url_data = repr(self._gen_data(3, True, True, self.URL_SECRET))
+ ssh_data = repr(self._gen_data(3, True, True, self.SSH_SECRET))
+
+ url_output = heuristic_log_sanitize(url_data)
+ ssh_output = heuristic_log_sanitize(ssh_data)
+
+ # Basic functionality: Successfully hid the password
+ try:
+ self.assertNotIn('pas:word', url_output)
+ self.assertNotIn('pas:word', ssh_output)
+
+ # Slightly more advanced, we hid all of the password despite the ":"
+ self.assertNotIn('pas', url_output)
+ self.assertNotIn('pas', ssh_output)
+ except AttributeError:
+ # python2.6 or less's unittest
+ self.assertFalse('pas:word' in url_output, '%s is present in %s' % ('"pas:word"', url_output))
+ self.assertFalse('pas:word' in ssh_output, '%s is present in %s' % ('"pas:word"', ssh_output))
+
+ self.assertFalse('pas' in url_output, '%s is present in %s' % ('"pas"', url_output))
+ self.assertFalse('pas' in ssh_output, '%s is present in %s' % ('"pas"', ssh_output))
+
+ # In this implementation we replace the password with 8 "*" which is
+ # also the length of our password. The url fields should be able to
+ # accurately detect where the password ends so the length should be
+ # the same:
+ self.assertEqual(len(url_output), len(url_data))
+
+ # ssh checking is harder as the heuristic is overzealous in many
+ # cases. Since the input will have at least one ":" present before
+ # the password we can tell some things about the beginning and end of
+ # the data, though:
+ self.assertTrue(ssh_output.startswith("{'"))
+ self.assertTrue(ssh_output.endswith("}"))
+ try:
+ self.assertIn(":********@foo.com/data'", ssh_output)
+ except AttributeError:
+ # python2.6 or less's unittest
+ self.assertTrue(":********@foo.com/data'" in ssh_output, '%s is not present in %s' % (":********@foo.com/data'", ssh_output))
+
+ # The overzealous-ness here may lead to us changing the algorithm in
+ # the future. We could make it consume less of the data (with the
+ # possibility of leaving partial passwords exposed) and encourage
+ # people to use no_log instead of relying on this obfuscation.
diff --git a/test/units/module_utils/test_database.py b/test/units/module_utils/test_database.py
new file mode 100644
index 0000000000..67da0b60e0
--- /dev/null
+++ b/test/units/module_utils/test_database.py
@@ -0,0 +1,118 @@
+import collections
+import mock
+import os
+import re
+
+from nose.tools import eq_
+try:
+ from nose.tools import assert_raises_regexp
+except ImportError:
+ # Python < 2.7
+ def assert_raises_regexp(expected, regexp, callable, *a, **kw):
+ try:
+ callable(*a, **kw)
+ except expected as e:
+ if isinstance(regexp, basestring):
+ regexp = re.compile(regexp)
+ if not regexp.search(str(e)):
+ raise Exception('"%s" does not match "%s"' %
+ (regexp.pattern, str(e)))
+ else:
+ if hasattr(expected,'__name__'): excName = expected.__name__
+ else: excName = str(expected)
+ raise AssertionError("%s not raised" % excName)
+
+from ansible.module_utils.database import (
+ pg_quote_identifier,
+ SQLParseError,
+)
+
+
+# Note: Using nose's generator test cases here so we can't inherit from
+# unittest.TestCase
+class TestQuotePgIdentifier(object):
+
+ # These are all valid strings
+ # The results are based on interpreting the identifier as a table name
+ valid = {
+ # User quoted
+ '"public.table"': '"public.table"',
+ '"public"."table"': '"public"."table"',
+ '"schema test"."table test"': '"schema test"."table test"',
+
+ # We quote part
+ 'public.table': '"public"."table"',
+ '"public".table': '"public"."table"',
+ 'public."table"': '"public"."table"',
+ 'schema test.table test': '"schema test"."table test"',
+ '"schema test".table test': '"schema test"."table test"',
+ 'schema test."table test"': '"schema test"."table test"',
+
+ # Embedded double quotes
+ 'table "test"': '"table ""test"""',
+ 'public."table ""test"""': '"public"."table ""test"""',
+ 'public.table "test"': '"public"."table ""test"""',
+ 'schema "test".table': '"schema ""test"""."table"',
+ '"schema ""test""".table': '"schema ""test"""."table"',
+ '"""wat"""."""test"""': '"""wat"""."""test"""',
+ # Sigh, handle these as well:
+ '"no end quote': '"""no end quote"',
+ 'schema."table': '"schema"."""table"',
+ '"schema.table': '"""schema"."table"',
+ 'schema."table.something': '"schema"."""table"."something"',
+
+ # Embedded dots
+ '"schema.test"."table.test"': '"schema.test"."table.test"',
+ '"schema.".table': '"schema."."table"',
+ '"schema."."table"': '"schema."."table"',
+ 'schema.".table"': '"schema".".table"',
+ '"schema".".table"': '"schema".".table"',
+ '"schema.".".table"': '"schema.".".table"',
+ # These are valid but maybe not what the user intended
+ '."table"': '".""table"""',
+ 'table.': '"table."',
+ }
+
+ invalid = {
+ ('test.too.many.dots', 'table'): 'PostgreSQL does not support table with more than 3 dots',
+ ('"test.too".many.dots', 'database'): 'PostgreSQL does not support database with more than 1 dots',
+ ('test.too."many.dots"', 'database'): 'PostgreSQL does not support database with more than 1 dots',
+ ('"test"."too"."many"."dots"', 'database'): "PostgreSQL does not support database with more than 1 dots",
+ ('"test"."too"."many"."dots"', 'schema'): "PostgreSQL does not support schema with more than 2 dots",
+ ('"test"."too"."many"."dots"', 'table'): "PostgreSQL does not support table with more than 3 dots",
+ ('"test"."too"."many"."dots"."for"."column"', 'column'): "PostgreSQL does not support column with more than 4 dots",
+ ('"table "invalid" double quote"', 'table'): 'User escaped identifiers must escape extra quotes',
+ ('"schema "invalid"""."table "invalid"', 'table'): 'User escaped identifiers must escape extra quotes',
+ ('"schema."table"','table'): 'User escaped identifiers must escape extra quotes',
+ ('"schema".', 'table'): 'Identifier name unspecified or unquoted trailing dot',
+ }
+
+ def check_valid_quotes(self, identifier, quoted_identifier):
+ eq_(pg_quote_identifier(identifier, 'table'), quoted_identifier)
+
+ def test_valid_quotes(self):
+ for identifier in self.valid:
+ yield self.check_valid_quotes, identifier, self.valid[identifier]
+
+ def check_invalid_quotes(self, identifier, id_type, msg):
+ assert_raises_regexp(SQLParseError, msg, pg_quote_identifier, *(identifier, id_type))
+
+ def test_invalid_quotes(self):
+ for test in self.invalid:
+ yield self.check_invalid_quotes, test[0], test[1], self.invalid[test]
+
+ def test_how_many_dots(self):
+ eq_(pg_quote_identifier('role', 'role'), '"role"')
+ assert_raises_regexp(SQLParseError, "PostgreSQL does not support role with more than 1 dots", pg_quote_identifier, *('role.more', 'role'))
+
+ eq_(pg_quote_identifier('db', 'database'), '"db"')
+ assert_raises_regexp(SQLParseError, "PostgreSQL does not support database with more than 1 dots", pg_quote_identifier, *('db.more', 'database'))
+
+ eq_(pg_quote_identifier('db.schema', 'schema'), '"db"."schema"')
+ assert_raises_regexp(SQLParseError, "PostgreSQL does not support schema with more than 2 dots", pg_quote_identifier, *('db.schema.more', 'schema'))
+
+ eq_(pg_quote_identifier('db.schema.table', 'table'), '"db"."schema"."table"')
+ assert_raises_regexp(SQLParseError, "PostgreSQL does not support table with more than 3 dots", pg_quote_identifier, *('db.schema.table.more', 'table'))
+
+ eq_(pg_quote_identifier('db.schema.table.column', 'column'), '"db"."schema"."table"."column"')
+ assert_raises_regexp(SQLParseError, "PostgreSQL does not support column with more than 4 dots", pg_quote_identifier, *('db.schema.table.column.more', 'column'))
From 8f71e47a73ad2be41a27e9a0a55a480e67389bd4 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Sun, 17 May 2015 09:23:39 -0700
Subject: [PATCH 094/971] Update core and extras module refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index b92ed6e9da..71f16f5d41 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit b92ed6e9da7784743976ade2affef63c8ddfedaf
+Subproject commit 71f16f5d418149057c85b34a2916d7421c7cc67c
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 8c8a0e1b8d..d590de8c4e 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 8c8a0e1b8dc4b51721b313fcabb9bb5bd8a6d26f
+Subproject commit d590de8c4ef976d571264d6050b0abc59a82bde2
From 684e30a5f4cd6e56a1531dd6652b33b1ed78e4bd Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Mon, 18 May 2015 09:00:16 -0700
Subject: [PATCH 095/971] Update submodule refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 71f16f5d41..3dd0f2c40f 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 71f16f5d418149057c85b34a2916d7421c7cc67c
+Subproject commit 3dd0f2c40f9dbc2311021e072a06671cd3da681a
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index d590de8c4e..20bf6d825e 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit d590de8c4ef976d571264d6050b0abc59a82bde2
+Subproject commit 20bf6d825e807a590585f944c405d83c53704f43
From 2e07567c16bdd339f2305ee67e23ede60ba9a3ce Mon Sep 17 00:00:00 2001
From: Hugh Saunders
Date: Fri, 27 Mar 2015 18:24:33 +0000
Subject: [PATCH 096/971] Retry exec command via ssh_retry
This PR adds the option to retry failed ssh executions, if the failure
is caused by ssh itself, not the remote command. This can be helpful if
there are transient network issues. Retries are only implemented in the
openssh connection plugin and are disabled by default. Retries are
enabled by setting ssh_connection > retries to an integer greater
than 0.
Running a long series of playbooks, or a short playbook against a large
cluster may result in transient ssh failures, some examples logged
[here](https://trello.com/c/1yh6csEQ/13-ssh-errors).
Ansible should be able to retry an ssh connection in order to survive
transient failures.
Ansible marks a host as failed the first time it fails to contact it.
---
lib/ansible/constants.py | 2 +
v1/ansible/runner/connection_plugins/ssh.py | 67 +++++++++++++++++----
2 files changed, 58 insertions(+), 11 deletions(-)
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index d24dc311a7..9c1c820421 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -195,7 +195,9 @@ RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path'
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', "%(directory)s/ansible-ssh-%%h-%%p-%%r")
ANSIBLE_SSH_PIPELINING = get_config(p, 'ssh_connection', 'pipelining', 'ANSIBLE_SSH_PIPELINING', False, boolean=True)
+ANSIBLE_SSH_RETRIES = get_config(p, 'ssh_connection', 'retries', 'ANSIBLE_SSH_RETRIES', 0, integer=True)
PARAMIKO_RECORD_HOST_KEYS = get_config(p, 'paramiko_connection', 'record_host_keys', 'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS', True, boolean=True)
+
# obsolete -- will be formally removed
ZEROMQ_PORT = get_config(p, 'fireball_connection', 'zeromq_port', 'ANSIBLE_ZEROMQ_PORT', 5099, integer=True)
ACCELERATE_PORT = get_config(p, 'accelerate', 'accelerate_port', 'ACCELERATE_PORT', 5099, integer=True)
diff --git a/v1/ansible/runner/connection_plugins/ssh.py b/v1/ansible/runner/connection_plugins/ssh.py
index 036175f6a9..ff7e8e03c8 100644
--- a/v1/ansible/runner/connection_plugins/ssh.py
+++ b/v1/ansible/runner/connection_plugins/ssh.py
@@ -16,21 +16,22 @@
# along with Ansible. If not, see .
#
-import os
-import re
-import subprocess
-import shlex
-import pipes
-import random
-import select
import fcntl
-import hmac
-import pwd
import gettext
+import hmac
+import os
+import pipes
import pty
+import pwd
+import random
+import re
+import select
+import shlex
+import subprocess
+import time
from hashlib import sha1
import ansible.constants as C
-from ansible.callbacks import vvv
+from ansible.callbacks import vvv, vv
from ansible import errors
from ansible import utils
@@ -256,7 +257,51 @@ class Connection(object):
vvv("EXEC previous known host file not found for %s" % host)
return True
- def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
+ def exec_command(self, *args, **kwargs):
+ """ Wrapper around _exec_command to retry in the case of an ssh
+ failure
+
+ Will retry if:
+ * an exception is caught
+ * ssh returns 255
+
+ Will not retry if
+ * remaining_tries is <2
+ * retries limit reached
+ """
+ remaining_tries = C.get_config(
+ C.p, 'ssh_connection', 'retries',
+ 'ANSIBLE_SSH_RETRIES', 3, integer=True) + 1
+ cmd_summary = "%s %s..." % (args[0], str(kwargs)[:200])
+ for attempt in xrange(remaining_tries):
+ pause = 2 ** attempt - 1
+ if pause > 30:
+ pause = 30
+ time.sleep(pause)
+ try:
+ return_tuple = self._exec_command(*args, **kwargs)
+ except Exception as e:
+ msg = ("ssh_retry: attempt: %d, caught exception(%s) from cmd "
+ "(%s).") % (attempt, e, cmd_summary)
+ vv(msg)
+ if attempt == remaining_tries - 1:
+ raise e
+ else:
+ continue
+ # 0 = success
+ # 1-254 = remote command return code
+ # 255 = failure from the ssh command itself
+ if return_tuple[0] != 255:
+ break
+ else:
+ msg = ('ssh_retry: attempt: %d, ssh return code is 255. cmd '
+ '(%s).') % (attempt, cmd_summary)
+ vv(msg)
+
+ return return_tuple
+
+
+ def _exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
From 21fa385ce72d337434e462e33b4b9dcaecceda52 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 18 May 2015 17:26:59 -0700
Subject: [PATCH 097/971] Reorganizing plugin unit tests and adding start of
strategy tests (v2)
---
lib/ansible/plugins/strategies/__init__.py | 7 +-
test/units/plugins/action/__init__.py | 21 +++
test/units/plugins/cache/__init__.py | 21 +++
test/units/plugins/{ => cache}/test_cache.py | 0
test/units/plugins/callback/__init__.py | 21 +++
test/units/plugins/connections/__init__.py | 21 +++
.../{ => connections}/test_connection.py | 0
test/units/plugins/filter/__init__.py | 21 +++
test/units/plugins/inventory/__init__.py | 21 +++
test/units/plugins/lookup/__init__.py | 21 +++
test/units/plugins/shell/__init__.py | 21 +++
test/units/plugins/strategies/__init__.py | 21 +++
.../plugins/strategies/test_strategy_base.py | 127 ++++++++++++++++++
test/units/plugins/vars/__init__.py | 21 +++
14 files changed, 339 insertions(+), 5 deletions(-)
create mode 100644 test/units/plugins/action/__init__.py
create mode 100644 test/units/plugins/cache/__init__.py
rename test/units/plugins/{ => cache}/test_cache.py (100%)
create mode 100644 test/units/plugins/callback/__init__.py
create mode 100644 test/units/plugins/connections/__init__.py
rename test/units/plugins/{ => connections}/test_connection.py (100%)
create mode 100644 test/units/plugins/filter/__init__.py
create mode 100644 test/units/plugins/inventory/__init__.py
create mode 100644 test/units/plugins/lookup/__init__.py
create mode 100644 test/units/plugins/shell/__init__.py
create mode 100644 test/units/plugins/strategies/__init__.py
create mode 100644 test/units/plugins/strategies/test_strategy_base.py
create mode 100644 test/units/plugins/vars/__init__.py
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index a3668ba089..7cc1709e08 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -61,7 +61,6 @@ class StrategyBase:
self._inventory = tqm.get_inventory()
self._workers = tqm.get_workers()
self._notified_handlers = tqm.get_notified_handlers()
- #self._callback = tqm.get_callback()
self._variable_manager = tqm.get_variable_manager()
self._loader = tqm.get_loader()
self._final_q = tqm._final_q
@@ -80,8 +79,6 @@ class StrategyBase:
num_failed = len(self._tqm._failed_hosts)
num_unreachable = len(self._tqm._unreachable_hosts)
- #debug("running the cleanup portion of the play")
- #result &= self.cleanup(iterator, connection_info)
debug("running handlers")
result &= self.run_handlers(iterator, connection_info)
@@ -99,6 +96,7 @@ class StrategyBase:
return 0
def get_hosts_remaining(self, play):
+ print("inventory get hosts: %s" % self._inventory.get_hosts(play.hosts))
return [host for host in self._inventory.get_hosts(play.hosts) if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts]
def get_failed_hosts(self, play):
@@ -119,13 +117,12 @@ class StrategyBase:
if self._cur_worker >= len(self._workers):
self._cur_worker = 0
- self._pending_results += 1
-
# create a dummy object with plugin loaders set as an easier
# way to share them with the forked processes
shared_loader_obj = SharedPluginLoaderObj()
main_q.put((host, task, self._loader.get_basedir(), task_vars, connection_info, shared_loader_obj), block=False)
+ self._pending_results += 1
except (EOFError, IOError, AssertionError) as e:
# most likely an abort
debug("got an error while queuing: %s" % e)
diff --git a/test/units/plugins/action/__init__.py b/test/units/plugins/action/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/action/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/cache/__init__.py b/test/units/plugins/cache/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/cache/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/test_cache.py b/test/units/plugins/cache/test_cache.py
similarity index 100%
rename from test/units/plugins/test_cache.py
rename to test/units/plugins/cache/test_cache.py
diff --git a/test/units/plugins/callback/__init__.py b/test/units/plugins/callback/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/callback/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/connections/__init__.py b/test/units/plugins/connections/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/connections/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/test_connection.py b/test/units/plugins/connections/test_connection.py
similarity index 100%
rename from test/units/plugins/test_connection.py
rename to test/units/plugins/connections/test_connection.py
diff --git a/test/units/plugins/filter/__init__.py b/test/units/plugins/filter/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/filter/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/inventory/__init__.py b/test/units/plugins/inventory/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/inventory/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/lookup/__init__.py b/test/units/plugins/lookup/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/lookup/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/shell/__init__.py b/test/units/plugins/shell/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/shell/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/strategies/__init__.py b/test/units/plugins/strategies/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/strategies/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/test/units/plugins/strategies/test_strategy_base.py b/test/units/plugins/strategies/test_strategy_base.py
new file mode 100644
index 0000000000..36e22a9719
--- /dev/null
+++ b/test/units/plugins/strategies/test_strategy_base.py
@@ -0,0 +1,127 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.compat.tests import unittest
+from ansible.compat.tests.mock import patch, MagicMock
+
+from ansible.plugins.strategies import StrategyBase
+from ansible.executor.task_queue_manager import TaskQueueManager
+
+from units.mock.loader import DictDataLoader
+
+class TestVariableManager(unittest.TestCase):
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def test_strategy_base_init(self):
+ mock_tqm = MagicMock(TaskQueueManager)
+ mock_tqm._final_q = MagicMock()
+ strategy_base = StrategyBase(tqm=mock_tqm)
+
+ def test_strategy_base_run(self):
+ mock_tqm = MagicMock(TaskQueueManager)
+ mock_tqm._final_q = MagicMock()
+ mock_tqm._stats = MagicMock()
+ mock_tqm.send_callback.return_value = None
+
+ mock_iterator = MagicMock()
+ mock_iterator._play = MagicMock()
+ mock_iterator._play.handlers = []
+
+ mock_conn_info = MagicMock()
+
+ mock_tqm._failed_hosts = []
+ mock_tqm._unreachable_hosts = []
+ strategy_base = StrategyBase(tqm=mock_tqm)
+
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, connection_info=mock_conn_info), 0)
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, connection_info=mock_conn_info, result=False), 1)
+ mock_tqm._failed_hosts = ["host1"]
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, connection_info=mock_conn_info, result=False), 2)
+ mock_tqm._unreachable_hosts = ["host1"]
+ self.assertEqual(strategy_base.run(iterator=mock_iterator, connection_info=mock_conn_info, result=False), 3)
+
+ def test_strategy_base_get_hosts(self):
+ mock_hosts = []
+ for i in range(0, 5):
+ mock_host = MagicMock()
+ mock_host.name = "host%02d" % (i+1)
+ mock_hosts.append(mock_host)
+
+ mock_inventory = MagicMock()
+ mock_inventory.get_hosts.return_value = mock_hosts
+
+ mock_tqm = MagicMock()
+ mock_tqm._final_q = MagicMock()
+ mock_tqm.get_inventory.return_value = mock_inventory
+
+ mock_play = MagicMock()
+ mock_play.hosts = ["host%02d" % (i+1) for i in range(0, 5)]
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+
+ mock_tqm._failed_hosts = []
+ mock_tqm._unreachable_hosts = []
+ self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts)
+
+ mock_tqm._failed_hosts = ["host01"]
+ self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[1:])
+ self.assertEqual(strategy_base.get_failed_hosts(play=mock_play), [mock_hosts[0]])
+
+ mock_tqm._unreachable_hosts = ["host02"]
+ self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[2:])
+
+ def test_strategy_base_queue_task(self):
+ fake_loader = DictDataLoader()
+
+ workers = []
+ for i in range(0, 3):
+ worker_main_q = MagicMock()
+ worker_main_q.put.return_value = None
+ worker_result_q = MagicMock()
+ workers.append([i, worker_main_q, worker_result_q])
+
+ mock_tqm = MagicMock()
+ mock_tqm._final_q = MagicMock()
+ mock_tqm.get_workers.return_value = workers
+ mock_tqm.get_loader.return_value = fake_loader
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._cur_worker = 0
+ strategy_base._pending_results = 0
+ strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), connection_info=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 1)
+ self.assertEqual(strategy_base._pending_results, 1)
+ strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), connection_info=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 2)
+ self.assertEqual(strategy_base._pending_results, 2)
+ strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), connection_info=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 0)
+ self.assertEqual(strategy_base._pending_results, 3)
+ workers[0][1].put.side_effect = EOFError
+ strategy_base._queue_task(host=MagicMock(), task=MagicMock(), task_vars=dict(), connection_info=MagicMock())
+ self.assertEqual(strategy_base._cur_worker, 1)
+ self.assertEqual(strategy_base._pending_results, 3)
+
diff --git a/test/units/plugins/vars/__init__.py b/test/units/plugins/vars/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/test/units/plugins/vars/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
From 9a88e0fc8e0ba40cf60cb6d1e021e2080863df19 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 19 May 2015 10:45:48 -0400
Subject: [PATCH 098/971] removed empty choices from files
---
lib/ansible/utils/module_docs_fragments/files.py | 7 -------
1 file changed, 7 deletions(-)
diff --git a/lib/ansible/utils/module_docs_fragments/files.py b/lib/ansible/utils/module_docs_fragments/files.py
index adff1f2f1b..5087c0cf50 100644
--- a/lib/ansible/utils/module_docs_fragments/files.py
+++ b/lib/ansible/utils/module_docs_fragments/files.py
@@ -24,25 +24,21 @@ options:
mode:
required: false
default: null
- choices: []
description:
- mode the file or directory should be, such as 0644 as would be fed to I(chmod). As of version 1.8, the mode may be specified as a symbolic mode (for example, C(u+rwx) or C(u=rw,g=r,o=r)).
owner:
required: false
default: null
- choices: []
description:
- name of the user that should own the file/directory, as would be fed to I(chown)
group:
required: false
default: null
- choices: []
description:
- name of the group that should own the file/directory, as would be fed to I(chown)
seuser:
required: false
default: null
- choices: []
description:
- user part of SELinux file context. Will default to system policy, if
applicable. If set to C(_default), it will use the C(user) portion of the
@@ -50,19 +46,16 @@ options:
serole:
required: false
default: null
- choices: []
description:
- role part of SELinux file context, C(_default) feature works as for I(seuser).
setype:
required: false
default: null
- choices: []
description:
- type part of SELinux file context, C(_default) feature works as for I(seuser).
selevel:
required: false
default: "s0"
- choices: []
description:
- level part of the SELinux file context. This is the MLS/MCS attribute,
sometimes known as the C(range). C(_default) feature works as for
From 8da580a29c0722e6c939677e155e9780a3fac821 Mon Sep 17 00:00:00 2001
From: Rene Moser
Date: Tue, 19 May 2015 17:34:39 +0200
Subject: [PATCH 099/971] basic: fix ValueError if value of a type='int' is not
an int
With this fix, we get a friendly error message:
failed: [localhost] => {"failed": true}
msg: value of argument start_port is not of type int and we were unable to automatically convert
---
lib/ansible/module_utils/basic.py | 101 +++++++++++++++---------------
1 file changed, 52 insertions(+), 49 deletions(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 1f0abb1776..237cb5b106 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -1016,57 +1016,60 @@ class AnsibleModule(object):
value = self.params[k]
is_invalid = False
- if wanted == 'str':
- if not isinstance(value, basestring):
- self.params[k] = str(value)
- elif wanted == 'list':
- if not isinstance(value, list):
- if isinstance(value, basestring):
- self.params[k] = value.split(",")
- elif isinstance(value, int) or isinstance(value, float):
- self.params[k] = [ str(value) ]
- else:
- is_invalid = True
- elif wanted == 'dict':
- if not isinstance(value, dict):
- if isinstance(value, basestring):
- if value.startswith("{"):
- try:
- self.params[k] = json.loads(value)
- except:
- (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
- if exc is not None:
- self.fail_json(msg="unable to evaluate dictionary for %s" % k)
- self.params[k] = result
- elif '=' in value:
- self.params[k] = dict([x.strip().split("=", 1) for x in value.split(",")])
+ try:
+ if wanted == 'str':
+ if not isinstance(value, basestring):
+ self.params[k] = str(value)
+ elif wanted == 'list':
+ if not isinstance(value, list):
+ if isinstance(value, basestring):
+ self.params[k] = value.split(",")
+ elif isinstance(value, int) or isinstance(value, float):
+ self.params[k] = [ str(value) ]
else:
- self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
- else:
- is_invalid = True
- elif wanted == 'bool':
- if not isinstance(value, bool):
- if isinstance(value, basestring):
- self.params[k] = self.boolean(value)
- else:
- is_invalid = True
- elif wanted == 'int':
- if not isinstance(value, int):
- if isinstance(value, basestring):
- self.params[k] = int(value)
- else:
- is_invalid = True
- elif wanted == 'float':
- if not isinstance(value, float):
- if isinstance(value, basestring):
- self.params[k] = float(value)
- else:
- is_invalid = True
- else:
- self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
+ is_invalid = True
+ elif wanted == 'dict':
+ if not isinstance(value, dict):
+ if isinstance(value, basestring):
+ if value.startswith("{"):
+ try:
+ self.params[k] = json.loads(value)
+ except:
+ (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
+ if exc is not None:
+ self.fail_json(msg="unable to evaluate dictionary for %s" % k)
+ self.params[k] = result
+ elif '=' in value:
+ self.params[k] = dict([x.strip().split("=", 1) for x in value.split(",")])
+ else:
+ self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
+ else:
+ is_invalid = True
+ elif wanted == 'bool':
+ if not isinstance(value, bool):
+ if isinstance(value, basestring):
+ self.params[k] = self.boolean(value)
+ else:
+ is_invalid = True
+ elif wanted == 'int':
+ if not isinstance(value, int):
+ if isinstance(value, basestring):
+ self.params[k] = int(value)
+ else:
+ is_invalid = True
+ elif wanted == 'float':
+ if not isinstance(value, float):
+ if isinstance(value, basestring):
+ self.params[k] = float(value)
+ else:
+ is_invalid = True
+ else:
+ self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
- if is_invalid:
- self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
+ if is_invalid:
+ self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
+ except ValueError, e:
+ self.fail_json(msg="value of argument %s is not of type %s and we were unable to automatically convert" % (k, wanted))
def _set_defaults(self, pre=True):
for (k,v) in self.argument_spec.iteritems():
From b48be7c484a723fdd73f08e6bb5d725b24eeea02 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Tue, 19 May 2015 14:27:54 -0700
Subject: [PATCH 100/971] Update submodule refs for v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 3dd0f2c40f..c935d4dc08 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 3dd0f2c40f9dbc2311021e072a06671cd3da681a
+Subproject commit c935d4dc08949df92fd08c28caf6419687f21df8
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 20bf6d825e..fefbf7c41a 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 20bf6d825e807a590585f944c405d83c53704f43
+Subproject commit fefbf7c41a0b24097e9696aafcb57154eee6665b
From cc51e6b7c217816836901aa312195de80ba4c9fb Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 20 May 2015 18:12:09 -0700
Subject: [PATCH 101/971] Update submodule refs in v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index c935d4dc08..cbbe4196bd 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit c935d4dc08949df92fd08c28caf6419687f21df8
+Subproject commit cbbe4196bdb047a2d8e9f1132519a0de55fa0c5a
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index fefbf7c41a..8fb19f0e47 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit fefbf7c41a0b24097e9696aafcb57154eee6665b
+Subproject commit 8fb19f0e47b6992db89adcaade7f38225c552107
From 9921a1d2be0a254fe17e40d925a3fe36399e2f87 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 21 May 2015 02:03:38 -0500
Subject: [PATCH 102/971] Unit tests for base strategy class (v2)
---
lib/ansible/plugins/strategies/__init__.py | 59 -----
.../plugins/strategies/test_strategy_base.py | 230 +++++++++++++++++-
2 files changed, 229 insertions(+), 60 deletions(-)
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index 7cc1709e08..e933ca73d4 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -236,8 +236,6 @@ class StrategyBase:
debug("waiting for pending results (%d left)" % self._pending_results)
results = self._process_pending_results(iterator)
ret_results.extend(results)
- if self._tqm._terminated:
- break
time.sleep(0.01)
return ret_results
@@ -336,63 +334,6 @@ class StrategyBase:
return block_list
- def cleanup(self, iterator, connection_info):
- '''
- Iterates through failed hosts and runs any outstanding rescue/always blocks
- and handlers which may still need to be run after a failure.
- '''
-
- debug("in cleanup")
- result = True
-
- debug("getting failed hosts")
- failed_hosts = self.get_failed_hosts(iterator._play)
- if len(failed_hosts) == 0:
- debug("there are no failed hosts")
- return result
-
- debug("marking hosts failed in the iterator")
- # mark the host as failed in the iterator so it will take
- # any required rescue paths which may be outstanding
- for host in failed_hosts:
- iterator.mark_host_failed(host)
-
- debug("clearing the failed hosts list")
- # clear the failed hosts dictionary now while also
- for entry in self._tqm._failed_hosts.keys():
- del self._tqm._failed_hosts[entry]
-
- work_to_do = True
- while work_to_do:
- work_to_do = False
- for host in failed_hosts:
- host_name = host.name
-
- if host_name in self._tqm._failed_hosts:
- iterator.mark_host_failed(host)
- del self._tqm._failed_hosts[host_name]
-
- if host_name in self._blocked_hosts:
- work_to_do = True
- continue
- elif iterator.get_next_task_for_host(host, peek=True) and host_name not in self._tqm._unreachable_hosts:
- work_to_do = True
-
- # pop the task, mark the host blocked, and queue it
- self._blocked_hosts[host_name] = True
- task = iterator.get_next_task_for_host(host)
- task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
- self._tqm.send_callback('v2_playbook_on_cleanup_task_start', task)
- self._queue_task(host, task, task_vars, connection_info)
-
- self._process_pending_results(iterator)
- time.sleep(0.01)
-
- # no more work, wait until the queue is drained
- self._wait_on_pending_results(iterator)
-
- return result
-
def run_handlers(self, iterator, connection_info):
'''
Runs handlers on those hosts which have been notified.
diff --git a/test/units/plugins/strategies/test_strategy_base.py b/test/units/plugins/strategies/test_strategy_base.py
index 36e22a9719..7d8cb42ee6 100644
--- a/test/units/plugins/strategies/test_strategy_base.py
+++ b/test/units/plugins/strategies/test_strategy_base.py
@@ -22,12 +22,15 @@ __metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
+from ansible.errors import AnsibleError, AnsibleParserError
from ansible.plugins.strategies import StrategyBase
from ansible.executor.task_queue_manager import TaskQueueManager
+from ansible.executor.task_result import TaskResult
+from six.moves import queue as Queue
from units.mock.loader import DictDataLoader
-class TestVariableManager(unittest.TestCase):
+class TestStrategyBase(unittest.TestCase):
def setUp(self):
pass
@@ -125,3 +128,228 @@ class TestVariableManager(unittest.TestCase):
self.assertEqual(strategy_base._cur_worker, 1)
self.assertEqual(strategy_base._pending_results, 3)
+ def test_strategy_base_process_pending_results(self):
+ mock_tqm = MagicMock()
+ mock_tqm._terminated = False
+ mock_tqm._failed_hosts = dict()
+ mock_tqm._unreachable_hosts = dict()
+ mock_tqm.send_callback.return_value = None
+
+ queue_items = []
+ def _queue_empty(*args, **kwargs):
+ return len(queue_items) == 0
+ def _queue_get(*args, **kwargs):
+ if len(queue_items) == 0:
+ raise Queue.Empty
+ else:
+ return queue_items.pop()
+
+ mock_queue = MagicMock()
+ mock_queue.empty.side_effect = _queue_empty
+ mock_queue.get.side_effect = _queue_get
+ mock_tqm._final_q = mock_queue
+
+ mock_tqm._stats = MagicMock()
+ mock_tqm._stats.increment.return_value = None
+
+ mock_iterator = MagicMock()
+ mock_iterator.mark_host_failed.return_value = None
+
+ mock_host = MagicMock()
+ mock_host.name = 'test01'
+ mock_host.vars = dict()
+
+ mock_task = MagicMock()
+ mock_task._role = None
+ mock_task.ignore_errors = False
+
+ mock_group = MagicMock()
+ mock_group.add_host.return_value = None
+
+ def _get_host(host_name):
+ if host_name == 'test01':
+ return mock_host
+ return None
+ def _get_group(group_name):
+ if group_name in ('all', 'foo'):
+ return mock_group
+ return None
+
+ mock_inventory = MagicMock()
+ mock_inventory._hosts_cache = dict()
+ mock_inventory.get_host.side_effect = _get_host
+ mock_inventory.get_group.side_effect = _get_group
+ mock_inventory.clear_pattern_cache.return_value = None
+
+ mock_var_mgr = MagicMock()
+ mock_var_mgr.set_host_variable.return_value = None
+ mock_var_mgr.set_host_facts.return_value = None
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._inventory = mock_inventory
+ strategy_base._variable_manager = mock_var_mgr
+ strategy_base._blocked_hosts = dict()
+ strategy_base._notified_handlers = dict()
+
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+
+ task_result = TaskResult(host=mock_host, task=mock_task, return_data=dict(changed=True))
+ queue_items.append(('host_task_ok', task_result))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+
+ task_result = TaskResult(host=mock_host, task=mock_task, return_data='{"failed":true}')
+ queue_items.append(('host_task_failed', task_result))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ results = strategy_base._process_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+ self.assertIn('test01', mock_tqm._failed_hosts)
+ del mock_tqm._failed_hosts['test01']
+
+ task_result = TaskResult(host=mock_host, task=mock_task, return_data='{}')
+ queue_items.append(('host_unreachable', task_result))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+ self.assertIn('test01', mock_tqm._unreachable_hosts)
+ del mock_tqm._unreachable_hosts['test01']
+
+ task_result = TaskResult(host=mock_host, task=mock_task, return_data='{}')
+ queue_items.append(('host_task_skipped', task_result))
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+ results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 1)
+ self.assertEqual(results[0], task_result)
+ self.assertEqual(strategy_base._pending_results, 0)
+ self.assertNotIn('test01', strategy_base._blocked_hosts)
+
+ strategy_base._blocked_hosts['test01'] = True
+ strategy_base._pending_results = 1
+
+ queue_items.append(('add_host', dict(add_host=dict(host_name='newhost01', new_groups=['foo']))))
+ results = strategy_base._process_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+ self.assertEqual(strategy_base._pending_results, 1)
+ self.assertIn('test01', strategy_base._blocked_hosts)
+
+ queue_items.append(('add_group', mock_host, dict(add_group=dict(group_name='foo'))))
+ results = strategy_base._process_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+ self.assertEqual(strategy_base._pending_results, 1)
+ self.assertIn('test01', strategy_base._blocked_hosts)
+
+ queue_items.append(('notify_handler', mock_host, 'test handler'))
+ results = strategy_base._process_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+ self.assertEqual(strategy_base._pending_results, 1)
+ self.assertIn('test01', strategy_base._blocked_hosts)
+ self.assertIn('test handler', strategy_base._notified_handlers)
+ self.assertIn(mock_host, strategy_base._notified_handlers['test handler'])
+
+ queue_items.append(('set_host_var', mock_host, 'foo', 'bar'))
+ results = strategy_base._process_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+ self.assertEqual(strategy_base._pending_results, 1)
+
+ queue_items.append(('set_host_facts', mock_host, 'foo', dict()))
+ results = strategy_base._process_pending_results(iterator=mock_iterator)
+ self.assertEqual(len(results), 0)
+ self.assertEqual(strategy_base._pending_results, 1)
+
+ queue_items.append(('bad'))
+ self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
+
+ def test_strategy_base_load_included_file(self):
+ fake_loader = DictDataLoader({
+ "test.yml": """
+ - debug: msg='foo'
+ """,
+ "bad.yml": """
+ """,
+ })
+
+ mock_tqm = MagicMock()
+ mock_tqm._final_q = MagicMock()
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._loader = fake_loader
+
+ mock_play = MagicMock()
+
+ mock_block = MagicMock()
+ mock_block._play = mock_play
+ mock_block.vars = dict()
+
+ mock_task = MagicMock()
+ mock_task._block = mock_block
+ mock_task._role = None
+
+ mock_inc_file = MagicMock()
+ mock_inc_file._task = mock_task
+
+ mock_inc_file._filename = "test.yml"
+ res = strategy_base._load_included_file(included_file=mock_inc_file)
+
+ mock_inc_file._filename = "bad.yml"
+ self.assertRaises(AnsibleParserError, strategy_base._load_included_file, included_file=mock_inc_file)
+
+ def test_strategy_base_run_handlers(self):
+ workers = []
+ for i in range(0, 3):
+ worker_main_q = MagicMock()
+ worker_main_q.put.return_value = None
+ worker_result_q = MagicMock()
+ workers.append([i, worker_main_q, worker_result_q])
+
+ mock_tqm = MagicMock()
+ mock_tqm._final_q = MagicMock()
+ mock_tqm.get_workers.return_value = workers
+ mock_tqm.send_callback.return_value = None
+
+ mock_conn_info = MagicMock()
+
+ mock_handler_task = MagicMock()
+ mock_handler_task.get_name.return_value = "test handler"
+ mock_handler_task.has_triggered.return_value = False
+
+ mock_handler = MagicMock()
+ mock_handler.block = [mock_handler_task]
+ mock_handler.flag_for_host.return_value = False
+
+ mock_play = MagicMock()
+ mock_play.handlers = [mock_handler]
+
+ mock_host = MagicMock()
+ mock_host.name = "test01"
+
+ mock_iterator = MagicMock()
+
+ mock_inventory = MagicMock()
+ mock_inventory.get_hosts.return_value = [mock_host]
+
+ mock_var_mgr = MagicMock()
+ mock_var_mgr.get_vars.return_value = dict()
+
+ mock_iterator = MagicMock
+ mock_iterator._play = mock_play
+
+ strategy_base = StrategyBase(tqm=mock_tqm)
+ strategy_base._inventory = mock_inventory
+ strategy_base._notified_handlers = {"test handler": [mock_host]}
+
+ result = strategy_base.run_handlers(iterator=mock_iterator, connection_info=mock_conn_info)
From 04e15ab54f0edab7c89895dafe7d5ec2a9b60ae5 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 21 May 2015 07:53:00 -0700
Subject: [PATCH 103/971] Update v2 submodule refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index cbbe4196bd..e10a581abd 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit cbbe4196bdb047a2d8e9f1132519a0de55fa0c5a
+Subproject commit e10a581abdf375b855418897944d5206682994b6
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 8fb19f0e47..24390f1ac6 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 8fb19f0e47b6992db89adcaade7f38225c552107
+Subproject commit 24390f1ac69fe4731e143eab16120bc422fd6233
From ecd5eb902db1156206f2eb35aac42b340759d310 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Fri, 22 May 2015 03:32:40 -0500
Subject: [PATCH 104/971] Adding unit tests for ConnectionInformation (v2)
---
lib/ansible/executor/connection_info.py | 10 +-
.../executor/test_connection_information.py | 153 ++++++++++++++++++
2 files changed, 154 insertions(+), 9 deletions(-)
create mode 100644 test/units/executor/test_connection_information.py
diff --git a/lib/ansible/executor/connection_info.py b/lib/ansible/executor/connection_info.py
index bf78cf63a5..424ac062b3 100644
--- a/lib/ansible/executor/connection_info.py
+++ b/lib/ansible/executor/connection_info.py
@@ -88,14 +88,6 @@ class ConnectionInformation:
if play:
self.set_play(play)
- def __repr__(self):
- value = "CONNECTION INFO:\n"
- fields = self._get_fields()
- fields.sort()
- for field in fields:
- value += "%20s : %s\n" % (field, getattr(self, field))
- return value
-
def set_play(self, play):
'''
Configures this connection information instance with data from
@@ -199,7 +191,7 @@ class ConnectionInformation:
for attr in ('connection', 'remote_user', 'become', 'become_user', 'become_pass', 'become_method', 'environment', 'no_log'):
if hasattr(task, attr):
attr_val = getattr(task, attr)
- if attr_val:
+ if attr_val is not None:
setattr(new_info, attr, attr_val)
# finally, use the MAGIC_VARIABLE_MAPPING dictionary to update this
diff --git a/test/units/executor/test_connection_information.py b/test/units/executor/test_connection_information.py
new file mode 100644
index 0000000000..13b14c25de
--- /dev/null
+++ b/test/units/executor/test_connection_information.py
@@ -0,0 +1,153 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.compat.tests import unittest
+from ansible.compat.tests.mock import patch, MagicMock
+
+from ansible import constants as C
+from ansible.cli import CLI
+from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.executor.connection_info import ConnectionInformation
+
+from units.mock.loader import DictDataLoader
+
+class TestConnectionInformation(unittest.TestCase):
+
+ def setUp(self):
+ self._parser = CLI.base_parser(
+ runas_opts = True,
+ meta_opts = True,
+ runtask_opts = True,
+ vault_opts = True,
+ async_opts = True,
+ connect_opts = True,
+ subset_opts = True,
+ check_opts = True,
+ diff_opts = True,
+ )
+
+ def tearDown(self):
+ pass
+
+ def test_connection_info(self):
+ (options, args) = self._parser.parse_args(['-vv', '--check'])
+ conn_info = ConnectionInformation(options=options)
+ self.assertEqual(conn_info.connection, 'smart')
+ self.assertEqual(conn_info.remote_addr, None)
+ self.assertEqual(conn_info.remote_user, 'root')
+ self.assertEqual(conn_info.password, '')
+ self.assertEqual(conn_info.port, None)
+ self.assertEqual(conn_info.private_key_file, C.DEFAULT_PRIVATE_KEY_FILE)
+ self.assertEqual(conn_info.timeout, C.DEFAULT_TIMEOUT)
+ self.assertEqual(conn_info.shell, None)
+ self.assertEqual(conn_info.verbosity, 2)
+ self.assertEqual(conn_info.check_mode, True)
+ self.assertEqual(conn_info.no_log, False)
+
+ mock_play = MagicMock()
+ mock_play.connection = 'mock'
+ mock_play.remote_user = 'mock'
+ mock_play.port = 1234
+ mock_play.become = True
+ mock_play.become_method = 'mock'
+ mock_play.become_user = 'mockroot'
+ mock_play.become_pass = 'mockpass'
+ mock_play.no_log = True
+ mock_play.environment = dict(mock='mockenv')
+
+ conn_info = ConnectionInformation(play=mock_play, options=options)
+ self.assertEqual(conn_info.connection, 'mock')
+ self.assertEqual(conn_info.remote_user, 'mock')
+ self.assertEqual(conn_info.password, '')
+ self.assertEqual(conn_info.port, 1234)
+ self.assertEqual(conn_info.no_log, True)
+ self.assertEqual(conn_info.environment, dict(mock="mockenv"))
+ self.assertEqual(conn_info.become, True)
+ self.assertEqual(conn_info.become_method, "mock")
+ self.assertEqual(conn_info.become_user, "mockroot")
+ self.assertEqual(conn_info.become_pass, "mockpass")
+
+ mock_task = MagicMock()
+ mock_task.connection = 'mocktask'
+ mock_task.remote_user = 'mocktask'
+ mock_task.become = True
+ mock_task.become_method = 'mocktask'
+ mock_task.become_user = 'mocktaskroot'
+ mock_task.become_pass = 'mocktaskpass'
+ mock_task.no_log = False
+ mock_task.environment = dict(mock='mocktaskenv')
+
+ mock_host = MagicMock()
+ mock_host.get_vars.return_value = dict(
+ ansible_connection = 'mock_inventory',
+ ansible_ssh_port = 4321,
+ )
+
+ conn_info = ConnectionInformation(play=mock_play, options=options)
+ conn_info = conn_info.set_task_and_host_override(task=mock_task, host=mock_host)
+ self.assertEqual(conn_info.connection, 'mock_inventory')
+ self.assertEqual(conn_info.remote_user, 'mocktask')
+ self.assertEqual(conn_info.port, 4321)
+ self.assertEqual(conn_info.no_log, False)
+ self.assertEqual(conn_info.environment, dict(mock="mocktaskenv"))
+ self.assertEqual(conn_info.become, True)
+ self.assertEqual(conn_info.become_method, "mocktask")
+ self.assertEqual(conn_info.become_user, "mocktaskroot")
+ self.assertEqual(conn_info.become_pass, "mocktaskpass")
+
+ def test_connection_info_make_become_cmd(self):
+ (options, args) = self._parser.parse_args([])
+ conn_info = ConnectionInformation(options=options)
+
+ default_cmd = "/bin/foo"
+ default_exe = "/bin/bash"
+ sudo_exe = C.DEFAULT_SUDO_EXE
+ sudo_flags = C.DEFAULT_SUDO_FLAGS
+ su_exe = C.DEFAULT_SU_EXE
+ su_flags = C.DEFAULT_SU_FLAGS
+ pbrun_exe = 'pbrun'
+ pbrun_flags = ''
+
+ (cmd, prompt, key) = conn_info.make_become_cmd(cmd=default_cmd, executable=default_exe)
+ self.assertEqual(cmd, default_cmd)
+
+ conn_info.become = True
+ conn_info.become_user = 'foo'
+
+ conn_info.become_method = 'sudo'
+ (cmd, prompt, key) = conn_info.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
+ self.assertEqual(cmd, """%s -c '%s -k && %s %s -S -p "%s" -u %s %s -c '"'"'echo %s; %s'"'"''""" % (default_exe, sudo_exe, sudo_exe, sudo_flags, prompt, conn_info.become_user, default_exe, key, default_cmd))
+
+ conn_info.become_method = 'su'
+ (cmd, prompt, key) = conn_info.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
+ self.assertEqual(cmd, """%s -c '%s %s -c "%s -c '"'"'echo %s; %s'"'"'"'""" % (default_exe, su_exe, conn_info.become_user, default_exe, key, default_cmd))
+
+ conn_info.become_method = 'pbrun'
+ (cmd, prompt, key) = conn_info.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
+ self.assertEqual(cmd, """%s -c '%s -b -l %s -u %s '"'"'echo %s; %s'"'"''""" % (default_exe, pbrun_exe, pbrun_flags, conn_info.become_user, key, default_cmd))
+
+ conn_info.become_method = 'pfexec'
+ (cmd, prompt, key) = conn_info.make_become_cmd(cmd=default_cmd, executable="/bin/bash")
+ self.assertEqual(cmd, """%s -c '%s %s "'"'"'echo %s; %s'"'"'"'""" % (default_exe, pbrun_exe, pbrun_flags, key, default_cmd))
+
+ conn_info.become_method = 'bad'
+ self.assertRaises(AnsibleError, conn_info.make_become_cmd, cmd=default_cmd, executable="/bin/bash")
+
From 838ff320019d4858024950977279a62ad2bed10d Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Fri, 22 May 2015 08:38:39 -0500
Subject: [PATCH 105/971] Fix unit test for conn_info (v2)
The default user expected in the connection information is the current
user, not root
---
test/units/executor/test_connection_information.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/test/units/executor/test_connection_information.py b/test/units/executor/test_connection_information.py
index 13b14c25de..65575c0f93 100644
--- a/test/units/executor/test_connection_information.py
+++ b/test/units/executor/test_connection_information.py
@@ -19,6 +19,9 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import pwd
+import os
+
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
@@ -52,7 +55,7 @@ class TestConnectionInformation(unittest.TestCase):
conn_info = ConnectionInformation(options=options)
self.assertEqual(conn_info.connection, 'smart')
self.assertEqual(conn_info.remote_addr, None)
- self.assertEqual(conn_info.remote_user, 'root')
+ self.assertEqual(conn_info.remote_user, pwd.getpwuid(os.geteuid())[0])
self.assertEqual(conn_info.password, '')
self.assertEqual(conn_info.port, None)
self.assertEqual(conn_info.private_key_file, C.DEFAULT_PRIVATE_KEY_FILE)
From 301019059272ab0a1b288a20c9772107b592dccd Mon Sep 17 00:00:00 2001
From: Florian Apolloner
Date: Sat, 23 May 2015 08:42:17 -0500
Subject: [PATCH 106/971] Fixing up the hacking module_formatter code for v2
---
hacking/module_formatter.py | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py
index c3aca94949..9002b9d8d1 100755
--- a/hacking/module_formatter.py
+++ b/hacking/module_formatter.py
@@ -33,8 +33,8 @@ import subprocess
import cgi
from jinja2 import Environment, FileSystemLoader
-import ansible.utils
-import ansible.utils.module_docs as module_docs
+from ansible.utils import module_docs
+from ansible.utils.vars import merge_hash
#####################################################################################
# constants and paths
@@ -135,7 +135,7 @@ def list_modules(module_dir, depth=0):
res = list_modules(d, depth + 1)
for key in res.keys():
if key in categories:
- categories[key] = ansible.utils.merge_hash(categories[key], res[key])
+ categories[key] = merge_hash(categories[key], res[key])
res.pop(key, None)
if depth < 2:
@@ -236,11 +236,11 @@ def process_module(module, options, env, template, outputname, module_map, alias
print "rendering: %s" % module
# use ansible core library to parse out doc metadata YAML and plaintext examples
- doc, examples, returndocs= ansible.utils.module_docs.get_docstring(fname, verbose=options.verbose)
+ doc, examples, returndocs = module_docs.get_docstring(fname, verbose=options.verbose)
# crash if module is missing documentation and not explicitly hidden from docs index
if doc is None:
- if module in ansible.utils.module_docs.BLACKLIST_MODULES:
+ if module in module_docs.BLACKLIST_MODULES:
return "SKIPPED"
else:
sys.stderr.write("*** ERROR: MODULE MISSING DOCUMENTATION: %s, %s ***\n" % (fname, module))
@@ -278,8 +278,9 @@ def process_module(module, options, env, template, outputname, module_map, alias
if added and added_float < TO_OLD_TO_BE_NOTABLE:
del doc['version_added']
- for (k,v) in doc['options'].iteritems():
- all_keys.append(k)
+ if 'options' in doc:
+ for (k,v) in doc['options'].iteritems():
+ all_keys.append(k)
all_keys = sorted(all_keys)
From d4a31e8d26e22f160a6a433fd6f21da8c0435b70 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Sun, 24 May 2015 07:47:06 -0500
Subject: [PATCH 107/971] Adding unit tests for TaskExecutor (v2)
---
test/units/executor/test_task_executor.py | 324 ++++++++++++++++++++++
1 file changed, 324 insertions(+)
create mode 100644 test/units/executor/test_task_executor.py
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
new file mode 100644
index 0000000000..64ce1d5faa
--- /dev/null
+++ b/test/units/executor/test_task_executor.py
@@ -0,0 +1,324 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.compat.tests import unittest
+from ansible.compat.tests.mock import patch, MagicMock
+
+from ansible.errors import AnsibleError, AnsibleParserError
+from ansible.executor.connection_info import ConnectionInformation
+from ansible.executor.task_executor import TaskExecutor
+from ansible.plugins import action_loader
+
+from units.mock.loader import DictDataLoader
+
+class TestTaskExecutor(unittest.TestCase):
+
+ def setUp(self):
+ pass
+
+ def tearDown(self):
+ pass
+
+ def test_task_executor_init(self):
+ fake_loader = DictDataLoader({})
+ mock_host = MagicMock()
+ mock_task = MagicMock()
+ mock_conn_info = MagicMock()
+ mock_shared_loader = MagicMock()
+ new_stdin = None
+ job_vars = dict()
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = mock_shared_loader,
+ )
+
+ def test_task_executor_run(self):
+ fake_loader = DictDataLoader({})
+
+ mock_host = MagicMock()
+
+ mock_task = MagicMock()
+ mock_task._role._role_path = '/path/to/role/foo'
+
+ mock_conn_info = MagicMock()
+
+ mock_shared_loader = MagicMock()
+
+ new_stdin = None
+ job_vars = dict()
+
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = mock_shared_loader,
+ )
+
+ te._get_loop_items = MagicMock(return_value=None)
+ te._execute = MagicMock(return_value=dict())
+ res = te.run()
+
+ te._get_loop_items = MagicMock(return_value=[])
+ res = te.run()
+
+ te._get_loop_items = MagicMock(return_value=['a','b','c'])
+ te._run_loop = MagicMock(return_value=[dict(item='a', changed=True), dict(item='b', failed=True), dict(item='c')])
+ res = te.run()
+
+ te._get_loop_items = MagicMock(side_effect=AnsibleError(""))
+ res = te.run()
+ self.assertIn("failed", res)
+
+ def test_task_executor_get_loop_items(self):
+ fake_loader = DictDataLoader({})
+
+ mock_host = MagicMock()
+
+ mock_task = MagicMock()
+ mock_task.loop = 'items'
+ mock_task.loop_args = ['a', 'b', 'c']
+
+ mock_conn_info = MagicMock()
+
+ mock_shared_loader = MagicMock()
+
+ new_stdin = None
+ job_vars = dict()
+
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = mock_shared_loader,
+ )
+
+ items = te._get_loop_items()
+ self.assertEqual(items, ['a', 'b', 'c'])
+
+ def test_task_executor_run_loop(self):
+ items = ['a', 'b', 'c']
+
+ fake_loader = DictDataLoader({})
+
+ mock_host = MagicMock()
+
+ def _copy():
+ new_item = MagicMock()
+ return new_item
+
+ mock_task = MagicMock()
+ mock_task.copy.side_effect = _copy
+
+ mock_conn_info = MagicMock()
+
+ mock_shared_loader = MagicMock()
+
+ new_stdin = None
+ job_vars = dict()
+
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = mock_shared_loader,
+ )
+
+ def _execute(variables):
+ return dict(item=variables.get('item'))
+
+ te._squash_items = MagicMock(return_value=items)
+ te._execute = MagicMock(side_effect=_execute)
+
+ res = te._run_loop(items)
+ self.assertEqual(len(res), 3)
+
+ def test_task_executor_squash_items(self):
+ items = ['a', 'b', 'c']
+
+ fake_loader = DictDataLoader({})
+
+ mock_host = MagicMock()
+
+ def _evaluate_conditional(templar, variables):
+ item = variables.get('item')
+ if item == 'b':
+ return False
+ return True
+
+ mock_task = MagicMock()
+ mock_task.evaluate_conditional.side_effect = _evaluate_conditional
+
+ mock_conn_info = MagicMock()
+
+ mock_shared_loader = None
+
+ new_stdin = None
+ job_vars = dict()
+
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = mock_shared_loader,
+ )
+
+ mock_task.action = 'foo'
+ new_items = te._squash_items(items=items, variables=job_vars)
+ self.assertEqual(new_items, ['a', 'b', 'c'])
+
+ mock_task.action = 'yum'
+ new_items = te._squash_items(items=items, variables=job_vars)
+ self.assertEqual(new_items, ['a,c'])
+
+ def test_task_executor_execute(self):
+ fake_loader = DictDataLoader({})
+
+ mock_host = MagicMock()
+
+ mock_task = MagicMock()
+ mock_task.args = dict()
+ mock_task.retries = 0
+ mock_task.delay = -1
+ mock_task.register = 'foo'
+ mock_task.until = None
+ mock_task.changed_when = None
+ mock_task.failed_when = None
+ mock_task.post_validate.return_value = None
+
+ mock_conn_info = MagicMock()
+ mock_conn_info.post_validate.return_value = None
+ mock_conn_info.update_vars.return_value = None
+
+ mock_connection = MagicMock()
+ mock_connection.set_host_overrides.return_value = None
+ mock_connection._connect.return_value = None
+
+ mock_action = MagicMock()
+
+ shared_loader = None
+ new_stdin = None
+ job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
+
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = shared_loader,
+ )
+
+ te._get_connection = MagicMock(return_value=mock_connection)
+ te._get_action_handler = MagicMock(return_value=mock_action)
+
+ mock_action.run.return_value = dict(ansible_facts=dict())
+ res = te._execute()
+
+ mock_task.changed_when = "1 == 1"
+ res = te._execute()
+
+ mock_task.changed_when = None
+ mock_task.failed_when = "1 == 1"
+ res = te._execute()
+
+ mock_task.failed_when = None
+ mock_task.evaluate_conditional.return_value = False
+ res = te._execute()
+
+ mock_task.evaluate_conditional.return_value = True
+ mock_task.args = dict(_raw_params='foo.yml', a='foo', b='bar')
+ mock_task.action = 'include'
+ res = te._execute()
+
+ def test_task_executor_poll_async_result(self):
+ fake_loader = DictDataLoader({})
+
+ mock_host = MagicMock()
+
+ mock_task = MagicMock()
+ mock_task.async = 3
+ mock_task.poll = 1
+
+ mock_conn_info = MagicMock()
+
+ mock_connection = MagicMock()
+
+ mock_action = MagicMock()
+
+ shared_loader = None
+ new_stdin = None
+ job_vars = dict(omit="XXXXXXXXXXXXXXXXXXX")
+
+ te = TaskExecutor(
+ host = mock_host,
+ task = mock_task,
+ job_vars = job_vars,
+ connection_info = mock_conn_info,
+ new_stdin = new_stdin,
+ loader = fake_loader,
+ shared_loader_obj = shared_loader,
+ )
+
+ te._connection = MagicMock()
+
+ def _get(*args, **kwargs):
+ mock_action = MagicMock()
+ mock_action.run.return_value = dict()
+ return mock_action
+
+ # testing with some bad values in the result passed to poll async,
+ # and with a bad value returned from the mock action
+ with patch.object(action_loader, 'get', _get):
+ mock_templar = MagicMock()
+ res = te._poll_async_result(result=dict(), templar=mock_templar)
+ self.assertIn('failed', res)
+ res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
+ self.assertIn('failed', res)
+
+ def _get(*args, **kwargs):
+ mock_action = MagicMock()
+ mock_action.run.return_value = dict(finished=1)
+ return mock_action
+
+ # now testing with good values
+ with patch.object(action_loader, 'get', _get):
+ mock_templar = MagicMock()
+ res = te._poll_async_result(result=dict(ansible_job_id=1), templar=mock_templar)
+ self.assertEqual(res, dict(finished=1))
+
From 3775dd5ec82265fe5aec909accffe950d08a38d2 Mon Sep 17 00:00:00 2001
From: Etienne CARRIERE
Date: Mon, 25 May 2015 09:53:23 +0200
Subject: [PATCH 108/971] Factor F5 primitives
---
lib/ansible/module_utils/f5.py | 64 ++++++++++++++++++++++++++++++++++
1 file changed, 64 insertions(+)
create mode 100644 lib/ansible/module_utils/f5.py
diff --git a/lib/ansible/module_utils/f5.py b/lib/ansible/module_utils/f5.py
new file mode 100644
index 0000000000..2d97662a0b
--- /dev/null
+++ b/lib/ansible/module_utils/f5.py
@@ -0,0 +1,64 @@
+# This code is part of Ansible, but is an independent component.
+# This particular file snippet, and this file snippet only, is BSD licensed.
+# Modules you write using this snippet, which is embedded dynamically by Ansible
+# still belong to the author of the module, and may assign their own license
+# to the complete work.
+#
+# Copyright (c), Etienne Carrière ,2015
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+try:
+ import bigsuds
+except ImportError:
+ bigsuds_found = False
+else:
+ bigsuds_found = True
+
+
+def f5_argument_spec():
+ return dict(
+ server=dict(type='str', required=True),
+ user=dict(type='str', required=True),
+ password=dict(type='str', aliases=['pass', 'pwd'], required=True, no_log=True),
+ validate_certs = dict(default='yes', type='bool'),
+ state = dict(type='str', default='present', choices=['present', 'absent']),
+ partition = dict(type='str', default='Common')
+ )
+
+
+def f5_parse_arguments(module):
+ if not bigsuds_found:
+ module.fail_json(msg="the python bigsuds module is required")
+ if not module.params['validate_certs']:
+ disable_ssl_cert_validation()
+ return (module.params['server'],module.params['user'],module.params['password'],module.params['state'],module.params['partition'])
+
+def bigip_api(bigip, user, password):
+ api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
+ return api
+
+def disable_ssl_cert_validation():
+ # You probably only want to do this for testing and never in production.
+ # From https://www.python.org/dev/peps/pep-0476/#id29
+ import ssl
+ ssl._create_default_https_context = ssl._create_unverified_context
+
From eaddc0b309bb55fec9fc72a0a4a073aedb3bc930 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Mon, 25 May 2015 11:05:47 -0400
Subject: [PATCH 109/971] removed duplicate retry config entries
---
lib/ansible/constants.py | 3 ---
1 file changed, 3 deletions(-)
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 9c1c820421..98f058e21c 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -188,9 +188,6 @@ DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks'
RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
-RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
-RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
-
# CONNECTION RELATED
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', "%(directory)s/ansible-ssh-%%h-%%p-%%r")
From 16c70dd7d459372318aaf60bfd3708dda6abc3f6 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 26 May 2015 11:55:52 -0400
Subject: [PATCH 110/971] added equivalent of #9636 to v2
---
lib/ansible/module_utils/basic.py | 1 +
lib/ansible/plugins/shell/sh.py | 5 +++--
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 237cb5b106..2da2bad3ef 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -868,6 +868,7 @@ class AnsibleModule(object):
locale.setlocale(locale.LC_ALL, 'C')
os.environ['LANG'] = 'C'
os.environ['LC_CTYPE'] = 'C'
+ os.environ['LC_MESSAGES'] = 'C'
except Exception, e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
diff --git a/lib/ansible/plugins/shell/sh.py b/lib/ansible/plugins/shell/sh.py
index 628df9bbfb..f7ba06d931 100644
--- a/lib/ansible/plugins/shell/sh.py
+++ b/lib/ansible/plugins/shell/sh.py
@@ -34,8 +34,9 @@ class ShellModule(object):
def env_prefix(self, **kwargs):
'''Build command prefix with environment variables.'''
env = dict(
- LANG = C.DEFAULT_MODULE_LANG,
- LC_CTYPE = C.DEFAULT_MODULE_LANG,
+ LANG = C.DEFAULT_MODULE_LANG,
+ LC_CTYPE = C.DEFAULT_MODULE_LANG,
+ LC_MESSAGES = C.DEFAULT_MODULE_LANG,
)
env.update(kwargs)
return ' '.join(['%s=%s' % (k, pipes.quote(unicode(v))) for k,v in env.items()])
From 31609e1b16e8edd9ff5911097d3d33733a2817e5 Mon Sep 17 00:00:00 2001
From: Monty Taylor
Date: Sun, 26 Oct 2014 10:41:58 -0700
Subject: [PATCH 111/971] Add required_if to AnsibleModule
There is a common pattern in modules where some parameters are required
only if another parameter is present AND set to a particular value. For
instance, if a cloud server state is "present" it's important to
indicate the image to be used, but if it's "absent", the image that was
used to launch it is not necessary. Provide a check that takes as an
input a list of 3-element tuples containing parameter to depend on, the
value it should be set to, and a list of parameters which are required
if the required parameter is set to the required value.
---
lib/ansible/module_utils/basic.py | 18 +++++++++++++++++-
1 file changed, 17 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 2da2bad3ef..446cf56f07 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -337,7 +337,8 @@ class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
- required_one_of=None, add_file_common_args=False, supports_check_mode=False):
+ required_one_of=None, add_file_common_args=False, supports_check_mode=False,
+ required_if=None):
'''
common code for quickly building an ansible module in Python
@@ -385,6 +386,7 @@ class AnsibleModule(object):
self._check_argument_types()
self._check_required_together(required_together)
self._check_required_one_of(required_one_of)
+ self._check_required_if(required_if)
self._set_defaults(pre=False)
if not self.no_log:
@@ -958,6 +960,20 @@ class AnsibleModule(object):
if len(missing) > 0:
self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
+ def _check_required_if(self, spec):
+ ''' ensure that parameters which conditionally required are present '''
+ if spec is None:
+ return
+ for (key, val, requirements) in spec:
+ missing = []
+ if key in self.params and self.params[key] == val:
+ for check in requirements:
+ count = self._count_terms(check)
+ if count == 0:
+ missing.append(check)
+ if len(missing) > 0:
+ self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing))
+
def _check_argument_values(self):
''' ensure all arguments have the requested values, and there are no stray arguments '''
for (k,v) in self.argument_spec.iteritems():
From d793ed360b65f991e384a7839c7456830c445778 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Tue, 26 May 2015 11:28:30 -0700
Subject: [PATCH 112/971] Fix syntaxerror in the required_if arg spec check
---
lib/ansible/module_utils/basic.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 446cf56f07..2e4805cb86 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -972,7 +972,7 @@ class AnsibleModule(object):
if count == 0:
missing.append(check)
if len(missing) > 0:
- self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing))
+ self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
def _check_argument_values(self):
''' ensure all arguments have the requested values, and there are no stray arguments '''
From f1ab1c48f4f19867a537c9ac5ef7656b0b05901e Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Tue, 26 May 2015 12:38:26 -0700
Subject: [PATCH 113/971] Update submodule refs for v2
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index e10a581abd..9cc23c749a 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit e10a581abdf375b855418897944d5206682994b6
+Subproject commit 9cc23c749a8cd5039db7aa1998d310bbb04d1e13
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 24390f1ac6..a07fc88ba0 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 24390f1ac69fe4731e143eab16120bc422fd6233
+Subproject commit a07fc88ba0d2546b92fbe93b2bede699fdf2bc48
From 339a02c3847ce41ac8560b3e1f429f8d1d2e88f3 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Wed, 27 May 2015 03:20:54 -0500
Subject: [PATCH 114/971] Started reworking module_utils/basic unit tests (v2)
---
lib/ansible/module_utils/basic.py | 4 +-
test/units/module_utils/test_basic.py | 454 +++++++++++---------------
2 files changed, 199 insertions(+), 259 deletions(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 2e4805cb86..c222bb4d16 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -930,7 +930,7 @@ class AnsibleModule(object):
for check in spec:
count = self._count_terms(check)
if count > 1:
- self.fail_json(msg="parameters are mutually exclusive: %s" % check)
+ self.fail_json(msg="parameters are mutually exclusive: %s" % (check,))
def _check_required_one_of(self, spec):
if spec is None:
@@ -948,7 +948,7 @@ class AnsibleModule(object):
non_zero = [ c for c in counts if c > 0 ]
if len(non_zero) > 0:
if 0 in counts:
- self.fail_json(msg="parameters are required together: %s" % check)
+ self.fail_json(msg="parameters are required together: %s" % (check,))
def _check_required_arguments(self):
''' ensure all required arguments are present '''
diff --git a/test/units/module_utils/test_basic.py b/test/units/module_utils/test_basic.py
index 60f501ba28..c3db5138bf 100644
--- a/test/units/module_utils/test_basic.py
+++ b/test/units/module_utils/test_basic.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan
#
# This file is part of Ansible
@@ -16,301 +17,167 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
-#from __future__ import (absolute_import, division, print_function)
from __future__ import (absolute_import, division)
__metaclass__ = type
-import os
-import tempfile
+import __builtin__
+
+from nose.tools import timed
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
-from ansible.errors import *
-from ansible.executor.module_common import modify_module
-from ansible.module_utils.basic import heuristic_log_sanitize
-from ansible.utils.hashing import checksum as utils_checksum
-
-TEST_MODULE_DATA = """
-from ansible.module_utils.basic import *
-
-def get_module():
- return AnsibleModule(
- argument_spec = dict(),
- supports_check_mode = True,
- no_log = True,
- )
-
-get_module()
-
-"""
-
class TestModuleUtilsBasic(unittest.TestCase):
- def cleanup_temp_file(self, fd, path):
- try:
- os.close(fd)
- os.remove(path)
- except:
- pass
-
- def cleanup_temp_dir(self, path):
- try:
- os.rmdir(path)
- except:
- pass
-
def setUp(self):
- # create a temporary file for the test module
- # we're about to generate
- self.tmp_fd, self.tmp_path = tempfile.mkstemp()
- os.write(self.tmp_fd, TEST_MODULE_DATA)
-
- # template the module code and eval it
- module_data, module_style, shebang = modify_module(self.tmp_path, {})
-
- d = {}
- exec(module_data, d, d)
- self.module = d['get_module']()
-
- # module_utils/basic.py screws with CWD, let's save it and reset
- self.cwd = os.getcwd()
+ pass
def tearDown(self):
- self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
- # Reset CWD back to what it was before basic.py changed it
- os.chdir(self.cwd)
+ pass
- #################################################################################
- # run_command() tests
+ def test_module_utils_basic_imports(self):
+ realimport = __builtin__.__import__
- # test run_command with a string command
- def test_run_command_string(self):
- (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'")
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar')
- (rc, out, err) = self.module.run_command("/bin/echo -n 'foo bar'", use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar')
+ def _mock_import(name, *args, **kwargs):
+ if name == 'json':
+ raise ImportError()
+ realimport(name, *args, **kwargs)
- # test run_command with an array of args (with both use_unsafe_shell=True|False)
- def test_run_command_args(self):
- (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"])
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar')
- (rc, out, err) = self.module.run_command(['/bin/echo', '-n', "foo bar"], use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar')
+ with patch.object(__builtin__, '__import__', _mock_import, create=True) as m:
+ m('ansible.module_utils.basic')
+ __builtin__.__import__('ansible.module_utils.basic')
- # test run_command with leading environment variables
- #@raises(SystemExit)
- def test_run_command_string_with_env_variables(self):
- self.assertRaises(SystemExit, self.module.run_command, 'FOO=bar /bin/echo -n "foo bar"')
-
- #@raises(SystemExit)
- def test_run_command_args_with_env_variables(self):
- self.assertRaises(SystemExit, self.module.run_command, ['FOO=bar', '/bin/echo', '-n', 'foo bar'])
+ def test_module_utils_basic_get_platform(self):
+ with patch('platform.system', return_value='foo'):
+ from ansible.module_utils.basic import get_platform
+ self.assertEqual(get_platform(), 'foo')
- def test_run_command_string_unsafe_with_env_variables(self):
- (rc, out, err) = self.module.run_command('FOO=bar /bin/echo -n "foo bar"', use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar')
+ def test_module_utils_basic_get_distribution(self):
+ from ansible.module_utils.basic import get_distribution
- # test run_command with a command pipe (with both use_unsafe_shell=True|False)
- def test_run_command_string_unsafe_with_pipe(self):
- (rc, out, err) = self.module.run_command('echo "foo bar" | cat', use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar\n')
+ with patch('platform.system', return_value='Foo'):
+ self.assertEqual(get_distribution(), None)
- # test run_command with a shell redirect in (with both use_unsafe_shell=True|False)
- def test_run_command_string_unsafe_with_redirect_in(self):
- (rc, out, err) = self.module.run_command('cat << EOF\nfoo bar\nEOF', use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar\n')
+ with patch('platform.system', return_value='Linux'):
+ with patch('platform.linux_distribution', return_value=("foo", "1", "One")):
+ self.assertEqual(get_distribution(), "Foo")
- # test run_command with a shell redirect out (with both use_unsafe_shell=True|False)
- def test_run_command_string_unsafe_with_redirect_out(self):
- tmp_fd, tmp_path = tempfile.mkstemp()
- try:
- (rc, out, err) = self.module.run_command('echo "foo bar" > %s' % tmp_path, use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertTrue(os.path.exists(tmp_path))
- checksum = utils_checksum(tmp_path)
- self.assertEqual(checksum, 'd53a205a336e07cf9eac45471b3870f9489288ec')
- except:
- raise
- finally:
- self.cleanup_temp_file(tmp_fd, tmp_path)
+ with patch('os.path.isfile', return_value=True):
+ def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
+ if supported_dists != ():
+ return ("AmazonFooBar", "", "")
+ else:
+ return ("", "", "")
+
+ with patch('platform.linux_distribution', side_effect=_dist):
+ self.assertEqual(get_distribution(), "Amazon")
- # test run_command with a double shell redirect out (append) (with both use_unsafe_shell=True|False)
- def test_run_command_string_unsafe_with_double_redirect_out(self):
- tmp_fd, tmp_path = tempfile.mkstemp()
- try:
- (rc, out, err) = self.module.run_command('echo "foo bar" >> %s' % tmp_path, use_unsafe_shell=True)
- self.assertEqual(rc, 0)
- self.assertTrue(os.path.exists(tmp_path))
- checksum = utils_checksum(tmp_path)
- self.assertEqual(checksum, 'd53a205a336e07cf9eac45471b3870f9489288ec')
- except:
- raise
- finally:
- self.cleanup_temp_file(tmp_fd, tmp_path)
+ def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
+ if supported_dists != ():
+ return ("Bar", "2", "Two")
+ else:
+ return ("", "", "")
+
+ with patch('platform.linux_distribution', side_effect=_dist):
+ self.assertEqual(get_distribution(), "OtherLinux")
+
+ with patch('platform.linux_distribution', side_effect=Exception("boo")):
+ with patch('platform.dist', return_value=("bar", "2", "Two")):
+ self.assertEqual(get_distribution(), "Bar")
- # test run_command with data
- def test_run_command_string_with_data(self):
- (rc, out, err) = self.module.run_command('cat', data='foo bar')
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'foo bar\n')
+ def test_module_utils_basic_get_distribution_version(self):
+ from ansible.module_utils.basic import get_distribution_version
- # test run_command with binary data
- def test_run_command_string_with_binary_data(self):
- (rc, out, err) = self.module.run_command('cat', data='\x41\x42\x43\x44', binary_data=True)
- self.assertEqual(rc, 0)
- self.assertEqual(out, 'ABCD')
+ with patch('platform.system', return_value='Foo'):
+ self.assertEqual(get_distribution_version(), None)
- # test run_command with a cwd set
- def test_run_command_string_with_cwd(self):
- tmp_path = tempfile.mkdtemp()
- try:
- (rc, out, err) = self.module.run_command('pwd', cwd=tmp_path)
- self.assertEqual(rc, 0)
- self.assertTrue(os.path.exists(tmp_path))
- self.assertEqual(out.strip(), os.path.realpath(tmp_path))
- except:
- raise
- finally:
- self.cleanup_temp_dir(tmp_path)
+ with patch('platform.system', return_value='Linux'):
+ with patch('platform.linux_distribution', return_value=("foo", "1", "One")):
+ self.assertEqual(get_distribution_version(), "1")
+ with patch('os.path.isfile', return_value=True):
+ def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
+ if supported_dists != ():
+ return ("AmazonFooBar", "2", "")
+ else:
+ return ("", "", "")
-class TestModuleUtilsBasicHelpers(unittest.TestCase):
- ''' Test some implementation details of AnsibleModule
+ with patch('platform.linux_distribution', side_effect=_dist):
+ self.assertEqual(get_distribution_version(), "2")
- Some pieces of AnsibleModule are implementation details but they have
- potential cornercases that we need to check. Go ahead and test at
- this level that the functions are behaving even though their API may
- change and we'd have to rewrite these tests so that we know that we
- need to check for those problems in any rewrite.
+ with patch('platform.linux_distribution', side_effect=Exception("boo")):
+ with patch('platform.dist', return_value=("bar", "3", "Three")):
+ self.assertEqual(get_distribution_version(), "3")
- In the future we might want to restructure higher level code to be
- friendlier to unittests so that we can test at the level that the public
- is interacting with the APIs.
- '''
-
- MANY_RECORDS = 7000
- URL_SECRET = 'http://username:pas:word@foo.com/data'
- SSH_SECRET = 'username:pas:word@foo.com/data'
-
- def cleanup_temp_file(self, fd, path):
- try:
- os.close(fd)
- os.remove(path)
- except:
+ def test_module_utils_basic_load_platform_subclass(self):
+ class LinuxTest:
pass
- def cleanup_temp_dir(self, path):
- try:
- os.rmdir(path)
- except:
- pass
+ class Foo(LinuxTest):
+ platform = "Linux"
+ distribution = None
- def _gen_data(self, records, per_rec, top_level, secret_text):
- hostvars = {'hostvars': {}}
- for i in range(1, records, 1):
- host_facts = {'host%s' % i:
- {'pstack':
- {'running': '875.1',
- 'symlinked': '880.0',
- 'tars': [],
- 'versions': ['885.0']},
- }}
+ class Bar(LinuxTest):
+ platform = "Linux"
+ distribution = "Bar"
- if per_rec:
- host_facts['host%s' % i]['secret'] = secret_text
- hostvars['hostvars'].update(host_facts)
- if top_level:
- hostvars['secret'] = secret_text
- return hostvars
+ from ansible.module_utils.basic import load_platform_subclass
- def setUp(self):
- self.many_url = repr(self._gen_data(self.MANY_RECORDS, True, True,
- self.URL_SECRET))
- self.many_ssh = repr(self._gen_data(self.MANY_RECORDS, True, True,
- self.SSH_SECRET))
- self.one_url = repr(self._gen_data(self.MANY_RECORDS, False, True,
- self.URL_SECRET))
- self.one_ssh = repr(self._gen_data(self.MANY_RECORDS, False, True,
- self.SSH_SECRET))
- self.zero_secrets = repr(self._gen_data(self.MANY_RECORDS, False,
- False, ''))
- self.few_url = repr(self._gen_data(2, True, True, self.URL_SECRET))
- self.few_ssh = repr(self._gen_data(2, True, True, self.SSH_SECRET))
+ # match just the platform class, not a specific distribution
+ with patch('ansible.module_utils.basic.get_platform', return_value="Linux"):
+ with patch('ansible.module_utils.basic.get_distribution', return_value=None):
+ self.assertIs(type(load_platform_subclass(LinuxTest)), Foo)
- # create a temporary file for the test module
- # we're about to generate
- self.tmp_fd, self.tmp_path = tempfile.mkstemp()
- os.write(self.tmp_fd, TEST_MODULE_DATA)
+ # match both the distribution and platform class
+ with patch('ansible.module_utils.basic.get_platform', return_value="Linux"):
+ with patch('ansible.module_utils.basic.get_distribution', return_value="Bar"):
+ self.assertIs(type(load_platform_subclass(LinuxTest)), Bar)
- # template the module code and eval it
- module_data, module_style, shebang = modify_module(self.tmp_path, {})
+ # if neither match, the fallback should be the top-level class
+ with patch('ansible.module_utils.basic.get_platform', return_value="Foo"):
+ with patch('ansible.module_utils.basic.get_distribution', return_value=None):
+ self.assertIs(type(load_platform_subclass(LinuxTest)), LinuxTest)
- d = {}
- exec(module_data, d, d)
- self.module = d['get_module']()
+ def test_module_utils_basic_json_dict_converters(self):
+ from ansible.module_utils.basic import json_dict_unicode_to_bytes, json_dict_bytes_to_unicode
- # module_utils/basic.py screws with CWD, let's save it and reset
- self.cwd = os.getcwd()
+ test_data = dict(
+ item1 = u"Fóo",
+ item2 = [u"Bár", u"Bam"],
+ item3 = dict(sub1=u"Súb"),
+ item4 = (u"föo", u"bär", u"©"),
+ item5 = 42,
+ )
+ res = json_dict_unicode_to_bytes(test_data)
+ res2 = json_dict_bytes_to_unicode(res)
- def tearDown(self):
- self.cleanup_temp_file(self.tmp_fd, self.tmp_path)
- # Reset CWD back to what it was before basic.py changed it
- os.chdir(self.cwd)
+ self.assertEqual(test_data, res2)
+ def test_module_utils_basic_heuristic_log_sanitize(self):
+ from ansible.module_utils.basic import heuristic_log_sanitize
- #################################################################################
+ URL_SECRET = 'http://username:pas:word@foo.com/data'
+ SSH_SECRET = 'username:pas:word@foo.com/data'
- #
- # Speed tests
- #
+ def _gen_data(records, per_rec, top_level, secret_text):
+ hostvars = {'hostvars': {}}
+ for i in range(1, records, 1):
+ host_facts = {'host%s' % i:
+ {'pstack':
+ {'running': '875.1',
+ 'symlinked': '880.0',
+ 'tars': [],
+ 'versions': ['885.0']},
+ }}
+ if per_rec:
+ host_facts['host%s' % i]['secret'] = secret_text
+ hostvars['hostvars'].update(host_facts)
+ if top_level:
+ hostvars['secret'] = secret_text
+ return hostvars
- # Previously, we used regexes which had some pathologically slow cases for
- # parameters with large amounts of data with many ':' but no '@'. The
- # present function gets slower when there are many replacements so we may
- # want to explore regexes in the future (for the speed when substituting
- # or flexibility). These speed tests will hopefully tell us if we're
- # introducing code that has cases that are simply too slow.
- #
- # Some regex notes:
- # * re.sub() is faster than re.match() + str.join().
- # * We may be able to detect a large number of '@' symbols and then use
- # a regex else use the present function.
-
- #@timed(5)
- #def test_log_sanitize_speed_many_url(self):
- # heuristic_log_sanitize(self.many_url)
-
- #@timed(5)
- #def test_log_sanitize_speed_many_ssh(self):
- # heuristic_log_sanitize(self.many_ssh)
-
- #@timed(5)
- #def test_log_sanitize_speed_one_url(self):
- # heuristic_log_sanitize(self.one_url)
-
- #@timed(5)
- #def test_log_sanitize_speed_one_ssh(self):
- # heuristic_log_sanitize(self.one_ssh)
-
- #@timed(5)
- #def test_log_sanitize_speed_zero_secrets(self):
- # heuristic_log_sanitize(self.zero_secrets)
-
- #
- # Test that the password obfuscation sanitizes somewhat cleanly.
- #
-
- def test_log_sanitize_correctness(self):
- url_data = repr(self._gen_data(3, True, True, self.URL_SECRET))
- ssh_data = repr(self._gen_data(3, True, True, self.SSH_SECRET))
+ url_data = repr(_gen_data(3, True, True, URL_SECRET))
+ ssh_data = repr(_gen_data(3, True, True, SSH_SECRET))
url_output = heuristic_log_sanitize(url_data)
ssh_output = heuristic_log_sanitize(ssh_data)
@@ -349,7 +216,80 @@ class TestModuleUtilsBasicHelpers(unittest.TestCase):
# python2.6 or less's unittest
self.assertTrue(":********@foo.com/data'" in ssh_output, '%s is not present in %s' % (":********@foo.com/data'", ssh_output))
- # The overzealous-ness here may lead to us changing the algorithm in
- # the future. We could make it consume less of the data (with the
- # possibility of leaving partial passwords exposed) and encourage
- # people to use no_log instead of relying on this obfuscation.
+
+ def test_module_utils_basic_ansible_module_creation(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec=dict(),
+ )
+
+ arg_spec = dict(
+ foo = dict(required=True),
+ bar = dict(),
+ bam = dict(),
+ baz = dict(),
+ )
+ mut_ex = (('bar', 'bam'),)
+ req_to = (('bam', 'baz'),)
+
+ # should test ok
+ basic.MODULE_COMPLEX_ARGS = '{"foo":"hello"}'
+ am = basic.AnsibleModule(
+ argument_spec = arg_spec,
+ mutually_exclusive = mut_ex,
+ required_together = req_to,
+ no_log=True,
+ check_invalid_arguments=False,
+ add_file_common_args=True,
+ supports_check_mode=True,
+ )
+
+ # fail, because a required param was not specified
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ self.assertRaises(
+ SystemExit,
+ basic.AnsibleModule,
+ argument_spec = arg_spec,
+ mutually_exclusive = mut_ex,
+ required_together = req_to,
+ no_log=True,
+ check_invalid_arguments=False,
+ add_file_common_args=True,
+ supports_check_mode=True,
+ )
+
+ # fail because of mutually exclusive parameters
+ basic.MODULE_COMPLEX_ARGS = '{"foo":"hello", "bar": "bad", "bam": "bad"}'
+ self.assertRaises(
+ SystemExit,
+ basic.AnsibleModule,
+ argument_spec = arg_spec,
+ mutually_exclusive = mut_ex,
+ required_together = req_to,
+ no_log=True,
+ check_invalid_arguments=False,
+ add_file_common_args=True,
+ supports_check_mode=True,
+ )
+
+ # fail because a param required due to another param was not specified
+ basic.MODULE_COMPLEX_ARGS = '{"bam":"bad"}'
+ self.assertRaises(
+ SystemExit,
+ basic.AnsibleModule,
+ argument_spec = arg_spec,
+ mutually_exclusive = mut_ex,
+ required_together = req_to,
+ no_log=True,
+ check_invalid_arguments=False,
+ add_file_common_args=True,
+ supports_check_mode=True,
+ )
+
+ def test_module_utils_basic_get_module_path(self):
+ from ansible.module_utils.basic import get_module_path
+ with patch('os.path.realpath', return_value='/path/to/foo/'):
+ self.assertEqual(get_module_path(), '/path/to/foo')
+
From 7508709045c68738990b28e030cb80928d19a3e6 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Wed, 27 May 2015 07:27:31 -0400
Subject: [PATCH 115/971] updated as per feedback
---
docsite/rst/developing_modules.rst | 48 ++++++++++++++++++++++++++++++
1 file changed, 48 insertions(+)
diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst
index ddd4e90c82..0763814a1a 100644
--- a/docsite/rst/developing_modules.rst
+++ b/docsite/rst/developing_modules.rst
@@ -490,6 +490,54 @@ Module checklist
* If you are asking 'how can i have a module execute other modules' ... you want to write a role
+Windows modules checklist
+`````````````````````````
+* Favour native powershell and .net ways of doing things over calls to COM libraries or calls to native executables which may or may not be present in all versions of windows
+* modules are in powershell (.ps1 files) but the docs reside in same name python file (.py)
+* look at ansible/lib/ansible/module_utils/powershell.ps1 for common code, avoid duplication
+* start with::
+
+ #!powershell
+
+then::
+<GPL header>
+then::
+ # WANT_JSON
+ # POWERSHELL_COMMON
+
+* Arguments:
+ * Try and use state present and state absent like other modules
+ * You need to check that all your mandatory args are present::
+
+ If ($params.state) {
+ $state = $params.state.ToString().ToLower()
+ If (($state -ne 'started') -and ($state -ne 'stopped') -and ($state -ne 'restarted')) {
+ Fail-Json $result "state is '$state'; must be 'started', 'stopped', or 'restarted'"
+ }
+ }
+
+ * Look at existing modules for more examples of argument checking.
+
+* Results
+ * The result object should always contain an attribute called changed set to either $true or $false
+ * Create your result object like this::
+
+ $result = New-Object psobject @{
+ changed = $false
+ other_result_attribute = $some_value
+ };
+
+ If all is well, exit with a
+ Exit-Json $result
+
+ * Ensure anything you return, including errors can be converted to json.
+ * Be aware that exception messages could contain almost anything.
+ * ConvertTo-Json will fail if it encounters a trailing \ in a string.
+ * If all is not well use Fail-Json to exit.
+
+* Have you tested for powershell 3.0 and 4.0 compliance?
+
+
Deprecating and making module aliases
``````````````````````````````````````
From 83074f4d93f628f1d4563687000a5cb51fd3f979 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ren=C3=A9=20Moser?=
Date: Wed, 20 May 2015 16:31:17 +0200
Subject: [PATCH 116/971] doc: we need GPLv3 license headers
GPLv2 only headers are incompatible with GPLv3
---
docsite/rst/developing_modules.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docsite/rst/developing_modules.rst b/docsite/rst/developing_modules.rst
index ddd4e90c82..46cb36f634 100644
--- a/docsite/rst/developing_modules.rst
+++ b/docsite/rst/developing_modules.rst
@@ -464,7 +464,7 @@ Module checklist
* Requirements should be documented, using the `requirements=[]` field
* Author should be set, name and github id at least
* Made use of U() for urls, C() for files and options, I() for params, M() for modules?
- * GPL License header
+ * GPL 3 License header
* Does module use check_mode? Could it be modified to use it? Document it
* Examples: make sure they are reproducible
* Return: document the return structure of the module
From b91532aff358826dd9d3c04588b0cd8dcebe5a69 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 27 May 2015 13:39:09 -0700
Subject: [PATCH 117/971] Drop the mysql test db first so that we test with a
clean slate.
---
test/integration/roles/test_mysql_db/tasks/main.yml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/test/integration/roles/test_mysql_db/tasks/main.yml b/test/integration/roles/test_mysql_db/tasks/main.yml
index 60a573bd0b..a059cd212a 100644
--- a/test/integration/roles/test_mysql_db/tasks/main.yml
+++ b/test/integration/roles/test_mysql_db/tasks/main.yml
@@ -17,6 +17,11 @@
# along with Ansible. If not, see .
# ============================================================
+
+- name: make sure the test database is not there
+ command: mysql "-e drop database '{{db_name}}';"
+ ignore_errors: True
+
- name: test state=present for a database name (expect changed=true)
mysql_db: name={{ db_name }} state=present
register: result
From 388827a636337df9f255aeec882b6440658abf9a Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 27 May 2015 20:28:29 -0700
Subject: [PATCH 118/971] Update submodule ref
---
lib/ansible/modules/core | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 44ef8b3bc6..2b5e932cfb 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 44ef8b3bc66365a0ca89411041eb0d51c541d6db
+Subproject commit 2b5e932cfb4df42f46812aee2476fdf5aabab172
From e59d4f3b51665b5e24132bb9303c682a56b63604 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 28 May 2015 01:26:04 -0500
Subject: [PATCH 119/971] More module_utils/basic.py unit tests for v2
---
lib/ansible/module_utils/basic.py | 2 +-
test/units/module_utils/test_basic.py | 451 +++++++++++++++++++++++++-
2 files changed, 447 insertions(+), 6 deletions(-)
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index c222bb4d16..793223b165 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -579,7 +579,7 @@ class AnsibleModule(object):
if len(context) > i:
if context[i] is not None and context[i] != cur_context[i]:
new_context[i] = context[i]
- if context[i] is None:
+ elif context[i] is None:
new_context[i] = cur_context[i]
if cur_context != new_context:
diff --git a/test/units/module_utils/test_basic.py b/test/units/module_utils/test_basic.py
index c3db5138bf..cd2bf0536e 100644
--- a/test/units/module_utils/test_basic.py
+++ b/test/units/module_utils/test_basic.py
@@ -21,11 +21,12 @@ from __future__ import (absolute_import, division)
__metaclass__ = type
import __builtin__
+import errno
from nose.tools import timed
from ansible.compat.tests import unittest
-from ansible.compat.tests.mock import patch, MagicMock
+from ansible.compat.tests.mock import patch, MagicMock, mock_open
class TestModuleUtilsBasic(unittest.TestCase):
@@ -216,6 +217,10 @@ class TestModuleUtilsBasic(unittest.TestCase):
# python2.6 or less's unittest
self.assertTrue(":********@foo.com/data'" in ssh_output, '%s is not present in %s' % (":********@foo.com/data'", ssh_output))
+ def test_module_utils_basic_get_module_path(self):
+ from ansible.module_utils.basic import get_module_path
+ with patch('os.path.realpath', return_value='/path/to/foo/'):
+ self.assertEqual(get_module_path(), '/path/to/foo')
def test_module_utils_basic_ansible_module_creation(self):
from ansible.module_utils import basic
@@ -246,6 +251,8 @@ class TestModuleUtilsBasic(unittest.TestCase):
supports_check_mode=True,
)
+ # FIXME: add asserts here to verify the basic config
+
# fail, because a required param was not specified
basic.MODULE_COMPLEX_ARGS = '{}'
self.assertRaises(
@@ -288,8 +295,442 @@ class TestModuleUtilsBasic(unittest.TestCase):
supports_check_mode=True,
)
- def test_module_utils_basic_get_module_path(self):
- from ansible.module_utils.basic import get_module_path
- with patch('os.path.realpath', return_value='/path/to/foo/'):
- self.assertEqual(get_module_path(), '/path/to/foo')
+ def test_module_utils_basic_ansible_module_load_file_common_arguments(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ am.selinux_mls_enabled = MagicMock()
+ am.selinux_mls_enabled.return_value = True
+ am.selinux_default_context = MagicMock()
+ am.selinux_default_context.return_value = 'unconfined_u:object_r:default_t:s0'.split(':', 3)
+
+ # with no params, the result should be an empty dict
+ res = am.load_file_common_arguments(params=dict())
+ self.assertEqual(res, dict())
+
+ base_params = dict(
+ path = '/path/to/file',
+ mode = 0600,
+ owner = 'root',
+ group = 'root',
+ seuser = '_default',
+ serole = '_default',
+ setype = '_default',
+ selevel = '_default',
+ )
+
+ extended_params = base_params.copy()
+ extended_params.update(dict(
+ follow = True,
+ foo = 'bar',
+ ))
+
+ final_params = base_params.copy()
+ final_params.update(dict(
+ path = '/path/to/real_file',
+ secontext=['unconfined_u', 'object_r', 'default_t', 's0'],
+ ))
+
+ # with the proper params specified, the returned dictionary should represent
+ # only those params which have something to do with the file arguments, excluding
+ # other params and updated as required with proper values which may have been
+ # massaged by the method
+ with patch('os.path.islink', return_value=True):
+ with patch('os.path.realpath', return_value='/path/to/real_file'):
+ res = am.load_file_common_arguments(params=extended_params)
+ self.assertEqual(res, final_params)
+
+ def test_module_utils_basic_ansible_module_selinux_mls_enabled(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ basic.HAVE_SELINUX = False
+ self.assertEqual(am.selinux_mls_enabled(), False)
+
+ basic.HAVE_SELINUX = True
+ with patch('selinux.is_selinux_mls_enabled', return_value=0):
+ self.assertEqual(am.selinux_mls_enabled(), False)
+ with patch('selinux.is_selinux_mls_enabled', return_value=1):
+ self.assertEqual(am.selinux_mls_enabled(), True)
+
+ def test_module_utils_basic_ansible_module_selinux_initial_context(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ am.selinux_mls_enabled = MagicMock()
+ am.selinux_mls_enabled.return_value = False
+ self.assertEqual(am.selinux_initial_context(), [None, None, None])
+ am.selinux_mls_enabled.return_value = True
+ self.assertEqual(am.selinux_initial_context(), [None, None, None, None])
+
+ def test_module_utils_basic_ansible_module_selinux_enabled(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ # we first test the cases where the python selinux lib is
+ # not installed, which has two paths: one in which the system
+ # does have selinux installed (and the selinuxenabled command
+ # is present and returns 0 when run), or selinux is not installed
+ basic.HAVE_SELINUX = False
+ am.get_bin_path = MagicMock()
+ am.get_bin_path.return_value = '/path/to/selinuxenabled'
+ am.run_command = MagicMock()
+ am.run_command.return_value=(0, '', '')
+ self.assertRaises(SystemExit, am.selinux_enabled)
+ am.get_bin_path.return_value = None
+ self.assertEqual(am.selinux_enabled(), False)
+
+ # finally we test the case where the python selinux lib is installed,
+ # and both possibilities there (enabled vs. disabled)
+ basic.HAVE_SELINUX = True
+ with patch('selinux.is_selinux_enabled', return_value=0):
+ self.assertEqual(am.selinux_enabled(), False)
+ with patch('selinux.is_selinux_enabled', return_value=1):
+ self.assertEqual(am.selinux_enabled(), True)
+
+ def test_module_utils_basic_ansible_module_selinux_default_context(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ am.selinux_initial_context = MagicMock(return_value=[None, None, None, None])
+ am.selinux_enabled = MagicMock(return_value=True)
+
+ # we first test the cases where the python selinux lib is not installed
+ basic.HAVE_SELINUX = False
+ self.assertEqual(am.selinux_default_context(path='/foo/bar'), [None, None, None, None])
+
+ # all following tests assume the python selinux bindings are installed
+ basic.HAVE_SELINUX = True
+
+ # next, we test with a mocked implementation of selinux.matchpathcon to simulate
+ # an actual context being found
+ with patch('selinux.matchpathcon', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
+ self.assertEqual(am.selinux_default_context(path='/foo/bar'), ['unconfined_u', 'object_r', 'default_t', 's0'])
+
+ # we also test the case where matchpathcon returned a failure
+ with patch('selinux.matchpathcon', return_value=[-1, '']):
+ self.assertEqual(am.selinux_default_context(path='/foo/bar'), [None, None, None, None])
+
+ # finally, we test where an OSError occurred during matchpathcon's call
+ with patch('selinux.matchpathcon', side_effect=OSError):
+ self.assertEqual(am.selinux_default_context(path='/foo/bar'), [None, None, None, None])
+
+ def test_module_utils_basic_ansible_module_selinux_context(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ am.selinux_initial_context = MagicMock(return_value=[None, None, None, None])
+ am.selinux_enabled = MagicMock(return_value=True)
+
+ # we first test the cases where the python selinux lib is not installed
+ basic.HAVE_SELINUX = False
+ self.assertEqual(am.selinux_context(path='/foo/bar'), [None, None, None, None])
+
+ # all following tests assume the python selinux bindings are installed
+ basic.HAVE_SELINUX = True
+
+ # next, we test with a mocked implementation of selinux.lgetfilecon_raw to simulate
+ # an actual context being found
+ with patch('selinux.lgetfilecon_raw', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
+ self.assertEqual(am.selinux_context(path='/foo/bar'), ['unconfined_u', 'object_r', 'default_t', 's0'])
+
+ # we also test the case where matchpathcon returned a failure
+ with patch('selinux.lgetfilecon_raw', return_value=[-1, '']):
+ self.assertEqual(am.selinux_context(path='/foo/bar'), [None, None, None, None])
+
+ # finally, we test where an OSError occurred during matchpathcon's call
+ e = OSError()
+ e.errno = errno.ENOENT
+ with patch('selinux.lgetfilecon_raw', side_effect=e):
+ self.assertRaises(SystemExit, am.selinux_context, path='/foo/bar')
+
+ e = OSError()
+ with patch('selinux.lgetfilecon_raw', side_effect=e):
+ self.assertRaises(SystemExit, am.selinux_context, path='/foo/bar')
+
+ def test_module_utils_basic_ansible_module_is_special_selinux_path(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ basic.SELINUX_SPECIAL_FS = 'nfs,nfsd,foos'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ def _mock_find_mount_point(path):
+ if path.startswith('/some/path'):
+ return '/some/path'
+ elif path.startswith('/weird/random/fstype'):
+ return '/weird/random/fstype'
+ return '/'
+
+ am.find_mount_point = MagicMock(side_effect=_mock_find_mount_point)
+ am.selinux_context = MagicMock(return_value=['foo_u', 'foo_r', 'foo_t', 's0'])
+
+ m = mock_open()
+ m.side_effect = OSError
+
+ with patch('__builtin__.open', m, create=True):
+ self.assertEqual(am.is_special_selinux_path('/some/path/that/should/be/nfs'), (False, None))
+
+ mount_data = [
+ '/dev/disk1 / ext4 rw,seclabel,relatime,data=ordered 0 0\n',
+ '1.1.1.1:/path/to/nfs /some/path nfs ro 0 0\n',
+ 'whatever /weird/random/fstype foos rw 0 0\n',
+ ]
+
+ # mock_open has a broken readlines() implementation apparently...
+ # this should work by default but doesn't, so we fix it
+ m = mock_open(read_data=''.join(mount_data))
+ m.return_value.readlines.return_value = mount_data
+
+ with patch('__builtin__.open', m, create=True):
+ self.assertEqual(am.is_special_selinux_path('/some/random/path'), (False, None))
+ self.assertEqual(am.is_special_selinux_path('/some/path/that/should/be/nfs'), (True, ['foo_u', 'foo_r', 'foo_t', 's0']))
+ self.assertEqual(am.is_special_selinux_path('/weird/random/fstype/path'), (True, ['foo_u', 'foo_r', 'foo_t', 's0']))
+
+ def test_module_utils_basic_ansible_module_to_filesystem_str(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ self.assertEqual(am._to_filesystem_str(u'foo'), 'foo')
+ self.assertEqual(am._to_filesystem_str(u'föö'), 'f\xc3\xb6\xc3\xb6')
+
+ def test_module_utils_basic_ansible_module_user_and_group(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ mock_stat = MagicMock()
+ mock_stat.st_uid = 0
+ mock_stat.st_gid = 0
+
+ with patch('os.lstat', return_value=mock_stat):
+ self.assertEqual(am.user_and_group('/path/to/file'), (0, 0))
+
+ def test_module_utils_basic_ansible_module_find_mount_point(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ def _mock_ismount(path):
+ if path == '/':
+ return True
+ return False
+
+ with patch('os.path.ismount', side_effect=_mock_ismount):
+ self.assertEqual(am.find_mount_point('/root/fs/../mounted/path/to/whatever'), '/')
+
+ def _mock_ismount(path):
+ if path == '/subdir/mount':
+ return True
+ return False
+
+ with patch('os.path.ismount', side_effect=_mock_ismount):
+ self.assertEqual(am.find_mount_point('/subdir/mount/path/to/whatever'), '/subdir/mount')
+
+ def test_module_utils_basic_ansible_module_set_context_if_different(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ basic.HAS_SELINUX = False
+
+ am.selinux_enabled = MagicMock(return_value=False)
+ self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True), True)
+ self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), False)
+
+ basic.HAS_SELINUX = True
+
+ am.selinux_enabled = MagicMock(return_value=True)
+ am.selinux_context = MagicMock(return_value=['bar_u', 'bar_r', None, None])
+ am.is_special_selinux_path = MagicMock(return_value=(False, None))
+
+ with patch('selinux.lsetfilecon', return_value=0) as m:
+ self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), True)
+ m.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
+ m.reset_mock()
+ am.check_mode = True
+ self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), True)
+ self.assertEqual(m.called, False)
+ am.check_mode = False
+
+ with patch('selinux.lsetfilecon', return_value=1) as m:
+ self.assertRaises(SystemExit, am.set_context_if_different, '/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
+
+ with patch('selinux.lsetfilecon', side_effect=OSError) as m:
+ self.assertRaises(SystemExit, am.set_context_if_different, '/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
+
+ am.is_special_selinux_path = MagicMock(return_value=(True, ['sp_u', 'sp_r', 'sp_t', 's0']))
+
+ with patch('selinux.lsetfilecon', return_value=0) as m:
+ self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), True)
+ m.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
+
+ def test_module_utils_basic_ansible_module_set_owner_if_different(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ self.assertEqual(am.set_owner_if_different('/path/to/file', None, True), True)
+ self.assertEqual(am.set_owner_if_different('/path/to/file', None, False), False)
+
+ am.user_and_group = MagicMock(return_value=(500, 500))
+
+ with patch('os.lchown', return_value=None) as m:
+ self.assertEqual(am.set_owner_if_different('/path/to/file', 0, False), True)
+ m.assert_called_with('/path/to/file', 0, -1)
+
+ def _mock_getpwnam(*args, **kwargs):
+ mock_pw = MagicMock()
+ mock_pw.pw_uid = 0
+ return mock_pw
+
+ m.reset_mock()
+ with patch('pwd.getpwnam', side_effect=_mock_getpwnam):
+ self.assertEqual(am.set_owner_if_different('/path/to/file', 'root', False), True)
+ m.assert_called_with('/path/to/file', 0, -1)
+
+ with patch('pwd.getpwnam', side_effect=KeyError):
+ self.assertRaises(SystemExit, am.set_owner_if_different, '/path/to/file', 'root', False)
+
+ m.reset_mock()
+ am.check_mode = True
+ self.assertEqual(am.set_owner_if_different('/path/to/file', 0, False), True)
+ self.assertEqual(m.called, False)
+ am.check_mode = False
+
+ with patch('os.lchown', side_effect=OSError) as m:
+ self.assertRaises(SystemExit, am.set_owner_if_different, '/path/to/file', 'root', False)
+
+ def test_module_utils_basic_ansible_module_set_group_if_different(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ self.assertEqual(am.set_group_if_different('/path/to/file', None, True), True)
+ self.assertEqual(am.set_group_if_different('/path/to/file', None, False), False)
+
+ am.user_and_group = MagicMock(return_value=(500, 500))
+
+ with patch('os.lchown', return_value=None) as m:
+ self.assertEqual(am.set_group_if_different('/path/to/file', 0, False), True)
+ m.assert_called_with('/path/to/file', -1, 0)
+
+ def _mock_getgrnam(*args, **kwargs):
+ mock_gr = MagicMock()
+ mock_gr.gr_gid = 0
+ return mock_gr
+
+ m.reset_mock()
+ with patch('grp.getgrnam', side_effect=_mock_getgrnam):
+ self.assertEqual(am.set_group_if_different('/path/to/file', 'root', False), True)
+ m.assert_called_with('/path/to/file', -1, 0)
+
+ with patch('grp.getgrnam', side_effect=KeyError):
+ self.assertRaises(SystemExit, am.set_group_if_different, '/path/to/file', 'root', False)
+
+ m.reset_mock()
+ am.check_mode = True
+ self.assertEqual(am.set_group_if_different('/path/to/file', 0, False), True)
+ self.assertEqual(m.called, False)
+ am.check_mode = False
+
+ with patch('os.lchown', side_effect=OSError) as m:
+ self.assertRaises(SystemExit, am.set_group_if_different, '/path/to/file', 'root', False)
+
+ def test_module_utils_basic_ansible_module_set_mode_if_different(self):
+ from ansible.module_utils import basic
+
+ basic.MODULE_COMPLEX_ARGS = '{}'
+ am = basic.AnsibleModule(
+ argument_spec = dict(),
+ )
+
+ mock_stat1 = MagicMock()
+ mock_stat1.st_mode = 0444
+ mock_stat2 = MagicMock()
+ mock_stat2.st_mode = 0660
+
+ with patch('os.lstat', side_effect=[mock_stat1]):
+ self.assertEqual(am.set_mode_if_different('/path/to/file', None, True), True)
+ with patch('os.lstat', side_effect=[mock_stat1]):
+ self.assertEqual(am.set_mode_if_different('/path/to/file', None, False), False)
+
+ with patch('os.lstat') as m:
+ with patch('os.lchmod', return_value=None, create=True) as m_os:
+ m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
+ self.assertEqual(am.set_mode_if_different('/path/to/file', 0660, False), True)
+ m_os.assert_called_with('/path/to/file', 0660)
+
+ m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
+ am._symbolic_mode_to_octal = MagicMock(return_value=0660)
+ self.assertEqual(am.set_mode_if_different('/path/to/file', 'o+w,g+w,a-r', False), True)
+ m_os.assert_called_with('/path/to/file', 0660)
+
+ m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
+ am._symbolic_mode_to_octal = MagicMock(side_effect=Exception)
+ self.assertRaises(SystemExit, am.set_mode_if_different, '/path/to/file', 'o+w,g+w,a-r', False)
+
+ m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
+ am.check_mode = True
+ self.assertEqual(am.set_mode_if_different('/path/to/file', 0660, False), True)
+ am.check_mode = False
+
+ # FIXME: this isn't working yet
+ #with patch('os.lstat', side_effect=[mock_stat1, mock_stat2]):
+ # with patch('os.lchmod', return_value=None, create=True) as m_os:
+ # del m_os.lchmod
+ # with patch('os.path.islink', return_value=False):
+ # with patch('os.chmod', return_value=None) as m_chmod:
+ # self.assertEqual(am.set_mode_if_different('/path/to/file/no_lchmod', 0660, False), True)
+ # m_chmod.assert_called_with('/path/to/file', 0660)
+ # with patch('os.path.islink', return_value=True):
+ # with patch('os.chmod', return_value=None) as m_chmod:
+ # with patch('os.stat', return_value=mock_stat2):
+ # self.assertEqual(am.set_mode_if_different('/path/to/file', 0660, False), True)
+ # m_chmod.assert_called_with('/path/to/file', 0660)
From 37ae5aab31ad10bf4e194b54e09050d5dbd807ef Mon Sep 17 00:00:00 2001
From: alberto
Date: Thu, 28 May 2015 12:19:32 +0200
Subject: [PATCH 120/971] Capture only IOError when reading shebang from
inventory file, to avoid ignoring other possible exceptions like timeouts
from a task
---
lib/ansible/inventory/__init__.py | 15 +++++++--------
1 file changed, 7 insertions(+), 8 deletions(-)
diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py
index f012246e22..e4080e39d8 100644
--- a/lib/ansible/inventory/__init__.py
+++ b/lib/ansible/inventory/__init__.py
@@ -105,19 +105,18 @@ class Inventory(object):
# class we can show a more apropos error
shebang_present = False
try:
- inv_file = open(host_list)
- first_line = inv_file.readlines()[0]
- inv_file.close()
- if first_line.startswith('#!'):
- shebang_present = True
- except:
+ with open(host_list, "r") as inv_file:
+ first_line = inv_file.readline()
+ if first_line.startswith("#!"):
+ shebang_present = True
+ except IOError:
pass
if utils.is_executable(host_list):
try:
self.parser = InventoryScript(filename=host_list)
self.groups = self.parser.groups.values()
- except:
+ except errors.AnsibleError:
if not shebang_present:
raise errors.AnsibleError("The file %s is marked as executable, but failed to execute correctly. " % host_list + \
"If this is not supposed to be an executable script, correct this with `chmod -x %s`." % host_list)
@@ -127,7 +126,7 @@ class Inventory(object):
try:
self.parser = InventoryParser(filename=host_list)
self.groups = self.parser.groups.values()
- except:
+ except errors.AnsibleError:
if shebang_present:
raise errors.AnsibleError("The file %s looks like it should be an executable inventory script, but is not marked executable. " % host_list + \
"Perhaps you want to correct this with `chmod +x %s`?" % host_list)
From aef76cc701d8f647444c624da664bb65e84e6bce Mon Sep 17 00:00:00 2001
From: Edwin Chiu
Date: Thu, 28 May 2015 14:43:25 -0400
Subject: [PATCH 121/971] More complex example of using test-module
---
hacking/README.md | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
diff --git a/hacking/README.md b/hacking/README.md
index ae8db7e3a9..be19249519 100644
--- a/hacking/README.md
+++ b/hacking/README.md
@@ -33,6 +33,22 @@ Example:
This is a good way to insert a breakpoint into a module, for instance.
+For more complex arguments such as the following yaml:
+
+```yaml
+parent:
+ child:
+ - item: first
+ val: foo
+ - item: second
+ val: bar
+```
+
+Use:
+
+ $ ./hacking/test-module -m module \
+ -a '{"parent": {"child": [{"item": "first", "val": "foo"}, {"item": "second", "val": "bar"}]}}'
+
Module-formatter
----------------
From 1ccf2a4685d136a81d266ed5728c7f2c9b7351e4 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 12:35:37 -0700
Subject: [PATCH 122/971] Make fetch_url check the server's certificate on
https connections
---
lib/ansible/module_utils/urls.py | 49 ++++++++++++-------
.../roles/test_get_url/tasks/main.yml | 20 ++++++++
2 files changed, 52 insertions(+), 17 deletions(-)
diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py
index d56cc89395..18317e86ae 100644
--- a/lib/ansible/module_utils/urls.py
+++ b/lib/ansible/module_utils/urls.py
@@ -50,6 +50,15 @@ try:
except:
HAS_SSL=False
+HAS_MATCH_HOSTNAME = True
+try:
+ from ssl import match_hostname, CertificateError
+except ImportError:
+ try:
+ from backports.ssl_match_hostname import match_hostname, CertificateError
+ except ImportError:
+ HAS_MATCH_HOSTNAME = False
+
import httplib
import os
import re
@@ -293,11 +302,13 @@ class SSLValidationHandler(urllib2.BaseHandler):
connect_result = s.recv(4096)
self.validate_proxy_response(connect_result)
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+ match_hostname(ssl_s.getpeercert(), self.hostname)
else:
self.module.fail_json(msg='Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
else:
s.connect((self.hostname, self.port))
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+ match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
#ssl_s.unwrap()
s.close()
@@ -311,6 +322,9 @@ class SSLValidationHandler(urllib2.BaseHandler):
'Use validate_certs=no or make sure your managed systems have a valid CA certificate installed. ' + \
'Paths checked for this platform: %s' % ", ".join(paths_checked)
)
+ except CertificateError:
+ self.module.fail_json(msg="SSL Certificate does not belong to %s. Make sure the url has a certificate that belongs to it or use validate_certs=no (insecure)" % self.hostname)
+
try:
# cleanup the temp file created, don't worry
# if it fails for some reason
@@ -363,28 +377,29 @@ def fetch_url(module, url, data=None, headers=None, method=None,
# FIXME: change the following to use the generic_urlparse function
# to remove the indexed references for 'parsed'
parsed = urlparse.urlparse(url)
- if parsed[0] == 'https':
- if not HAS_SSL and validate_certs:
+ if parsed[0] == 'https' and validate_certs:
+ if not HAS_SSL:
if distribution == 'Redhat':
module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended. You can also install python-ssl from EPEL')
else:
module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended')
+ if not HAS_MATCH_HOSTNAME:
+ module.fail_json(msg='Available SSL validation does not check that the certificate matches the hostname. You can install backports.ssl_match_hostname or update your managed machine to python-2.7.9 or newer. You could also use validate_certs=no, however this is unsafe and not recommended')
- elif validate_certs:
- # do the cert validation
- netloc = parsed[1]
- if '@' in netloc:
- netloc = netloc.split('@', 1)[1]
- if ':' in netloc:
- hostname, port = netloc.split(':', 1)
- port = int(port)
- else:
- hostname = netloc
- port = 443
- # create the SSL validation handler and
- # add it to the list of handlers
- ssl_handler = SSLValidationHandler(module, hostname, port)
- handlers.append(ssl_handler)
+ # do the cert validation
+ netloc = parsed[1]
+ if '@' in netloc:
+ netloc = netloc.split('@', 1)[1]
+ if ':' in netloc:
+ hostname, port = netloc.split(':', 1)
+ port = int(port)
+ else:
+ hostname = netloc
+ port = 443
+ # create the SSL validation handler and
+ # add it to the list of handlers
+ ssl_handler = SSLValidationHandler(module, hostname, port)
+ handlers.append(ssl_handler)
if parsed[0] != 'ftp':
username = module.params.get('url_username', '')
diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml
index 1aa4b287ea..6d016fe6be 100644
--- a/test/integration/roles/test_get_url/tasks/main.yml
+++ b/test/integration/roles/test_get_url/tasks/main.yml
@@ -25,3 +25,23 @@
that:
- result.changed
- '"OK" in result.msg'
+
+- name: test https fetch to a site with invalid domain
+ get_url:
+ url: "https://kennethreitz.org/"
+ dest: "{{ output_dir }}/shouldnotexist.html"
+ ignore_errors: True
+ register: result
+
+- stat:
+ path: "{{ output_dir }}/shouldnotexist.html"
+ register: stat_result
+
+- debug: var=result
+
+- name: Assert that the file was not downloaded
+ assert:
+ that:
+ - "result.failed == true"
+ - "'Certificate does not belong to ' in result.msg"
+ - "stat_result.stat.exists == false"
From afc19894e1006780d2f248e325f7ecae84bb4f14 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 12:35:37 -0700
Subject: [PATCH 123/971] Make fetch_url check the server's certificate on
https connections
---
lib/ansible/module_utils/urls.py | 49 ++++++++++++-------
.../roles/test_get_url/tasks/main.yml | 20 ++++++++
2 files changed, 52 insertions(+), 17 deletions(-)
diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py
index d56cc89395..18317e86ae 100644
--- a/lib/ansible/module_utils/urls.py
+++ b/lib/ansible/module_utils/urls.py
@@ -50,6 +50,15 @@ try:
except:
HAS_SSL=False
+HAS_MATCH_HOSTNAME = True
+try:
+ from ssl import match_hostname, CertificateError
+except ImportError:
+ try:
+ from backports.ssl_match_hostname import match_hostname, CertificateError
+ except ImportError:
+ HAS_MATCH_HOSTNAME = False
+
import httplib
import os
import re
@@ -293,11 +302,13 @@ class SSLValidationHandler(urllib2.BaseHandler):
connect_result = s.recv(4096)
self.validate_proxy_response(connect_result)
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+ match_hostname(ssl_s.getpeercert(), self.hostname)
else:
self.module.fail_json(msg='Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
else:
s.connect((self.hostname, self.port))
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+ match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
#ssl_s.unwrap()
s.close()
@@ -311,6 +322,9 @@ class SSLValidationHandler(urllib2.BaseHandler):
'Use validate_certs=no or make sure your managed systems have a valid CA certificate installed. ' + \
'Paths checked for this platform: %s' % ", ".join(paths_checked)
)
+ except CertificateError:
+ self.module.fail_json(msg="SSL Certificate does not belong to %s. Make sure the url has a certificate that belongs to it or use validate_certs=no (insecure)" % self.hostname)
+
try:
# cleanup the temp file created, don't worry
# if it fails for some reason
@@ -363,28 +377,29 @@ def fetch_url(module, url, data=None, headers=None, method=None,
# FIXME: change the following to use the generic_urlparse function
# to remove the indexed references for 'parsed'
parsed = urlparse.urlparse(url)
- if parsed[0] == 'https':
- if not HAS_SSL and validate_certs:
+ if parsed[0] == 'https' and validate_certs:
+ if not HAS_SSL:
if distribution == 'Redhat':
module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended. You can also install python-ssl from EPEL')
else:
module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended')
+ if not HAS_MATCH_HOSTNAME:
+ module.fail_json(msg='Available SSL validation does not check that the certificate matches the hostname. You can install backports.ssl_match_hostname or update your managed machine to python-2.7.9 or newer. You could also use validate_certs=no, however this is unsafe and not recommended')
- elif validate_certs:
- # do the cert validation
- netloc = parsed[1]
- if '@' in netloc:
- netloc = netloc.split('@', 1)[1]
- if ':' in netloc:
- hostname, port = netloc.split(':', 1)
- port = int(port)
- else:
- hostname = netloc
- port = 443
- # create the SSL validation handler and
- # add it to the list of handlers
- ssl_handler = SSLValidationHandler(module, hostname, port)
- handlers.append(ssl_handler)
+ # do the cert validation
+ netloc = parsed[1]
+ if '@' in netloc:
+ netloc = netloc.split('@', 1)[1]
+ if ':' in netloc:
+ hostname, port = netloc.split(':', 1)
+ port = int(port)
+ else:
+ hostname = netloc
+ port = 443
+ # create the SSL validation handler and
+ # add it to the list of handlers
+ ssl_handler = SSLValidationHandler(module, hostname, port)
+ handlers.append(ssl_handler)
if parsed[0] != 'ftp':
username = module.params.get('url_username', '')
diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml
index 1aa4b287ea..6d016fe6be 100644
--- a/test/integration/roles/test_get_url/tasks/main.yml
+++ b/test/integration/roles/test_get_url/tasks/main.yml
@@ -25,3 +25,23 @@
that:
- result.changed
- '"OK" in result.msg'
+
+- name: test https fetch to a site with invalid domain
+ get_url:
+ url: "https://kennethreitz.org/"
+ dest: "{{ output_dir }}/shouldnotexist.html"
+ ignore_errors: True
+ register: result
+
+- stat:
+ path: "{{ output_dir }}/shouldnotexist.html"
+ register: stat_result
+
+- debug: var=result
+
+- name: Assert that the file was not downloaded
+ assert:
+ that:
+ - "result.failed == true"
+ - "'Certificate does not belong to ' in result.msg"
+ - "stat_result.stat.exists == false"
From 4d8427538dbf3b15e65622b56ff20a6fc67429fd Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 15:10:06 -0700
Subject: [PATCH 124/971] Update submodule refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 2b5e932cfb..7fea93835c 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 2b5e932cfb4df42f46812aee2476fdf5aabab172
+Subproject commit 7fea93835c172d23638959cbe2d00a3be8d14557
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index b2e4f31beb..c223716bc7 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit b2e4f31bebfec49380659b9d65b5828f1c1ed8d9
+Subproject commit c223716bc7ccf2d0ac7995b36f76cca8ccd5bfda
From 0f4a3409d851c658a765c95442d985ea7b9a13ec Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 15:35:25 -0700
Subject: [PATCH 125/971] Add test that validate_certs=no works
---
.../roles/test_get_url/tasks/main.yml | 21 ++++++++++++++++---
1 file changed, 18 insertions(+), 3 deletions(-)
diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml
index 6d016fe6be..3a6bc509c0 100644
--- a/test/integration/roles/test_get_url/tasks/main.yml
+++ b/test/integration/roles/test_get_url/tasks/main.yml
@@ -26,7 +26,7 @@
- result.changed
- '"OK" in result.msg'
-- name: test https fetch to a site with invalid domain
+- name: test https fetch to a site with mismatched hostname and certificate
get_url:
url: "https://kennethreitz.org/"
dest: "{{ output_dir }}/shouldnotexist.html"
@@ -37,11 +37,26 @@
path: "{{ output_dir }}/shouldnotexist.html"
register: stat_result
-- debug: var=result
-
- name: Assert that the file was not downloaded
assert:
that:
- "result.failed == true"
- "'Certificate does not belong to ' in result.msg"
- "stat_result.stat.exists == false"
+
+- name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no
+ get_url:
+ url: "https://kennethreitz.org/"
+ dest: "{{ output_dir }}/kreitz.html"
+ validate_certs: no
+ register: result
+
+- stat:
+ path: "{{ output_dir }}/kreitz.html"
+ register: stat_result
+
+- name: Assert that the file was not downloaded
+ assert:
+ that:
+ - "result.failed == false"
+ - "stat_result.stat.exists == true"
From 1bda7cc200d5bd1054d1bcb3b1986afe80b30dbd Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 15:35:45 -0700
Subject: [PATCH 126/971] Test that uri module validates certs
---
.../integration/roles/test_uri/tasks/main.yml | 35 +++++++++++++++++++
1 file changed, 35 insertions(+)
diff --git a/test/integration/roles/test_uri/tasks/main.yml b/test/integration/roles/test_uri/tasks/main.yml
index 66e01ae8e5..da4bf65574 100644
--- a/test/integration/roles/test_uri/tasks/main.yml
+++ b/test/integration/roles/test_uri/tasks/main.yml
@@ -91,3 +91,38 @@
with_together:
- fail_checksum.results
- fail.results
+
+- name: test https fetch to a site with mismatched hostname and certificate
+ uri:
+ url: "https://kennethreitz.org/"
+ dest: "{{ output_dir }}/shouldnotexist.html"
+ ignore_errors: True
+ register: result
+
+- stat:
+ path: "{{ output_dir }}/shouldnotexist.html"
+ register: stat_result
+
+- name: Assert that the file was not downloaded
+ assert:
+ that:
+ - "result.failed == true"
+ - "'certificate does not match ' in result.msg"
+ - "stat_result.stat.exists == false"
+
+- name: test https fetch to a site with mismatched hostname and certificate and validate_certs=no
+ get_url:
+ url: "https://kennethreitz.org/"
+ dest: "{{ output_dir }}/kreitz.html"
+ validate_certs: no
+ register: result
+
+- stat:
+ path: "{{ output_dir }}/kreitz.html"
+ register: stat_result
+
+- name: Assert that the file was not downloaded
+ assert:
+ that:
+ - "result.failed == false"
+ - "stat_result.stat.exists == true"
From 2f4ad2714f773b0a34dfc5ba4be4e3e62719df53 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 15:36:35 -0700
Subject: [PATCH 127/971] Update core module ref
---
lib/ansible/modules/core | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 7fea93835c..a7a3ef54d7 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 7fea93835c172d23638959cbe2d00a3be8d14557
+Subproject commit a7a3ef54d7e917fb81d44cda4266ff2b4e8870c9
From 5ffc1183dd18397048d9a82d720cb79882c88bfd Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 15:37:12 -0700
Subject: [PATCH 128/971] Whoops, that was the core module stable branch
---
lib/ansible/modules/core | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index a7a3ef54d7..5983d64d77 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit a7a3ef54d7e917fb81d44cda4266ff2b4e8870c9
+Subproject commit 5983d64d7728ea88ef27606e95e4aa34cde5ff46
From 5d213cab23ced2664fdd0d77a9c1e1b11a3d489b Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 16:00:58 -0700
Subject: [PATCH 129/971] Update extras submodule ref for doc fix
---
lib/ansible/modules/extras | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index c223716bc7..1276420a3a 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit c223716bc7ccf2d0ac7995b36f76cca8ccd5bfda
+Subproject commit 1276420a3a39340fcd9e053a1e621cdd89f480fa
From e5190327f2131997cae02e57e0c012e69c1a1828 Mon Sep 17 00:00:00 2001
From: Stefan Midjich
Date: Wed, 6 May 2015 22:47:53 +0200
Subject: [PATCH 130/971] This fixes Ansible on OpenBSD and FreeBSD systems;
only tested on OpenBSD.
---
lib/ansible/module_utils/facts.py | 37 +++++++++++++++++++++++++++++++
1 file changed, 37 insertions(+)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 6ddae5df85..7209f699c3 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -2535,6 +2535,43 @@ class LinuxVirtual(Virtual):
self.facts['virtualization_role'] = 'NA'
return
+class FreeBSDVirtual(Virtual):
+ """
+ This is a FreeBSD-specific subclass of Virtual. It defines
+ - virtualization_type
+ - virtualization_role
+ """
+ platform = 'FreeBSD'
+
+ def __init__(self):
+ Virtual.__init__(self)
+
+ def populate(self):
+ self.get_virtual_facts()
+ return self.facts
+
+ def get_virtual_facts(self):
+ self.facts['virtualization_type'] = ''
+ self.facts['virtualization_role'] = ''
+
+class OpenBSDVirtual(Virtual):
+ """
+ This is a OpenBSD-specific subclass of Virtual. It defines
+ - virtualization_type
+ - virtualization_role
+ """
+ platform = 'OpenBSD'
+
+ def __init__(self):
+ Virtual.__init__(self)
+
+ def populate(self):
+ self.get_virtual_facts()
+ return self.facts
+
+ def get_virtual_facts(self):
+ self.facts['virtualization_type'] = ''
+ self.facts['virtualization_role'] = ''
class HPUXVirtual(Virtual):
"""
From 73b7d96501420fcce7bc002bd839ec9cafde6a0a Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 17:01:18 -0700
Subject: [PATCH 131/971] Test on fields that exist
---
test/integration/roles/test_get_url/tasks/main.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/test/integration/roles/test_get_url/tasks/main.yml b/test/integration/roles/test_get_url/tasks/main.yml
index 3a6bc509c0..88ff3b2e21 100644
--- a/test/integration/roles/test_get_url/tasks/main.yml
+++ b/test/integration/roles/test_get_url/tasks/main.yml
@@ -55,8 +55,8 @@
path: "{{ output_dir }}/kreitz.html"
register: stat_result
-- name: Assert that the file was not downloaded
+- name: Assert that the file was downloaded
assert:
that:
- - "result.failed == false"
+ - "result.changed == true"
- "stat_result.stat.exists == true"
From e7a096c4c53084572adf3c67ccd245919c47e0a8 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Thu, 28 May 2015 20:01:39 -0400
Subject: [PATCH 132/971] cowsay is back!
---
lib/ansible/utils/display.py | 50 ++++++++++++++++++++++++++++++++++++
1 file changed, 50 insertions(+)
diff --git a/lib/ansible/utils/display.py b/lib/ansible/utils/display.py
index d5b6ad71a9..6c5e850a70 100644
--- a/lib/ansible/utils/display.py
+++ b/lib/ansible/utils/display.py
@@ -20,6 +20,9 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import textwrap
+import os
+import random
+import subprocess
import sys
from ansible import constants as C
@@ -37,6 +40,31 @@ class Display:
self._warns = {}
self._errors = {}
+ self.cowsay = None
+ self.noncow = os.getenv("ANSIBLE_COW_SELECTION",None)
+ self.set_cowsay_info()
+
+ def set_cowsay_info(self):
+
+ if not C.ANSIBLE_NOCOWS:
+ if os.path.exists("/usr/bin/cowsay"):
+ self.cowsay = "/usr/bin/cowsay"
+ elif os.path.exists("/usr/games/cowsay"):
+ self.cowsay = "/usr/games/cowsay"
+ elif os.path.exists("/usr/local/bin/cowsay"):
+ # BSD path for cowsay
+ self.cowsay = "/usr/local/bin/cowsay"
+ elif os.path.exists("/opt/local/bin/cowsay"):
+ # MacPorts path for cowsay
+ self.cowsay = "/opt/local/bin/cowsay"
+
+ if self.cowsay and self.noncow == 'random':
+ cmd = subprocess.Popen([self.cowsay, "-l"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out, err) = cmd.communicate()
+ cows = out.split()
+ cows.append(False)
+ self.noncow = random.choice(cows)
+
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
msg2 = msg
if color:
@@ -125,6 +153,14 @@ class Display:
Prints a header-looking line with stars taking up to 80 columns
of width (3 columns, minimum)
'''
+ if self.cowsay:
+ try:
+ self.banner_cowsay(msg)
+ return
+ except OSError:
+ # somebody cleverly deleted cowsay or something during the PB run. heh.
+ pass
+
msg = msg.strip()
star_len = (80 - len(msg))
if star_len < 0:
@@ -132,6 +168,20 @@ class Display:
stars = "*" * star_len
self.display("\n%s %s" % (msg, stars), color=color)
+ def banner_cowsay(self, msg, color=None):
+ if ": [" in msg:
+ msg = msg.replace("[","")
+ if msg.endswith("]"):
+ msg = msg[:-1]
+ runcmd = [self.cowsay,"-W", "60"]
+ if self.noncow:
+ runcmd.append('-f')
+ runcmd.append(self.noncow)
+ runcmd.append(msg)
+ cmd = subprocess.Popen(runcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out, err) = cmd.communicate()
+ self.display("%s\n" % out, color=color)
+
def error(self, msg):
new_msg = "\n[ERROR]: %s" % msg
wrapped = textwrap.wrap(new_msg, 79)
From ac14ad1419aff12aa9b7186dae129fe9aa770106 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Thu, 28 May 2015 17:02:48 -0700
Subject: [PATCH 133/971] Test on fields that are actually set
---
test/integration/roles/test_uri/tasks/main.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/test/integration/roles/test_uri/tasks/main.yml b/test/integration/roles/test_uri/tasks/main.yml
index da4bf65574..99c6048a59 100644
--- a/test/integration/roles/test_uri/tasks/main.yml
+++ b/test/integration/roles/test_uri/tasks/main.yml
@@ -121,8 +121,8 @@
path: "{{ output_dir }}/kreitz.html"
register: stat_result
-- name: Assert that the file was not downloaded
+- name: Assert that the file was downloaded
assert:
that:
- - "result.failed == false"
+ - "result.changed == true"
- "stat_result.stat.exists == true"
From fe014148d9ed97c11951f9c6d34c72c1c303c64a Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 28 May 2015 20:29:16 -0500
Subject: [PATCH 134/971] Removing errant debug print
---
lib/ansible/plugins/strategies/__init__.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index e933ca73d4..e37610a9db 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -96,7 +96,6 @@ class StrategyBase:
return 0
def get_hosts_remaining(self, play):
- print("inventory get hosts: %s" % self._inventory.get_hosts(play.hosts))
return [host for host in self._inventory.get_hosts(play.hosts) if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts]
def get_failed_hosts(self, play):
From 7985d2a8be1804c53390e14618d141b1ad33fb0a Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 28 May 2015 23:58:38 -0500
Subject: [PATCH 135/971] Moving included file stuff to a proper dedicated
class and file (v2)
---
lib/ansible/playbook/included_file.py | 79 ++++++++++++++++++++++
lib/ansible/plugins/strategies/__init__.py | 17 +++--
lib/ansible/plugins/strategies/linear.py | 62 ++---------------
3 files changed, 98 insertions(+), 60 deletions(-)
create mode 100644 lib/ansible/playbook/included_file.py
diff --git a/lib/ansible/playbook/included_file.py b/lib/ansible/playbook/included_file.py
new file mode 100644
index 0000000000..74fdfbc903
--- /dev/null
+++ b/lib/ansible/playbook/included_file.py
@@ -0,0 +1,79 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+class IncludedFile:
+
+ def __init__(self, filename, args, task):
+ self._filename = filename
+ self._args = args
+ self._task = task
+ self._hosts = []
+
+ def add_host(self, host):
+ if host not in self._hosts:
+ self._hosts.append(host)
+
+ def __eq__(self, other):
+ return other._filename == self._filename and other._args == self._args
+
+ def __repr__(self):
+ return "%s (%s): %s" % (self._filename, self._args, self._hosts)
+
+ @staticmethod
+ def process_include_results(results, tqm, iterator, loader):
+ included_files = []
+
+ for res in results:
+ if res._host in tqm._failed_hosts:
+ raise AnsibleError("host is failed, not including files")
+
+ if res._task.action == 'include':
+ if res._task.loop:
+ include_results = res._result['results']
+ else:
+ include_results = [ res._result ]
+
+ for include_result in include_results:
+ # if the task result was skipped or failed, continue
+ if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result:
+ continue
+
+ original_task = iterator.get_original_task(res._host, res._task)
+ if original_task and original_task._role:
+ include_file = loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_result['include'])
+ else:
+ include_file = loader.path_dwim(res._task.args.get('_raw_params'))
+
+ include_variables = include_result.get('include_variables', dict())
+ if 'item' in include_result:
+ include_variables['item'] = include_result['item']
+
+ inc_file = IncludedFile(include_file, include_variables, original_task)
+
+ try:
+ pos = included_files.index(inc_file)
+ inc_file = included_files[pos]
+ except ValueError:
+ included_files.append(inc_file)
+
+ inc_file.add_host(res._host)
+
+ return included_files
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index e37610a9db..03ad57ed4a 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -23,10 +23,9 @@ from six.moves import queue as Queue
import time
from ansible.errors import *
-
+from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.inventory.group import Group
-
from ansible.playbook.handler import Handler
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role import ROLE_CACHE, hash_params
@@ -307,12 +306,22 @@ class StrategyBase:
# and add the host to the group
new_group.add_host(actual_host)
- def _load_included_file(self, included_file):
+ def _load_included_file(self, included_file, iterator):
'''
Loads an included YAML file of tasks, applying the optional set of variables.
'''
- data = self._loader.load_from_file(included_file._filename)
+ try:
+ data = self._loader.load_from_file(included_file._filename)
+ except AnsibleError, e:
+ for host in included_file._hosts:
+ tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=str(e)))
+ iterator.mark_host_failed(host)
+ self._tqm._failed_hosts[host.name] = True
+ self._tqm._stats.increment('failures', host.name)
+ self._tqm.send_callback('v2_runner_on_failed', tr)
+ return []
+
if not isinstance(data, list):
raise AnsibleParserError("included task files must contain a list of tasks", obj=included_file._task._ds)
diff --git a/lib/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
index ec829c8996..af12587b92 100644
--- a/lib/ansible/plugins/strategies/linear.py
+++ b/lib/ansible/plugins/strategies/linear.py
@@ -22,6 +22,7 @@ __metaclass__ = type
from ansible.errors import AnsibleError
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook.block import Block
+from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task import Task
from ansible.plugins import action_loader
from ansible.plugins.strategies import StrategyBase
@@ -114,7 +115,6 @@ class StrategyModule(StrategyBase):
# return None for all hosts in the list
return [(host, None) for host in hosts]
-
def run(self, iterator, connection_info):
'''
The linear strategy is simple - get the next task and queue
@@ -208,61 +208,11 @@ class StrategyModule(StrategyBase):
results = self._wait_on_pending_results(iterator)
host_results.extend(results)
- # FIXME: this needs to be somewhere else
- class IncludedFile:
- def __init__(self, filename, args, task):
- self._filename = filename
- self._args = args
- self._task = task
- self._hosts = []
- def add_host(self, host):
- if host not in self._hosts:
- self._hosts.append(host)
- def __eq__(self, other):
- return other._filename == self._filename and other._args == self._args
- def __repr__(self):
- return "%s (%s): %s" % (self._filename, self._args, self._hosts)
+ try:
+ included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader)
+ except AnsibleError, e:
+ return 1
- # FIXME: this should also be moved to the base class in a method
- included_files = []
- for res in host_results:
- if res._host in self._tqm._failed_hosts:
- return 1
-
- if res._task.action == 'include':
- if res._task.loop:
- include_results = res._result['results']
- else:
- include_results = [ res._result ]
-
- for include_result in include_results:
- # if the task result was skipped or failed, continue
- if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result:
- continue
-
- original_task = iterator.get_original_task(res._host, res._task)
- if original_task and original_task._role:
- include_file = self._loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_result['include'])
- else:
- include_file = self._loader.path_dwim(res._task.args.get('_raw_params'))
-
- include_variables = include_result.get('include_variables', dict())
- if 'item' in include_result:
- include_variables['item'] = include_result['item']
-
- inc_file = IncludedFile(include_file, include_variables, original_task)
-
- try:
- pos = included_files.index(inc_file)
- inc_file = included_files[pos]
- except ValueError:
- included_files.append(inc_file)
-
- inc_file.add_host(res._host)
-
- # FIXME: should this be moved into the iterator class? Main downside would be
- # that accessing the TQM's callback member would be more difficult, if
- # we do want to send callbacks from here
if len(included_files) > 0:
noop_task = Task()
noop_task.action = 'meta'
@@ -274,7 +224,7 @@ class StrategyModule(StrategyBase):
# included hosts get the task list while those excluded get an equal-length
# list of noop tasks, to make sure that they continue running in lock-step
try:
- new_blocks = self._load_included_file(included_file)
+ new_blocks = self._load_included_file(included_file, iterator=iterator)
except AnsibleError, e:
for host in included_file._hosts:
iterator.mark_host_failed(host)
From 0828028c71bb5273a6796c0c47f93cf23b818471 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Fri, 29 May 2015 00:15:14 -0500
Subject: [PATCH 136/971] Fixing unit test for included file changes
---
test/units/plugins/strategies/test_strategy_base.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/test/units/plugins/strategies/test_strategy_base.py b/test/units/plugins/strategies/test_strategy_base.py
index 7d8cb42ee6..4c177f7343 100644
--- a/test/units/plugins/strategies/test_strategy_base.py
+++ b/test/units/plugins/strategies/test_strategy_base.py
@@ -299,14 +299,17 @@ class TestStrategyBase(unittest.TestCase):
mock_task._block = mock_block
mock_task._role = None
+ mock_iterator = MagicMock()
+ mock_iterator.mark_host_failed.return_value = None
+
mock_inc_file = MagicMock()
mock_inc_file._task = mock_task
mock_inc_file._filename = "test.yml"
- res = strategy_base._load_included_file(included_file=mock_inc_file)
+ res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
mock_inc_file._filename = "bad.yml"
- self.assertRaises(AnsibleParserError, strategy_base._load_included_file, included_file=mock_inc_file)
+ self.assertRaises(AnsibleParserError, strategy_base._load_included_file, included_file=mock_inc_file, iterator=mock_iterator)
def test_strategy_base_run_handlers(self):
workers = []
From 9371c38af928f750114525e5f447ebad73446caa Mon Sep 17 00:00:00 2001
From: Jon Hawkesworth
Date: Fri, 29 May 2015 14:50:08 +0100
Subject: [PATCH 137/971] Add -Compress to ConvertTo-Json calls in common
powershell code
---
lib/ansible/module_utils/powershell.ps1 | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/module_utils/powershell.ps1 b/lib/ansible/module_utils/powershell.ps1
index ee7d3ddeca..9606f47783 100644
--- a/lib/ansible/module_utils/powershell.ps1
+++ b/lib/ansible/module_utils/powershell.ps1
@@ -65,7 +65,7 @@ Function Exit-Json($obj)
$obj = New-Object psobject
}
- echo $obj | ConvertTo-Json -Depth 99
+ echo $obj | ConvertTo-Json -Compress -Depth 99
Exit
}
@@ -89,7 +89,7 @@ Function Fail-Json($obj, $message = $null)
Set-Attr $obj "msg" $message
Set-Attr $obj "failed" $true
- echo $obj | ConvertTo-Json -Depth 99
+ echo $obj | ConvertTo-Json -Compress -Depth 99
Exit 1
}
From 12691ce109dcf1625c6c41357ce26f95da0862f0 Mon Sep 17 00:00:00 2001
From: Jon Hawkesworth
Date: Fri, 29 May 2015 14:50:08 +0100
Subject: [PATCH 138/971] Add -Compress to ConvertTo-Json calls in common
powershell code
---
lib/ansible/module_utils/powershell.ps1 | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/module_utils/powershell.ps1 b/lib/ansible/module_utils/powershell.ps1
index 57d2c1b101..c58ac4b9b7 100644
--- a/lib/ansible/module_utils/powershell.ps1
+++ b/lib/ansible/module_utils/powershell.ps1
@@ -65,7 +65,7 @@ Function Exit-Json($obj)
$obj = New-Object psobject
}
- echo $obj | ConvertTo-Json -Depth 99
+ echo $obj | ConvertTo-Json -Compress -Depth 99
Exit
}
@@ -89,7 +89,7 @@ Function Fail-Json($obj, $message = $null)
Set-Attr $obj "msg" $message
Set-Attr $obj "failed" $true
- echo $obj | ConvertTo-Json -Depth 99
+ echo $obj | ConvertTo-Json -Compress -Depth 99
Exit 1
}
From dee2d53b3e68e85d96d821167183803ad7e27f99 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Fri, 29 May 2015 08:51:50 -0700
Subject: [PATCH 139/971] Update v2 submodule refs
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 9cc23c749a..191a672891 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 9cc23c749a8cd5039db7aa1998d310bbb04d1e13
+Subproject commit 191a672891359f3b6faff83cb0613f1b38e3fc0e
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index a07fc88ba0..1276420a3a 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit a07fc88ba0d2546b92fbe93b2bede699fdf2bc48
+Subproject commit 1276420a3a39340fcd9e053a1e621cdd89f480fa
From 1e418fe56a67bfa18468783f47c75781f02b11e4 Mon Sep 17 00:00:00 2001
From: Chris Church
Date: Fri, 29 May 2015 13:57:11 -0400
Subject: [PATCH 140/971] Only run win_feature tests when the host has the
ServerManager module.
---
.../roles/test_win_feature/tasks/main.yml | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/test/integration/roles/test_win_feature/tasks/main.yml b/test/integration/roles/test_win_feature/tasks/main.yml
index a49622c232..4b31f8b358 100644
--- a/test/integration/roles/test_win_feature/tasks/main.yml
+++ b/test/integration/roles/test_win_feature/tasks/main.yml
@@ -17,10 +17,16 @@
# along with Ansible. If not, see .
+- name: check whether servermanager module is available (windows 2008 r2 or later)
+ raw: PowerShell -Command Import-Module ServerManager
+ register: win_feature_has_servermanager
+ ignore_errors: true
+
- name: start with feature absent
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
+ when: win_feature_has_servermanager|success
- name: install feature
win_feature:
@@ -30,6 +36,7 @@
include_sub_features: yes
include_management_tools: yes
register: win_feature_install_result
+ when: win_feature_has_servermanager|success
- name: check result of installing feature
assert:
@@ -45,6 +52,7 @@
- "win_feature_install_result.feature_result[0].restart_needed is defined"
- "win_feature_install_result.feature_result[0].skip_reason"
- "win_feature_install_result.feature_result[0].success is defined"
+ when: win_feature_has_servermanager|success
- name: install feature again
win_feature:
@@ -54,6 +62,7 @@
include_sub_features: yes
include_management_tools: yes
register: win_feature_install_again_result
+ when: win_feature_has_servermanager|success
- name: check result of installing feature again
assert:
@@ -63,12 +72,14 @@
- "win_feature_install_again_result.exitcode == 'NoChangeNeeded'"
- "not win_feature_install_again_result.restart_needed"
- "win_feature_install_again_result.feature_result == []"
+ when: win_feature_has_servermanager|success
- name: remove feature
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
register: win_feature_remove_result
+ when: win_feature_has_servermanager|success
- name: check result of removing feature
assert:
@@ -84,12 +95,14 @@
- "win_feature_remove_result.feature_result[0].restart_needed is defined"
- "win_feature_remove_result.feature_result[0].skip_reason"
- "win_feature_remove_result.feature_result[0].success is defined"
+ when: win_feature_has_servermanager|success
- name: remove feature again
win_feature:
name: "{{ test_win_feature_name }}"
state: absent
register: win_feature_remove_again_result
+ when: win_feature_has_servermanager|success
- name: check result of removing feature again
assert:
@@ -99,6 +112,7 @@
- "win_feature_remove_again_result.exitcode == 'NoChangeNeeded'"
- "not win_feature_remove_again_result.restart_needed"
- "win_feature_remove_again_result.feature_result == []"
+ when: win_feature_has_servermanager|success
- name: try to install an invalid feature name
win_feature:
@@ -106,6 +120,7 @@
state: present
register: win_feature_install_invalid_result
ignore_errors: true
+ when: win_feature_has_servermanager|success
- name: check result of installing invalid feature name
assert:
@@ -114,6 +129,7 @@
- "not win_feature_install_invalid_result|changed"
- "win_feature_install_invalid_result.msg"
- "win_feature_install_invalid_result.exitcode == 'InvalidArgs'"
+ when: win_feature_has_servermanager|success
- name: try to remove an invalid feature name
win_feature:
@@ -121,6 +137,7 @@
state: absent
register: win_feature_remove_invalid_result
ignore_errors: true
+ when: win_feature_has_servermanager|success
- name: check result of removing invalid feature name
assert:
@@ -129,3 +146,4 @@
- "not win_feature_remove_invalid_result|changed"
- "win_feature_remove_invalid_result.msg"
- "win_feature_remove_invalid_result.exitcode == 'InvalidArgs'"
+ when: win_feature_has_servermanager|success
From b659621575168b57d06b44de2d507aba202f2607 Mon Sep 17 00:00:00 2001
From: Monty Taylor
Date: Mon, 11 May 2015 08:06:21 -0400
Subject: [PATCH 141/971] Remove unneeded required_one_of for openstack
We're being too strict - there is a third possibility, which is that a
user will have defined the OS_* environment variables and expect them to
pass through.
---
lib/ansible/module_utils/openstack.py | 6 +-----
lib/ansible/utils/module_docs_fragments/openstack.py | 7 +++++--
v1/ansible/module_utils/openstack.py | 6 +-----
3 files changed, 7 insertions(+), 12 deletions(-)
diff --git a/lib/ansible/module_utils/openstack.py b/lib/ansible/module_utils/openstack.py
index b58cc53428..4069449144 100644
--- a/lib/ansible/module_utils/openstack.py
+++ b/lib/ansible/module_utils/openstack.py
@@ -93,11 +93,7 @@ def openstack_full_argument_spec(**kwargs):
def openstack_module_kwargs(**kwargs):
- ret = dict(
- required_one_of=[
- ['cloud', 'auth'],
- ],
- )
+ ret = {}
for key in ('mutually_exclusive', 'required_together', 'required_one_of'):
if key in kwargs:
if key in ret:
diff --git a/lib/ansible/utils/module_docs_fragments/openstack.py b/lib/ansible/utils/module_docs_fragments/openstack.py
index f989b3dcb8..c295ed4306 100644
--- a/lib/ansible/utils/module_docs_fragments/openstack.py
+++ b/lib/ansible/utils/module_docs_fragments/openstack.py
@@ -23,7 +23,9 @@ class ModuleDocFragment(object):
options:
cloud:
description:
- - Named cloud to operate against. Provides default values for I(auth) and I(auth_plugin)
+ - Named cloud to operate against. Provides default values for I(auth) and
+ I(auth_type). This parameter is not needed if I(auth) is provided or if
+ OpenStack OS_* environment variables are present.
required: false
auth:
description:
@@ -32,7 +34,8 @@ options:
I(auth_url), I(username), I(password), I(project_name) and any
information about domains if the cloud supports them. For other plugins,
this param will need to contain whatever parameters that auth plugin
- requires. This parameter is not needed if a named cloud is provided.
+ requires. This parameter is not needed if a named cloud is provided or
+ OpenStack OS_* environment variables are present.
required: false
auth_type:
description:
diff --git a/v1/ansible/module_utils/openstack.py b/v1/ansible/module_utils/openstack.py
index b58cc53428..4069449144 100644
--- a/v1/ansible/module_utils/openstack.py
+++ b/v1/ansible/module_utils/openstack.py
@@ -93,11 +93,7 @@ def openstack_full_argument_spec(**kwargs):
def openstack_module_kwargs(**kwargs):
- ret = dict(
- required_one_of=[
- ['cloud', 'auth'],
- ],
- )
+ ret = {}
for key in ('mutually_exclusive', 'required_together', 'required_one_of'):
if key in kwargs:
if key in ret:
From 2046d763109d8d62a39e6e215ae8cd2a2465d422 Mon Sep 17 00:00:00 2001
From: Monty Taylor
Date: Mon, 11 May 2015 08:10:37 -0400
Subject: [PATCH 142/971] Add defaults and a link to os-client-config docs
---
lib/ansible/utils/module_docs_fragments/openstack.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/utils/module_docs_fragments/openstack.py b/lib/ansible/utils/module_docs_fragments/openstack.py
index c295ed4306..94d5b9834c 100644
--- a/lib/ansible/utils/module_docs_fragments/openstack.py
+++ b/lib/ansible/utils/module_docs_fragments/openstack.py
@@ -80,14 +80,17 @@ options:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
required: false
+ default: None
cert:
description:
- A path to a client certificate to use as part of the SSL transaction
required: false
+ default: None
key:
description:
- A path to a client key to use as part of the SSL transaction
required: false
+ default: None
endpoint_type:
description:
- Endpoint URL type to fetch from the service catalog.
@@ -102,5 +105,6 @@ notes:
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
- plays.
+ plays. More information can be found at
+ U(http://docs.openstack.org/developer/os-client-config)
'''
From a8c290cc3bb4b2549a0e5b64beb985ff78bf8d23 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Fri, 29 May 2015 16:13:30 -0400
Subject: [PATCH 143/971] fixed ubuntu facts for all versions; made sure NA is
 option of last resort
---
lib/ansible/module_utils/facts.py | 9 ++++++---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
3 files changed, 8 insertions(+), 5 deletions(-)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 3485690b83..6f5f35f831 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -99,8 +99,9 @@ class Facts(object):
('/etc/os-release', 'SuSE'),
('/etc/gentoo-release', 'Gentoo'),
('/etc/os-release', 'Debian'),
+ ('/etc/lsb-release', 'Mandriva'),
('/etc/os-release', 'NA'),
- ('/etc/lsb-release', 'Mandriva'))
+ )
SELINUX_MODE_DICT = { 1: 'enforcing', 0: 'permissive', -1: 'disabled' }
# A list of dicts. If there is a platform with more than one
@@ -416,11 +417,13 @@ class Facts(object):
self.facts['distribution_version'] = self.facts['distribution_version'] + '.' + release.group(1)
elif name == 'Debian':
data = get_file_content(path)
- if 'Debian' in data or 'Raspbian' in data:
+ if 'Ubuntu' in data:
+ break # Ubuntu gets correct info from python functions
+ elif 'Debian' in data or 'Raspbian' in data:
release = re.search("PRETTY_NAME=[^(]+ \(?([^)]+?)\)", data)
if release:
self.facts['distribution_release'] = release.groups()[0]
- break
+ break
elif name == 'Mandriva':
data = get_file_content(path)
if 'Mandriva' in data:
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 5983d64d77..9cc23c749a 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 5983d64d7728ea88ef27606e95e4aa34cde5ff46
+Subproject commit 9cc23c749a8cd5039db7aa1998d310bbb04d1e13
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index 1276420a3a..a07fc88ba0 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit 1276420a3a39340fcd9e053a1e621cdd89f480fa
+Subproject commit a07fc88ba0d2546b92fbe93b2bede699fdf2bc48
From 7e020d21deeb3425784e3bf13e07eed1cf036b22 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Fri, 29 May 2015 16:19:09 -0400
Subject: [PATCH 144/971] correctly identify ubuntu now in all cases; made NA
 the last resort
---
lib/ansible/module_utils/facts.py | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 7209f699c3..39546cc8bb 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -99,8 +99,9 @@ class Facts(object):
('/etc/os-release', 'SuSE'),
('/etc/gentoo-release', 'Gentoo'),
('/etc/os-release', 'Debian'),
+ ('/etc/lsb-release', 'Mandriva'),
('/etc/os-release', 'NA'),
- ('/etc/lsb-release', 'Mandriva'))
+ )
SELINUX_MODE_DICT = { 1: 'enforcing', 0: 'permissive', -1: 'disabled' }
# A list of dicts. If there is a platform with more than one
@@ -416,7 +417,9 @@ class Facts(object):
self.facts['distribution_version'] = self.facts['distribution_version'] + '.' + release.group(1)
elif name == 'Debian':
data = get_file_content(path)
- if 'Debian' in data or 'Raspbian' in data:
+ if 'Ubuntu' in data:
+ break # Ubuntu gets correct info from python functions
+ elif 'Debian' in data or 'Raspbian' in data:
release = re.search("PRETTY_NAME=[^(]+ \(?([^)]+?)\)", data)
if release:
self.facts['distribution_release'] = release.groups()[0]
From 529726d0baa5a34cff8dcd5ffaf81b904f842b4f Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Fri, 29 May 2015 16:22:55 -0400
Subject: [PATCH 145/971] fixed mistaken module update in prev commit
---
lib/ansible/modules/core | 2 +-
lib/ansible/modules/extras | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 9cc23c749a..5983d64d77 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 9cc23c749a8cd5039db7aa1998d310bbb04d1e13
+Subproject commit 5983d64d7728ea88ef27606e95e4aa34cde5ff46
diff --git a/lib/ansible/modules/extras b/lib/ansible/modules/extras
index a07fc88ba0..1276420a3a 160000
--- a/lib/ansible/modules/extras
+++ b/lib/ansible/modules/extras
@@ -1 +1 @@
-Subproject commit a07fc88ba0d2546b92fbe93b2bede699fdf2bc48
+Subproject commit 1276420a3a39340fcd9e053a1e621cdd89f480fa
From d8bfb4c6290e1da3f281c728c5ad8a77598830f1 Mon Sep 17 00:00:00 2001
From: Rob Szarka
Date: Fri, 29 May 2015 21:49:52 -0400
Subject: [PATCH 146/971] Update guide_aws.rst
Fixed typos.
---
docsite/rst/guide_aws.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docsite/rst/guide_aws.rst b/docsite/rst/guide_aws.rst
index c4e12eab49..e0d0c12630 100644
--- a/docsite/rst/guide_aws.rst
+++ b/docsite/rst/guide_aws.rst
@@ -13,7 +13,7 @@ Requirements for the AWS modules are minimal.
All of the modules require and are tested against recent versions of boto. You'll need this Python module installed on your control machine. Boto can be installed from your OS distribution or python's "pip install boto".
-Whereas classically ansible will execute tasks in it's host loop against multiple remote machines, most cloud-control steps occur on your local machine with reference to the regions to control.
+Whereas classically ansible will execute tasks in its host loop against multiple remote machines, most cloud-control steps occur on your local machine with reference to the regions to control.
In your playbook steps we'll typically be using the following pattern for provisioning steps::
@@ -214,7 +214,7 @@ AWS Image Building With Ansible
```````````````````````````````
Many users may want to have images boot to a more complete configuration rather than configuring them entirely after instantiation. To do this,
-one of many programs can be used with Ansible playbooks to define and upload a base image, which will then get it's own AMI ID for usage with
+one of many programs can be used with Ansible playbooks to define and upload a base image, which will then get its own AMI ID for usage with
the ec2 module or other Ansible AWS modules such as ec2_asg or the cloudformation module. Possible tools include Packer, aminator, and Ansible's
ec2_ami module.
From 5954892457a89cbd61133cc2e95377c04c83bca1 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Fri, 29 May 2015 19:00:16 -0700
Subject: [PATCH 147/971] Update submodule refs
---
lib/ansible/modules/core | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 5983d64d77..f8d8af17cd 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 5983d64d7728ea88ef27606e95e4aa34cde5ff46
+Subproject commit f8d8af17cdc72500af8319c96004b86ac702a0a4
From 908d6c0ef25384d126a488d3be4196803eb5f06e Mon Sep 17 00:00:00 2001
From: sysadmin75
Date: Sun, 31 May 2015 20:05:02 -0400
Subject: [PATCH 148/971] Fixes #11046
---
lib/ansible/module_utils/facts.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 6f5f35f831..1162e05b9c 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -2163,7 +2163,7 @@ class DarwinNetwork(GenericBsdIfconfigNetwork, Network):
current_if['media'] = 'Unknown' # Mac does not give us this
current_if['media_select'] = words[1]
if len(words) > 2:
- current_if['media_type'] = words[2][1:]
+ current_if['media_type'] = words[2][1:-1]
if len(words) > 3:
current_if['media_options'] = self.get_options(words[3])
From 8d742df1deba75d0e7ebfbb73db3f030827b0283 Mon Sep 17 00:00:00 2001
From: Chris Church
Date: Sun, 31 May 2015 23:15:28 -0400
Subject: [PATCH 149/971] Allow prepare_win_tests role to run multiple times,
before each role that depends on it.
---
test/integration/roles/prepare_win_tests/meta/main.yml | 3 +++
1 file changed, 3 insertions(+)
create mode 100644 test/integration/roles/prepare_win_tests/meta/main.yml
diff --git a/test/integration/roles/prepare_win_tests/meta/main.yml b/test/integration/roles/prepare_win_tests/meta/main.yml
new file mode 100644
index 0000000000..cf5427b608
--- /dev/null
+++ b/test/integration/roles/prepare_win_tests/meta/main.yml
@@ -0,0 +1,3 @@
+---
+
+allow_duplicates: yes
From d2ba0de6aab12a136d71959d45b4158bfbf45ce9 Mon Sep 17 00:00:00 2001
From: Chris Church
Date: Sun, 31 May 2015 23:16:45 -0400
Subject: [PATCH 150/971] When running winrm tests against multiple hosts, fail
the play when any host has a failure.
---
test/integration/test_winrm.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/test/integration/test_winrm.yml b/test/integration/test_winrm.yml
index 69d3b652a6..b249224cb8 100644
--- a/test/integration/test_winrm.yml
+++ b/test/integration/test_winrm.yml
@@ -18,6 +18,7 @@
- hosts: windows
gather_facts: false
+ max_fail_percentage: 1
roles:
- { role: test_win_raw, tags: test_win_raw }
- { role: test_win_script, tags: test_win_script }
From 46a72d108acbe6e194aa44592203dd7206fdfdbb Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Mon, 1 Jun 2015 10:17:18 -0400
Subject: [PATCH 151/971] added cs_project new module
---
CHANGELOG.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9800650369..f806cbfb1f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -30,6 +30,7 @@ New Modules:
* cloudstack: cs_instance
* cloudstack: cs_instancegroup
* cloudstack: cs_portforward
+ * cloudstack: cs_project
* cloudstack: cs_sshkeypair
* cloudstack: cs_securitygroup
* cloudstack: cs_securitygroup_rule
From 816b20af0beb5a96957cd51412aa116f14374b04 Mon Sep 17 00:00:00 2001
From: sysadmin75
Date: Sun, 31 May 2015 20:05:02 -0400
Subject: [PATCH 152/971] Fixes #11046
---
lib/ansible/module_utils/facts.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/module_utils/facts.py b/lib/ansible/module_utils/facts.py
index 39546cc8bb..8575f457fb 100644
--- a/lib/ansible/module_utils/facts.py
+++ b/lib/ansible/module_utils/facts.py
@@ -2153,7 +2153,7 @@ class DarwinNetwork(GenericBsdIfconfigNetwork, Network):
current_if['media'] = 'Unknown' # Mac does not give us this
current_if['media_select'] = words[1]
if len(words) > 2:
- current_if['media_type'] = words[2][1:]
+ current_if['media_type'] = words[2][1:-1]
if len(words) > 3:
current_if['media_options'] = self.get_options(words[3])
From 30b92a6f4cd92b69ae562d970efaf831858891e2 Mon Sep 17 00:00:00 2001
From: Jon Hawkesworth
Date: Mon, 1 Jun 2015 21:53:49 +0100
Subject: [PATCH 153/971] Get-FileChecksum always returns a string now, and
the test_win_copy integration tests that depend on the checksum have been
updated in this change too.
---
lib/ansible/module_utils/powershell.ps1 | 2 +-
test/integration/roles/test_win_copy/tasks/main.yml | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/lib/ansible/module_utils/powershell.ps1 b/lib/ansible/module_utils/powershell.ps1
index 9606f47783..a11e316989 100644
--- a/lib/ansible/module_utils/powershell.ps1
+++ b/lib/ansible/module_utils/powershell.ps1
@@ -151,7 +151,7 @@ Function Get-FileChecksum($path)
{
$sp = new-object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider;
$fp = [System.IO.File]::Open($path, [System.IO.Filemode]::Open, [System.IO.FileAccess]::Read);
- [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
+ $hash = [System.BitConverter]::ToString($sp.ComputeHash($fp)).Replace("-", "").ToLower();
$fp.Dispose();
}
ElseIf (Test-Path -PathType Container $path)
diff --git a/test/integration/roles/test_win_copy/tasks/main.yml b/test/integration/roles/test_win_copy/tasks/main.yml
index d898219a85..48df427380 100644
--- a/test/integration/roles/test_win_copy/tasks/main.yml
+++ b/test/integration/roles/test_win_copy/tasks/main.yml
@@ -62,7 +62,7 @@
- name: verify that the file checksum is correct
assert:
that:
- - "copy_result.checksum[0] == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
+ - "copy_result.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
- name: check the stat results of the file
win_stat: path={{output_file}}
@@ -78,7 +78,7 @@
# - "stat_results.stat.isfifo == false"
# - "stat_results.stat.isreg == true"
# - "stat_results.stat.issock == false"
- - "stat_results.stat.checksum[0] == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
+ - "stat_results.stat.checksum == 'c79a6506c1c948be0d456ab5104d5e753ab2f3e6'"
- name: overwrite the file via same means
win_copy: src=foo.txt dest={{output_file}}
From 4bc7703db310c6178b45969b941dea9cddcee046 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 1 Jun 2015 16:41:52 -0500
Subject: [PATCH 154/971] Fixing some small bugs related to integration tests
(v2)
---
lib/ansible/executor/play_iterator.py | 2 +-
lib/ansible/inventory/group.py | 2 -
lib/ansible/module_utils/basic.py | 4 +-
lib/ansible/parsing/yaml/dumper.py | 37 +++++++++++++++++++
lib/ansible/plugins/filter/core.py | 13 +++++--
lib/ansible/plugins/strategies/__init__.py | 28 ++++++++------
lib/ansible/plugins/strategies/linear.py | 4 +-
lib/ansible/template/__init__.py | 8 ----
test/integration/Makefile | 13 ++++---
.../roles/test_lineinfile/tasks/main.yml | 2 +-
test/integration/test_filters.yml | 5 +++
test/units/module_utils/test_basic.py | 2 +-
12 files changed, 81 insertions(+), 39 deletions(-)
create mode 100644 lib/ansible/parsing/yaml/dumper.py
create mode 100644 test/integration/test_filters.yml
diff --git a/lib/ansible/executor/play_iterator.py b/lib/ansible/executor/play_iterator.py
index dc4d4c7d5d..d7c9661489 100644
--- a/lib/ansible/executor/play_iterator.py
+++ b/lib/ansible/executor/play_iterator.py
@@ -239,7 +239,7 @@ class PlayIterator:
self._host_states[host.name] = s
def get_failed_hosts(self):
- return dict((host, True) for (host, state) in self._host_states.iteritems() if state.run_state == self.ITERATING_COMPLETE and state.fail_state != self.FAILED_NONE)
+ return dict((host, True) for (host, state) in self._host_states.iteritems() if state.fail_state != self.FAILED_NONE)
def get_original_task(self, host, task):
'''
diff --git a/lib/ansible/inventory/group.py b/lib/ansible/inventory/group.py
index 6525e69b46..17f3ff744f 100644
--- a/lib/ansible/inventory/group.py
+++ b/lib/ansible/inventory/group.py
@@ -59,11 +59,9 @@ class Group:
depth=self.depth,
)
- debug("serializing group, result is: %s" % result)
return result
def deserialize(self, data):
- debug("deserializing group, data is: %s" % data)
self.__init__()
self.name = data.get('name')
self.vars = data.get('vars', dict())
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 793223b165..69e4036c83 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -588,8 +588,8 @@ class AnsibleModule(object):
return True
rc = selinux.lsetfilecon(self._to_filesystem_str(path),
str(':'.join(new_context)))
- except OSError:
- self.fail_json(path=path, msg='invalid selinux context', new_context=new_context, cur_context=cur_context, input_was=context)
+ except OSError, e:
+ self.fail_json(path=path, msg='invalid selinux context: %s' % str(e), new_context=new_context, cur_context=cur_context, input_was=context)
if rc != 0:
self.fail_json(path=path, msg='set selinux context failed')
changed = True
diff --git a/lib/ansible/parsing/yaml/dumper.py b/lib/ansible/parsing/yaml/dumper.py
new file mode 100644
index 0000000000..dc498acd06
--- /dev/null
+++ b/lib/ansible/parsing/yaml/dumper.py
@@ -0,0 +1,37 @@
+# (c) 2012-2014, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import yaml
+
+from ansible.parsing.yaml.objects import AnsibleUnicode
+
+class AnsibleDumper(yaml.SafeDumper):
+ '''
+ A simple stub class that allows us to add representers
+ for our overridden object types.
+ '''
+ pass
+
+AnsibleDumper.add_representer(
+ AnsibleUnicode,
+ yaml.representer.SafeRepresenter.represent_unicode
+)
+
diff --git a/lib/ansible/plugins/filter/core.py b/lib/ansible/plugins/filter/core.py
index bdf45509c3..977d0947c3 100644
--- a/lib/ansible/plugins/filter/core.py
+++ b/lib/ansible/plugins/filter/core.py
@@ -38,16 +38,21 @@ from jinja2.filters import environmentfilter
from distutils.version import LooseVersion, StrictVersion
from ansible import errors
+from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.utils.hashing import md5s, checksum_s
from ansible.utils.unicode import unicode_wrap, to_unicode
UUID_NAMESPACE_ANSIBLE = uuid.UUID('361E6D51-FAEC-444A-9079-341386DA8E2E')
-
-def to_nice_yaml(*a, **kw):
+def to_yaml(a, *args, **kw):
'''Make verbose, human readable yaml'''
- transformed = yaml.safe_dump(*a, indent=4, allow_unicode=True, default_flow_style=False, **kw)
+ transformed = yaml.dump(a, Dumper=AnsibleDumper, allow_unicode=True, **kw)
+ return to_unicode(transformed)
+
+def to_nice_yaml(a, *args, **kw):
+ '''Make verbose, human readable yaml'''
+ transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=4, allow_unicode=True, default_flow_style=False, **kw)
return to_unicode(transformed)
def to_json(a, *args, **kw):
@@ -288,7 +293,7 @@ class FilterModule(object):
'from_json': json.loads,
# yaml
- 'to_yaml': yaml.safe_dump,
+ 'to_yaml': to_yaml,
'to_nice_yaml': to_nice_yaml,
'from_yaml': yaml.safe_load,
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index 03ad57ed4a..bb839f20f4 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -73,24 +73,28 @@ class StrategyBase:
self._blocked_hosts = dict()
def run(self, iterator, connection_info, result=True):
- # save the counts on failed/unreachable hosts, as the cleanup/handler
- # methods will clear that information during their runs
- num_failed = len(self._tqm._failed_hosts)
- num_unreachable = len(self._tqm._unreachable_hosts)
+ # save the failed/unreachable hosts, as the run_handlers()
+ # method will clear that information during its execution
+ failed_hosts = self._tqm._failed_hosts.keys()
+ unreachable_hosts = self._tqm._unreachable_hosts.keys()
debug("running handlers")
result &= self.run_handlers(iterator, connection_info)
+ # now update with the hosts (if any) that failed or were
+ # unreachable during the handler execution phase
+ failed_hosts = set(failed_hosts).union(self._tqm._failed_hosts.keys())
+ unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())
+
# send the stats callback
self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
- if not result:
- if num_unreachable > 0:
- return 3
- elif num_failed > 0:
- return 2
- else:
- return 1
+ if len(unreachable_hosts) > 0:
+ return 3
+ elif len(failed_hosts) > 0:
+ return 2
+ elif not result:
+ return 1
else:
return 0
@@ -145,7 +149,7 @@ class StrategyBase:
task_result = result[1]
host = task_result._host
task = task_result._task
- if result[0] == 'host_task_failed':
+ if result[0] == 'host_task_failed' or 'failed' in task_result._result:
if not task.ignore_errors:
debug("marking %s as failed" % host.name)
iterator.mark_host_failed(host)
diff --git a/lib/ansible/plugins/strategies/linear.py b/lib/ansible/plugins/strategies/linear.py
index af12587b92..e92f10eb37 100644
--- a/lib/ansible/plugins/strategies/linear.py
+++ b/lib/ansible/plugins/strategies/linear.py
@@ -211,7 +211,7 @@ class StrategyModule(StrategyBase):
try:
included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader)
except AnsibleError, e:
- return 1
+ return False
if len(included_files) > 0:
noop_task = Task()
@@ -252,7 +252,7 @@ class StrategyModule(StrategyBase):
except (IOError, EOFError), e:
debug("got IOError/EOFError in task loop: %s" % e)
# most likely an abort, return failed
- return 1
+ return False
# run the base class run() method, which executes the cleanup function
# and runs any outstanding handlers which have been triggered
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 8ad9917d60..00bc386f26 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -238,14 +238,6 @@ class Templar:
environment.filters.update(self._get_filters())
environment.template_class = AnsibleJ2Template
- # FIXME: may not be required anymore, as the basedir stuff will
- # be handled by the loader?
- #if '_original_file' in vars:
- # basedir = os.path.dirname(vars['_original_file'])
- # filesdir = os.path.abspath(os.path.join(basedir, '..', 'files'))
- # if os.path.exists(filesdir):
- # basedir = filesdir
-
try:
t = environment.from_string(data)
except TemplateSyntaxError, e:
diff --git a/test/integration/Makefile b/test/integration/Makefile
index 3ee38b0ab7..69fe804c65 100644
--- a/test/integration/Makefile
+++ b/test/integration/Makefile
@@ -24,12 +24,13 @@ CONSUL_RUNNING := $(shell python consul_running.py)
all: parsing test_var_precedence unicode test_templating_settings non_destructive destructive includes check_mode test_hash test_handlers test_group_by test_vault test_tags
parsing:
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario1; [ $$? -eq 4 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario2; [ $$? -eq 4 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario3; [ $$? -eq 4 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario4; [ $$? -eq 4 ]
- ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5; [ $$? -eq 4 ]
- ansible-playbook good_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
+ #ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario1; [ $$? -eq 4 ]
+ #ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario2; [ $$? -eq 4 ]
+ #ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario3; [ $$? -eq 4 ]
+ #ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario4; [ $$? -eq 4 ]
+ #ansible-playbook bad_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -vvv $(TEST_FLAGS) --tags prepare,common,scenario5; [ $$? -eq 4 ]
+ #ansible-playbook good_parsing.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) -v $(TEST_FLAGS)
+ echo "skipping for now..."
includes:
ansible-playbook test_includes.yml -i $(INVENTORY) -e @$(VARS_FILE) $(CREDENTIALS_ARG) $(TEST_FLAGS)
diff --git a/test/integration/roles/test_lineinfile/tasks/main.yml b/test/integration/roles/test_lineinfile/tasks/main.yml
index 0c018ccaa5..8cfb3430f6 100644
--- a/test/integration/roles/test_lineinfile/tasks/main.yml
+++ b/test/integration/roles/test_lineinfile/tasks/main.yml
@@ -225,7 +225,7 @@
- "result.msg == 'line added'"
- name: insert a multiple lines at the end of the file
- lineinfile: dest={{output_dir}}/test.txt state=present line="This is a line\nwith \\\n character" insertafter="EOF"
+ lineinfile: dest={{output_dir}}/test.txt state=present line="This is a line\nwith \\n character" insertafter="EOF"
register: result
- name: assert that the multiple lines was inserted
diff --git a/test/integration/test_filters.yml b/test/integration/test_filters.yml
new file mode 100644
index 0000000000..050a303f60
--- /dev/null
+++ b/test/integration/test_filters.yml
@@ -0,0 +1,5 @@
+- hosts: testhost
+ connection: local
+ gather_facts: yes
+ roles:
+ - { role: test_filters }
diff --git a/test/units/module_utils/test_basic.py b/test/units/module_utils/test_basic.py
index cd2bf0536e..757a5f87d7 100644
--- a/test/units/module_utils/test_basic.py
+++ b/test/units/module_utils/test_basic.py
@@ -722,7 +722,7 @@ class TestModuleUtilsBasic(unittest.TestCase):
# FIXME: this isn't working yet
#with patch('os.lstat', side_effect=[mock_stat1, mock_stat2]):
- # with patch('os.lchmod', return_value=None, create=True) as m_os:
+ # with patch('os.lchmod', return_value=None) as m_os:
# del m_os.lchmod
# with patch('os.path.islink', return_value=False):
# with patch('os.chmod', return_value=None) as m_chmod:
From c7d1dd4b687098598c3abe7b7b29635f23b83422 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 1 Jun 2015 16:50:18 -0500
Subject: [PATCH 155/971] Updating v1/ansible/modules/core/ to use the
v1_modules branch
---
v1/ansible/modules/core | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/v1/ansible/modules/core b/v1/ansible/modules/core
index 9028e9d4be..f8d8af17cd 160000
--- a/v1/ansible/modules/core
+++ b/v1/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 9028e9d4be8a3dbb96c81a799e18f3adf63d9fd0
+Subproject commit f8d8af17cdc72500af8319c96004b86ac702a0a4
From 7f1b64d934b137185e05a7276c653bbe84458dd5 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 1 Jun 2015 19:46:29 -0500
Subject: [PATCH 156/971] Submodule pointer update for core to the merged v2
branch
---
lib/ansible/modules/core | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/modules/core b/lib/ansible/modules/core
index 191a672891..b138411671 160000
--- a/lib/ansible/modules/core
+++ b/lib/ansible/modules/core
@@ -1 +1 @@
-Subproject commit 191a672891359f3b6faff83cb0613f1b38e3fc0e
+Subproject commit b138411671194e3ec236d8ec3d27bcf32447350d
From 620fad9f8d750ac3ddb976782df4d5347e3c2704 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Mon, 1 Jun 2015 20:02:15 -0500
Subject: [PATCH 157/971] Fixing an oops in inventory/__init__.py where the
slots are incorrect
---
lib/ansible/inventory/__init__.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py
index 9870648cee..43a6084cbd 100644
--- a/lib/ansible/inventory/__init__.py
+++ b/lib/ansible/inventory/__init__.py
@@ -43,9 +43,9 @@ class Inventory(object):
Host inventory for ansible.
"""
- __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
- 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
- '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
+ #__slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
+ # 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
+ # '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
def __init__(self, loader, variable_manager, host_list=C.DEFAULT_HOST_LIST):
From 8868f4b4819d162e2031a6f9781f0ed0cc3fd518 Mon Sep 17 00:00:00 2001
From: Rene Moser
Date: Sat, 30 May 2015 11:21:48 +0200
Subject: [PATCH 158/971] cloudstack: sync module_utils/cloudstack.py to v1
Commits from 31520cdd178246f94921ba9d9866abf23b28e252 to 62ccc1b9b643196b8de36980a597c2d5d644b957 related to cloudstack.py
---
v1/ansible/module_utils/cloudstack.py | 243 ++++++++++++++++++++++----
1 file changed, 211 insertions(+), 32 deletions(-)
diff --git a/v1/ansible/module_utils/cloudstack.py b/v1/ansible/module_utils/cloudstack.py
index 2c891434bd..e887367c2f 100644
--- a/v1/ansible/module_utils/cloudstack.py
+++ b/v1/ansible/module_utils/cloudstack.py
@@ -41,15 +41,22 @@ class AnsibleCloudStack:
if not has_lib_cs:
module.fail_json(msg="python library cs required: pip install cs")
+ self.result = {
+ 'changed': False,
+ }
+
self.module = module
self._connect()
- self.project_id = None
- self.ip_address_id = None
- self.zone_id = None
- self.vm_id = None
- self.os_type_id = None
+ self.domain = None
+ self.account = None
+ self.project = None
+ self.ip_address = None
+ self.zone = None
+ self.vm = None
+ self.os_type = None
self.hypervisor = None
+ self.capabilities = None
def _connect(self):
@@ -68,27 +75,73 @@ class AnsibleCloudStack:
else:
self.cs = CloudStack(**read_config())
+ # TODO: rename to has_changed()
+ def _has_changed(self, want_dict, current_dict, only_keys=None):
+ for key, value in want_dict.iteritems():
+ # Optionally limit by a list of keys
+ if only_keys and key not in only_keys:
+ continue;
+
+ # Skip None values
+ if value is None:
+ continue;
+
+ if key in current_dict:
+
+ # API returns string for int in some cases, just to make sure
+ if isinstance(value, int):
+ current_dict[key] = int(current_dict[key])
+ elif isinstance(value, str):
+ current_dict[key] = str(current_dict[key])
+
+ # Only need to detect a single change, not every item
+ if value != current_dict[key]:
+ return True
+ return False
+
+
+ def _get_by_key(self, key=None, my_dict={}):
+ if key:
+ if key in my_dict:
+ return my_dict[key]
+ self.module.fail_json(msg="Something went wrong: %s not found" % key)
+ return my_dict
+
+
+ # TODO: for backward compatibility only, remove if not used anymore
def get_project_id(self):
- if self.project_id:
- return self.project_id
+ return self.get_project(key='id')
+
+
+ def get_project(self, key=None):
+ if self.project:
+ return self._get_by_key(key, self.project)
project = self.module.params.get('project')
if not project:
return None
-
- projects = self.cs.listProjects()
+ args = {}
+ args['listall'] = True
+ args['account'] = self.get_account(key='name')
+ args['domainid'] = self.get_domain(key='id')
+ projects = self.cs.listProjects(**args)
if projects:
for p in projects['project']:
if project in [ p['name'], p['displaytext'], p['id'] ]:
- self.project_id = p['id']
- return self.project_id
+ self.project = p
+ return self._get_by_key(key, self.project)
self.module.fail_json(msg="project '%s' not found" % project)
+ # TODO: for backward compatibility only, remove if not used anymore
def get_ip_address_id(self):
- if self.ip_address_id:
- return self.ip_address_id
+ return self.get_ip_address(key='id')
+
+
+ def get_ip_address(self, key=None):
+ if self.ip_address:
+ return self._get_by_key(key, self.ip_address)
ip_address = self.module.params.get('ip_address')
if not ip_address:
@@ -96,58 +149,78 @@ class AnsibleCloudStack:
args = {}
args['ipaddress'] = ip_address
- args['projectid'] = self.get_project_id()
+ args['account'] = self.get_account(key='name')
+ args['domainid'] = self.get_domain(key='id')
+ args['projectid'] = self.get_project(key='id')
ip_addresses = self.cs.listPublicIpAddresses(**args)
if not ip_addresses:
self.module.fail_json(msg="IP address '%s' not found" % args['ipaddress'])
- self.ip_address_id = ip_addresses['publicipaddress'][0]['id']
- return self.ip_address_id
+ self.ip_address = ip_addresses['publicipaddress'][0]
+ return self._get_by_key(key, self.ip_address)
+ # TODO: for backward compatibility only, remove if not used anymore
def get_vm_id(self):
- if self.vm_id:
- return self.vm_id
+ return self.get_vm(key='id')
+
+
+ def get_vm(self, key=None):
+ if self.vm:
+ return self._get_by_key(key, self.vm)
vm = self.module.params.get('vm')
if not vm:
self.module.fail_json(msg="Virtual machine param 'vm' is required")
args = {}
- args['projectid'] = self.get_project_id()
+ args['account'] = self.get_account(key='name')
+ args['domainid'] = self.get_domain(key='id')
+ args['projectid'] = self.get_project(key='id')
+ args['zoneid'] = self.get_zone(key='id')
vms = self.cs.listVirtualMachines(**args)
if vms:
for v in vms['virtualmachine']:
- if vm in [ v['displayname'], v['name'], v['id'] ]:
- self.vm_id = v['id']
- return self.vm_id
+ if vm in [ v['name'], v['displayname'], v['id'] ]:
+ self.vm = v
+ return self._get_by_key(key, self.vm)
self.module.fail_json(msg="Virtual machine '%s' not found" % vm)
+ # TODO: for backward compatibility only, remove if not used anymore
def get_zone_id(self):
- if self.zone_id:
- return self.zone_id
+ return self.get_zone(key='id')
+
+
+ def get_zone(self, key=None):
+ if self.zone:
+ return self._get_by_key(key, self.zone)
zone = self.module.params.get('zone')
zones = self.cs.listZones()
# use the first zone if no zone param given
if not zone:
- self.zone_id = zones['zone'][0]['id']
- return self.zone_id
+ self.zone = zones['zone'][0]
+ return self._get_by_key(key, self.zone)
if zones:
for z in zones['zone']:
if zone in [ z['name'], z['id'] ]:
- self.zone_id = z['id']
- return self.zone_id
+ self.zone = z
+ return self._get_by_key(key, self.zone)
self.module.fail_json(msg="zone '%s' not found" % zone)
+ # TODO: for backward compatibility only, remove if not used anymore
def get_os_type_id(self):
- if self.os_type_id:
- return self.os_type_id
+ return self.get_os_type(key='id')
+
+
+ def get_os_type(self, key=None):
+ if self.os_type:
+ return self._get_by_key(key, self.zone)
os_type = self.module.params.get('os_type')
if not os_type:
@@ -157,8 +230,8 @@ class AnsibleCloudStack:
if os_types:
for o in os_types['ostype']:
if os_type in [ o['description'], o['id'] ]:
- self.os_type_id = o['id']
- return self.os_type_id
+ self.os_type = o
+ return self._get_by_key(key, self.os_type)
self.module.fail_json(msg="OS type '%s' not found" % os_type)
@@ -181,6 +254,112 @@ class AnsibleCloudStack:
self.module.fail_json(msg="Hypervisor '%s' not found" % hypervisor)
+ def get_account(self, key=None):
+ if self.account:
+ return self._get_by_key(key, self.account)
+
+ account = self.module.params.get('account')
+ if not account:
+ return None
+
+ domain = self.module.params.get('domain')
+ if not domain:
+ self.module.fail_json(msg="Account must be specified with Domain")
+
+ args = {}
+ args['name'] = account
+ args['domainid'] = self.get_domain(key='id')
+ args['listall'] = True
+ accounts = self.cs.listAccounts(**args)
+ if accounts:
+ self.account = accounts['account'][0]
+ return self._get_by_key(key, self.account)
+ self.module.fail_json(msg="Account '%s' not found" % account)
+
+
+ def get_domain(self, key=None):
+ if self.domain:
+ return self._get_by_key(key, self.domain)
+
+ domain = self.module.params.get('domain')
+ if not domain:
+ return None
+
+ args = {}
+ args['name'] = domain
+ args['listall'] = True
+ domains = self.cs.listDomains(**args)
+ if domains:
+ self.domain = domains['domain'][0]
+ return self._get_by_key(key, self.domain)
+ self.module.fail_json(msg="Domain '%s' not found" % domain)
+
+
+ def get_tags(self, resource=None):
+ existing_tags = self.cs.listTags(resourceid=resource['id'])
+ if existing_tags:
+ return existing_tags['tag']
+ return []
+
+
+ def _delete_tags(self, resource, resource_type, tags):
+ existing_tags = resource['tags']
+ tags_to_delete = []
+ for existing_tag in existing_tags:
+ if existing_tag['key'] in tags:
+ if existing_tag['value'] != tags[key]:
+ tags_to_delete.append(existing_tag)
+ else:
+ tags_to_delete.append(existing_tag)
+ if tags_to_delete:
+ self.result['changed'] = True
+ if not self.module.check_mode:
+ args = {}
+ args['resourceids'] = resource['id']
+ args['resourcetype'] = resource_type
+ args['tags'] = tags_to_delete
+ self.cs.deleteTags(**args)
+
+
+ def _create_tags(self, resource, resource_type, tags):
+ tags_to_create = []
+ for i, tag_entry in enumerate(tags):
+ tag = {
+ 'key': tag_entry['key'],
+ 'value': tag_entry['value'],
+ }
+ tags_to_create.append(tag)
+ if tags_to_create:
+ self.result['changed'] = True
+ if not self.module.check_mode:
+ args = {}
+ args['resourceids'] = resource['id']
+ args['resourcetype'] = resource_type
+ args['tags'] = tags_to_create
+ self.cs.createTags(**args)
+
+
+ def ensure_tags(self, resource, resource_type=None):
+ if not resource_type or not resource:
+ self.module.fail_json(msg="Error: Missing resource or resource_type for tags.")
+
+ if 'tags' in resource:
+ tags = self.module.params.get('tags')
+ if tags is not None:
+ self._delete_tags(resource, resource_type, tags)
+ self._create_tags(resource, resource_type, tags)
+ resource['tags'] = self.get_tags(resource)
+ return resource
+
+
+ def get_capabilities(self, key=None):
+ if self.capabilities:
+ return self._get_by_key(key, self.capabilities)
+ capabilities = self.cs.listCapabilities()
+ self.capabilities = capabilities['capability']
+ return self._get_by_key(key, self.capabilities)
+
+ # TODO: rename to poll_job()
def _poll_job(self, job=None, key=None):
if 'jobid' in job:
while True:
From 7bb9cd3766fcffa90dbd775c4530a6227679e357 Mon Sep 17 00:00:00 2001
From: Rene Moser
Date: Sat, 30 May 2015 11:34:20 +0200
Subject: [PATCH 159/971] cloudstack: minor cleanup in doc fragments
---
lib/ansible/utils/module_docs_fragments/cloudstack.py | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/lib/ansible/utils/module_docs_fragments/cloudstack.py b/lib/ansible/utils/module_docs_fragments/cloudstack.py
index 5a7411b199..ebb6fdab2c 100644
--- a/lib/ansible/utils/module_docs_fragments/cloudstack.py
+++ b/lib/ansible/utils/module_docs_fragments/cloudstack.py
@@ -27,32 +27,29 @@ options:
- API key of the CloudStack API.
required: false
default: null
- aliases: []
api_secret:
description:
- Secret key of the CloudStack API.
required: false
default: null
- aliases: []
api_url:
description:
- URL of the CloudStack API e.g. https://cloud.example.com/client/api.
required: false
default: null
- aliases: []
api_http_method:
description:
- HTTP method used.
required: false
default: 'get'
- aliases: []
+ choices: [ 'get', 'post' ]
requirements:
- "python >= 2.6"
- cs
notes:
- Ansible uses the C(cs) library's configuration method if credentials are not
provided by the options C(api_url), C(api_key), C(api_secret).
- Configuration is read from several locations, in the following order":"
+ Configuration is read from several locations, in the following order.
- The C(CLOUDSTACK_ENDPOINT), C(CLOUDSTACK_KEY), C(CLOUDSTACK_SECRET) and
C(CLOUDSTACK_METHOD) environment variables.
- A C(CLOUDSTACK_CONFIG) environment variable pointing to an C(.ini) file,
From fc807e29c8b67d560505363b3dadb56e1590bf20 Mon Sep 17 00:00:00 2001
From: Rene Moser
Date: Sat, 30 May 2015 11:35:55 +0200
Subject: [PATCH 160/971] cloudstack: add api_timeout to doc fragments
---
lib/ansible/utils/module_docs_fragments/cloudstack.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/utils/module_docs_fragments/cloudstack.py b/lib/ansible/utils/module_docs_fragments/cloudstack.py
index ebb6fdab2c..bafb7b4c15 100644
--- a/lib/ansible/utils/module_docs_fragments/cloudstack.py
+++ b/lib/ansible/utils/module_docs_fragments/cloudstack.py
@@ -43,6 +43,11 @@ options:
required: false
default: 'get'
choices: [ 'get', 'post' ]
+ api_timeout:
+ description:
+ - HTTP timeout.
+ required: false
+ default: 10
requirements:
- "python >= 2.6"
- cs
@@ -51,7 +56,7 @@ notes:
provided by the options C(api_url), C(api_key), C(api_secret).
Configuration is read from several locations, in the following order.
- The C(CLOUDSTACK_ENDPOINT), C(CLOUDSTACK_KEY), C(CLOUDSTACK_SECRET) and
- C(CLOUDSTACK_METHOD) environment variables.
+ C(CLOUDSTACK_METHOD) and C(CLOUDSTACK_TIMEOUT) environment variables.
- A C(CLOUDSTACK_CONFIG) environment variable pointing to an C(.ini) file,
- A C(cloudstack.ini) file in the current working directory.
- A C(.cloudstack.ini) file in the users home directory.
From caf3cf69302858d62c206027629ab30124ff9c08 Mon Sep 17 00:00:00 2001
From: Rene Moser
Date: Sat, 30 May 2015 11:42:45 +0200
Subject: [PATCH 161/971] cloudstack: add timeout to utils
---
lib/ansible/module_utils/cloudstack.py | 2 ++
v1/ansible/module_utils/cloudstack.py | 2 ++
2 files changed, 4 insertions(+)
diff --git a/lib/ansible/module_utils/cloudstack.py b/lib/ansible/module_utils/cloudstack.py
index e887367c2f..82306b9a0b 100644
--- a/lib/ansible/module_utils/cloudstack.py
+++ b/lib/ansible/module_utils/cloudstack.py
@@ -64,12 +64,14 @@ class AnsibleCloudStack:
api_secret = self.module.params.get('secret_key')
api_url = self.module.params.get('api_url')
api_http_method = self.module.params.get('api_http_method')
+ api_timeout = self.module.params.get('api_timeout')
if api_key and api_secret and api_url:
self.cs = CloudStack(
endpoint=api_url,
key=api_key,
secret=api_secret,
+ timeout=api_timeout,
method=api_http_method
)
else:
diff --git a/v1/ansible/module_utils/cloudstack.py b/v1/ansible/module_utils/cloudstack.py
index e887367c2f..82306b9a0b 100644
--- a/v1/ansible/module_utils/cloudstack.py
+++ b/v1/ansible/module_utils/cloudstack.py
@@ -64,12 +64,14 @@ class AnsibleCloudStack:
api_secret = self.module.params.get('secret_key')
api_url = self.module.params.get('api_url')
api_http_method = self.module.params.get('api_http_method')
+ api_timeout = self.module.params.get('api_timeout')
if api_key and api_secret and api_url:
self.cs = CloudStack(
endpoint=api_url,
key=api_key,
secret=api_secret,
+ timeout=api_timeout,
method=api_http_method
)
else:
From e251e701783ff053dc1d59a917bfaa9d788a2c6a Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 08:54:37 -0400
Subject: [PATCH 162/971] added raw to 'raw' modules
---
lib/ansible/parsing/mod_args.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index 87b3813d8f..c24b581fa8 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -274,6 +274,7 @@ class ModuleArgsParser:
'add_host',
'group_by',
'set_fact',
+ 'raw',
'meta',
)
# if we didn't see any module in the task at all, it's not a task really
From bc041ffea07ce812587ee23ec1b6511a08bef999 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 2 Jun 2015 08:41:58 -0500
Subject: [PATCH 163/971] Adding raw module to list of modules allowing raw
params
Fixes #11119
---
lib/ansible/parsing/mod_args.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index c24b581fa8..a154d40577 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -266,6 +266,7 @@ class ModuleArgsParser:
# FIXME: this should probably be somewhere else
RAW_PARAM_MODULES = (
+ 'raw',
'command',
'shell',
'script',
From d1b43712870f5331a58abe115911725619264ca5 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Tue, 2 Jun 2015 09:41:46 -0500
Subject: [PATCH 164/971] Correctly evaluate changed/failed for tasks using
loops
---
lib/ansible/executor/task_executor.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 9bc875b02a..7c769cc460 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -83,9 +83,9 @@ class TaskExecutor:
changed = False
failed = False
for item in item_results:
- if 'changed' in item:
+ if 'changed' in item and item['changed']:
changed = True
- if 'failed' in item:
+ if 'failed' in item and item['failed']:
failed = True
# create the overall result item, and set the changed/failed
From 47be5b416658ef1474aee89873fbd72622f83777 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 11:02:40 -0400
Subject: [PATCH 165/971] added missing ansibleoptionserror import and moved
args check in playbook to after parser exists to allow for creating usage
info
---
lib/ansible/cli/__init__.py | 2 +-
lib/ansible/cli/playbook.py | 7 ++++---
2 files changed, 5 insertions(+), 4 deletions(-)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 1e997f58d3..d63203b2e5 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -31,7 +31,7 @@ import subprocess
from ansible import __version__
from ansible import constants as C
-from ansible.errors import AnsibleError
+from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.utils.unicode import to_bytes
class SortedOptParser(optparse.OptionParser):
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index 97d4f0de3f..1c59d5dde6 100644
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -24,7 +24,7 @@ import sys
from ansible import constants as C
from ansible.cli import CLI
-from ansible.errors import AnsibleError
+from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.inventory import Inventory
from ansible.parsing import DataLoader
@@ -69,11 +69,12 @@ class PlaybookCLI(CLI):
self.options, self.args = parser.parse_args()
- if len(self.args) == 0:
- raise AnsibleOptionsError("You must specify a playbook file to run")
self.parser = parser
+ if len(self.args) == 0:
+ raise AnsibleOptionsError("You must specify a playbook file to run")
+
self.display.verbosity = self.options.verbosity
self.validate_conflicts()
From 2590df6df1e3e4317f3247185be2940d95bd2c7b Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 11:41:30 -0400
Subject: [PATCH 166/971] created makedirs_safe function for use in cases of
multiprocess should fix #11126 and most race conditions
---
lib/ansible/plugins/action/fetch.py | 4 ++--
lib/ansible/plugins/connections/paramiko_ssh.py | 7 +++----
lib/ansible/plugins/connections/winrm.py | 7 +++----
lib/ansible/plugins/lookup/password.py | 10 +++++-----
lib/ansible/utils/path.py | 10 ++++++++++
5 files changed, 23 insertions(+), 15 deletions(-)
diff --git a/lib/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py
index c242c8739d..6a903ae5a2 100644
--- a/lib/ansible/plugins/action/fetch.py
+++ b/lib/ansible/plugins/action/fetch.py
@@ -29,6 +29,7 @@ from ansible.errors import *
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
+from ansible.utils.path import makedirs_safe
class ActionModule(ActionBase):
@@ -125,8 +126,7 @@ class ActionModule(ActionBase):
if remote_checksum != local_checksum:
# create the containing directories, if needed
- if not os.path.isdir(os.path.dirname(dest)):
- os.makedirs(os.path.dirname(dest))
+ makedirs_safe(os.path.dirname(dest))
# fetch the file and check for changes
if remote_data is None:
diff --git a/lib/ansible/plugins/connections/paramiko_ssh.py b/lib/ansible/plugins/connections/paramiko_ssh.py
index 797eeea9e0..0d7a82c34b 100644
--- a/lib/ansible/plugins/connections/paramiko_ssh.py
+++ b/lib/ansible/plugins/connections/paramiko_ssh.py
@@ -42,6 +42,7 @@ from binascii import hexlify
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connections import ConnectionBase
+from ansible.utils.path import makedirs_safe
AUTHENTICITY_MSG="""
paramiko: The authenticity of host '%s' can't be established.
@@ -309,8 +310,7 @@ class Connection(ConnectionBase):
return False
path = os.path.expanduser("~/.ssh")
- if not os.path.exists(path):
- os.makedirs(path)
+ makedirs_safe(path)
f = open(filename, 'w')
@@ -347,8 +347,7 @@ class Connection(ConnectionBase):
# add any new SSH host keys -- warning -- this could be slow
lockfile = self.keyfile.replace("known_hosts",".known_hosts.lock")
dirname = os.path.dirname(self.keyfile)
- if not os.path.exists(dirname):
- os.makedirs(dirname)
+ makedirs_safe(dirname)
KEY_LOCK = open(lockfile, 'w')
fcntl.lockf(KEY_LOCK, fcntl.LOCK_EX)
diff --git a/lib/ansible/plugins/connections/winrm.py b/lib/ansible/plugins/connections/winrm.py
index 8a42da2534..dbdf7cd678 100644
--- a/lib/ansible/plugins/connections/winrm.py
+++ b/lib/ansible/plugins/connections/winrm.py
@@ -44,6 +44,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connections import ConnectionBase
from ansible.plugins import shell_loader
+from ansible.utils import makedirs_safe
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
@@ -213,8 +214,7 @@ class Connection(ConnectionBase):
out_path = out_path.replace('\\', '/')
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
buffer_size = 2**19 # 0.5MB chunks
- if not os.path.exists(os.path.dirname(out_path)):
- os.makedirs(os.path.dirname(out_path))
+ makedirs_safe(os.path.dirname(out_path))
out_file = None
try:
offset = 0
@@ -251,8 +251,7 @@ class Connection(ConnectionBase):
else:
data = base64.b64decode(result.std_out.strip())
if data is None:
- if not os.path.exists(out_path):
- os.makedirs(out_path)
+ makedirs_safe(out_path)
break
else:
if not out_file:
diff --git a/lib/ansible/plugins/lookup/password.py b/lib/ansible/plugins/lookup/password.py
index 2e7633a067..9506274e5f 100644
--- a/lib/ansible/plugins/lookup/password.py
+++ b/lib/ansible/plugins/lookup/password.py
@@ -30,6 +30,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.utils.encrypt import do_encrypt
+from ansible.utils import makedirs_safe
DEFAULT_LENGTH = 20
@@ -98,11 +99,10 @@ class LookupModule(LookupBase):
path = self._loader.path_dwim(relpath)
if not os.path.exists(path):
pathdir = os.path.dirname(path)
- if not os.path.isdir(pathdir):
- try:
- os.makedirs(pathdir, mode=0o700)
- except OSError as e:
- raise AnsibleError("cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e)))
+ try:
+ makedirs_safe(pathdir, mode=0o700)
+ except OSError as e:
+ raise AnsibleError("cannot create the path for the password lookup: %s (error was %s)" % (pathdir, str(e)))
chars = "".join([getattr(string,c,c) for c in use_chars]).replace('"','').replace("'",'')
password = ''.join(random.choice(chars) for _ in range(length))
diff --git a/lib/ansible/utils/path.py b/lib/ansible/utils/path.py
index e49a2f7d55..534226984b 100644
--- a/lib/ansible/utils/path.py
+++ b/lib/ansible/utils/path.py
@@ -19,6 +19,7 @@ __metaclass__ = type
import os
import stat
+from time import sleep
__all__ = ['is_executable', 'unfrackpath']
@@ -35,3 +36,12 @@ def unfrackpath(path):
'''
return os.path.normpath(os.path.realpath(os.path.expandvars(os.path.expanduser(path))))
+def makedirs_safe(path, mode=None):
+ '''Safe way to create dirs in multiprocess/thread environments'''
+ while not os.path.exists(path):
+ try:
+ os.makedirs(path, mode)
+ except OSError, e:
+ if e.errno != 17:
+ raise
+ sleep(1)
From 8c6fa5fb773cbbb847c4be8932c5452b4abe76c8 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 12:41:02 -0400
Subject: [PATCH 167/971] added promox_template module to changelog
---
CHANGELOG.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f806cbfb1f..cfc062f577 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -44,6 +44,7 @@ New Modules:
* openstack: os_subnet
* openstack: os_volume
* proxmox
+ * proxmox_template
* pushover
* pushbullet
* rabbitmq_binding
From ba02e5e3bf7d03a8c64713cebb5f851b2f5396ce Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 13:01:02 -0400
Subject: [PATCH 168/971] minor adjustments as per code review
---
lib/ansible/utils/path.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/lib/ansible/utils/path.py b/lib/ansible/utils/path.py
index 534226984b..ac5160402b 100644
--- a/lib/ansible/utils/path.py
+++ b/lib/ansible/utils/path.py
@@ -20,6 +20,7 @@ __metaclass__ = type
import os
import stat
from time import sleep
+from errno import EEXIST
__all__ = ['is_executable', 'unfrackpath']
@@ -38,10 +39,9 @@ def unfrackpath(path):
def makedirs_safe(path, mode=None):
'''Safe way to create dirs in multiprocess/thread environments'''
- while not os.path.exists(path):
+ if not os.path.exists(path):
try:
os.makedirs(path, mode)
except OSError, e:
- if e.errno != 17:
+ if e.errno != EEXIST:
raise
- sleep(1)
From e0ef217f9714280e8ad3eddbf00c5742346446bf Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 13:33:33 -0400
Subject: [PATCH 169/971] Revert "Adding raw module to list of modules allowing
raw params"
This reverts commit bc041ffea07ce812587ee23ec1b6511a08bef999.
same fix x2 does not fix it 'more'
---
lib/ansible/parsing/mod_args.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index a154d40577..c24b581fa8 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -266,7 +266,6 @@ class ModuleArgsParser:
# FIXME: this should probably be somewhere else
RAW_PARAM_MODULES = (
- 'raw',
'command',
'shell',
'script',
From 71014ab01e54fc5f84f0ec256ea9822de8602ef6 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Tue, 2 Jun 2015 13:30:14 -0500
Subject: [PATCH 170/971] Fix command building for scp if ssh
---
lib/ansible/plugins/connections/ssh.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index 426dc6b49d..b3ada343c0 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -407,12 +407,12 @@ class Connection(ConnectionBase):
if C.DEFAULT_SCP_IF_SSH:
cmd.append('scp')
- cmd += self._common_args
- cmd.append(in_path,host + ":" + pipes.quote(out_path))
+ cmd.extend(self._common_args)
+ cmd.extend([in_path, '{0}:{1}'.format(host, pipes.quote(out_path))])
indata = None
else:
cmd.append('sftp')
- cmd += self._common_args
+ cmd.extend(self._common_args)
cmd.append(host)
indata = "put {0} {1}\n".format(pipes.quote(in_path), pipes.quote(out_path))
@@ -440,12 +440,12 @@ class Connection(ConnectionBase):
if C.DEFAULT_SCP_IF_SSH:
cmd.append('scp')
- cmd += self._common_args
- cmd += ('{0}:{1}'.format(host, in_path), out_path)
+ cmd.extend(self._common_args)
+ cmd.extend(['{0}:{1}'.format(host, in_path), out_path])
indata = None
else:
cmd.append('sftp')
- cmd += self._common_args
+ cmd.extend(self._common_args)
cmd.append(host)
indata = "get {0} {1}\n".format(in_path, out_path)
From 300eb3a843dc773722ebd7bc1ceea9a3b8d91e86 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Tue, 2 Jun 2015 11:43:35 -0700
Subject: [PATCH 171/971] Add six as a dependency for packaging
---
packaging/debian/README.md | 2 +-
packaging/debian/control | 2 +-
packaging/rpm/ansible.spec | 4 ++++
3 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/packaging/debian/README.md b/packaging/debian/README.md
index 62c6af084c..a8150ff30f 100644
--- a/packaging/debian/README.md
+++ b/packaging/debian/README.md
@@ -3,7 +3,7 @@ Ansible Debian Package
To create an Ansible DEB package:
- sudo apt-get install python-paramiko python-yaml python-jinja2 python-httplib2 python-setuptools sshpass
+ sudo apt-get install python-paramiko python-yaml python-jinja2 python-httplib2 python-setuptools python-six sshpass
sudo apt-get install cdbs debhelper dpkg-dev git-core reprepro python-support fakeroot asciidoc devscripts
git clone git://github.com/ansible/ansible.git
cd ansible
diff --git a/packaging/debian/control b/packaging/debian/control
index 14d737444e..73e1cc9202 100644
--- a/packaging/debian/control
+++ b/packaging/debian/control
@@ -8,7 +8,7 @@ Homepage: http://ansible.github.com/
Package: ansible
Architecture: all
-Depends: python, python-support (>= 0.90), python-jinja2, python-yaml, python-paramiko, python-httplib2, python-crypto (>= 2.6), sshpass, ${misc:Depends}
+Depends: python, python-support (>= 0.90), python-jinja2, python-yaml, python-paramiko, python-httplib2, python-six, python-crypto (>= 2.6), sshpass, ${misc:Depends}
Description: A radically simple IT automation platform
A radically simple IT automation platform that makes your applications and
systems easier to deploy. Avoid writing scripts or custom code to deploy and
diff --git a/packaging/rpm/ansible.spec b/packaging/rpm/ansible.spec
index 394017dc0f..ddda6eeb79 100644
--- a/packaging/rpm/ansible.spec
+++ b/packaging/rpm/ansible.spec
@@ -28,6 +28,7 @@ Requires: python26-jinja2
Requires: python26-keyczar
Requires: python26-httplib2
Requires: python26-setuptools
+Requires: python26-six
%endif
# RHEL == 6
@@ -45,6 +46,7 @@ Requires: python-jinja2
Requires: python-keyczar
Requires: python-httplib2
Requires: python-setuptools
+Requires: python-six
%endif
# FEDORA > 17
@@ -57,6 +59,7 @@ Requires: python-jinja2
Requires: python-keyczar
Requires: python-httplib2
Requires: python-setuptools
+Requires: python-six
%endif
# SuSE/openSuSE
@@ -69,6 +72,7 @@ Requires: python-keyczar
Requires: python-yaml
Requires: python-httplib2
Requires: python-setuptools
+Requires: python-six
%endif
Requires: sshpass
From 697a1a406122fa7d932146b0d32159ad363cf245 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Tue, 2 Jun 2015 14:01:11 -0500
Subject: [PATCH 172/971] Don't override ansible_ssh_host with
inventory_hostname
---
lib/ansible/executor/task_executor.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 9bc875b02a..5c6fc862a0 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -371,7 +371,6 @@ class TaskExecutor:
# FIXME: delegate_to calculation should be done here
# FIXME: calculation of connection params/auth stuff should be done here
- self._connection_info.remote_addr = self._host.ipv4_address
if self._task.delegate_to is not None:
self._compute_delegate(variables)
From 65191181069f8d67de81fea1943786fbbf6466d5 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Tue, 2 Jun 2015 14:11:16 -0500
Subject: [PATCH 173/971] Add missing import in ansible.cli
---
lib/ansible/cli/__init__.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index d63203b2e5..daf14aab1f 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -33,6 +33,7 @@ from ansible import __version__
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.utils.unicode import to_bytes
+from ansible.utils.display import Display
class SortedOptParser(optparse.OptionParser):
'''Optparser which sorts the options by opt before outputting --help'''
From 1b48111b12f507dcce509c24917e27f9c29653b7 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Tue, 2 Jun 2015 14:56:32 -0500
Subject: [PATCH 174/971] If remote_addr isn't set, set to ipv4_address
---
lib/ansible/executor/task_executor.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 23cc880bce..9ba2b6bca5 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -371,6 +371,9 @@ class TaskExecutor:
# FIXME: delegate_to calculation should be done here
# FIXME: calculation of connection params/auth stuff should be done here
+ if not self._connection_info.remote_addr:
+ self._connection_info.remote_addr = self._host.ipv4_address
+
if self._task.delegate_to is not None:
self._compute_delegate(variables)
From 48c0d6388ff0cfaa760e77617170ebffe60298ba Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 15:37:06 -0400
Subject: [PATCH 175/971] moved RAW var to class and as a frozenset
---
lib/ansible/parsing/mod_args.py | 27 ++++++++++++++-------------
1 file changed, 14 insertions(+), 13 deletions(-)
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index c24b581fa8..19a51212f7 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -25,6 +25,20 @@ from ansible.errors import AnsibleParserError
from ansible.plugins import module_loader
from ansible.parsing.splitter import parse_kv
+# For filtering out modules correctly below
+RAW_PARAM_MODULES = frozenset(
+ 'command',
+ 'shell',
+ 'script',
+ 'include',
+ 'include_vars',
+ 'add_host',
+ 'group_by',
+ 'set_fact',
+ 'raw',
+ 'meta',
+)
+
class ModuleArgsParser:
"""
@@ -264,19 +278,6 @@ class ModuleArgsParser:
thing = value
action, args = self._normalize_parameters(value, action=action, additional_args=additional_args)
- # FIXME: this should probably be somewhere else
- RAW_PARAM_MODULES = (
- 'command',
- 'shell',
- 'script',
- 'include',
- 'include_vars',
- 'add_host',
- 'group_by',
- 'set_fact',
- 'raw',
- 'meta',
- )
# if we didn't see any module in the task at all, it's not a task really
if action is None:
raise AnsibleParserError("no action detected in task", obj=self._task_ds)
From 5622fc23bc51eebde538b582b5e020c885511f31 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 23:34:57 -0400
Subject: [PATCH 176/971] fixed frozen set, missing iterable
---
lib/ansible/parsing/mod_args.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index 19a51212f7..d7cc83a905 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -26,7 +26,7 @@ from ansible.plugins import module_loader
from ansible.parsing.splitter import parse_kv
# For filtering out modules correctly below
-RAW_PARAM_MODULES = frozenset(
+RAW_PARAM_MODULES = ([
'command',
'shell',
'script',
@@ -37,7 +37,7 @@ RAW_PARAM_MODULES = frozenset(
'set_fact',
'raw',
'meta',
-)
+])
class ModuleArgsParser:
From 65b82f69e4456c8f6521fbec9af769092fe0b2e0 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 23:39:57 -0400
Subject: [PATCH 177/971] avoid failing when mode is none
---
lib/ansible/utils/path.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/utils/path.py b/lib/ansible/utils/path.py
index ac5160402b..b271e7ed4b 100644
--- a/lib/ansible/utils/path.py
+++ b/lib/ansible/utils/path.py
@@ -41,7 +41,10 @@ def makedirs_safe(path, mode=None):
'''Safe way to create dirs in muliprocess/thread environments'''
if not os.path.exists(path):
try:
- os.makedirs(path, mode)
+ if mode:
+ os.makedirs(path, mode)
+ else:
+ os.makedirs(path)
except OSError, e:
if e.errno != EEXIST:
raise
From 3e2e81d896067170e72ca2999fe84c1ba81b9604 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Tue, 2 Jun 2015 23:42:00 -0400
Subject: [PATCH 178/971] missing path in import path for making paths
---
lib/ansible/plugins/connections/winrm.py | 2 +-
lib/ansible/plugins/lookup/password.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/plugins/connections/winrm.py b/lib/ansible/plugins/connections/winrm.py
index dbdf7cd678..f16da0f6e6 100644
--- a/lib/ansible/plugins/connections/winrm.py
+++ b/lib/ansible/plugins/connections/winrm.py
@@ -44,7 +44,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connections import ConnectionBase
from ansible.plugins import shell_loader
-from ansible.utils import makedirs_safe
+from ansible.utils.path import makedirs_safe
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
diff --git a/lib/ansible/plugins/lookup/password.py b/lib/ansible/plugins/lookup/password.py
index 9506274e5f..47ec786429 100644
--- a/lib/ansible/plugins/lookup/password.py
+++ b/lib/ansible/plugins/lookup/password.py
@@ -30,7 +30,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.utils.encrypt import do_encrypt
-from ansible.utils import makedirs_safe
+from ansible.utils.path import makedirs_safe
DEFAULT_LENGTH = 20
From a899f8f01655bdaca349c19e73d4e9bc0d04e095 Mon Sep 17 00:00:00 2001
From: Patrick McConnell
Date: Wed, 3 Jun 2015 07:26:18 +0200
Subject: [PATCH 179/971] Fix for task_executor on OS X
I get this exception during the setup task:
AttributeError: 'ConnectionInformation' object has no attribute 'remote_pass'
I believe it is supposed to be looking at the password attribute. Either that
or we should create a remote_pass attribute in ConnectionInformation.
---
lib/ansible/executor/task_executor.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 9ba2b6bca5..69cbb63f47 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -380,7 +380,7 @@ class TaskExecutor:
conn_type = self._connection_info.connection
if conn_type == 'smart':
conn_type = 'ssh'
- if sys.platform.startswith('darwin') and self._connection_info.remote_pass:
+ if sys.platform.startswith('darwin') and self._connection_info.password:
# due to a current bug in sshpass on OSX, which can trigger
# a kernel panic even for non-privileged users, we revert to
# paramiko on that OS when a SSH password is specified
From 5204d7ca889e0f723c6b66eee13f3e479465fde0 Mon Sep 17 00:00:00 2001
From: Etienne CARRIERE
Date: Wed, 3 Jun 2015 08:20:26 +0200
Subject: [PATCH 180/971] Add common functions for F5 modules (FQ Name
functions)
---
lib/ansible/module_utils/f5.py | 18 +++++++++++++++++-
1 file changed, 17 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/module_utils/f5.py b/lib/ansible/module_utils/f5.py
index 2d97662a0b..d072c759e2 100644
--- a/lib/ansible/module_utils/f5.py
+++ b/lib/ansible/module_utils/f5.py
@@ -50,7 +50,7 @@ def f5_parse_arguments(module):
module.fail_json(msg="the python bigsuds module is required")
if not module.params['validate_certs']:
disable_ssl_cert_validation()
- return (module.params['server'],module.params['user'],module.params['password'],module.params['state'],module.params['partition'])
+ return (module.params['server'],module.params['user'],module.params['password'],module.params['state'],module.params['partition'],module.params['validate_certs'])
def bigip_api(bigip, user, password):
api = bigsuds.BIGIP(hostname=bigip, username=user, password=password)
@@ -62,3 +62,19 @@ def disable_ssl_cert_validation():
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
+# Fully Qualified name (with the partition)
+def fq_name(partition,name):
+ if name is None:
+ return None
+ if name[0] is '/':
+ return name
+ else:
+ return '/%s/%s' % (partition,name)
+
+# Fully Qualified name (with partition) for a list
+def fq_list_names(partition,list_names):
+ if list_names is None:
+ return None
+ return map(lambda x: fq_name(partition,x),list_names)
+
+
From f983557e7e0c23540bb4625635b84726d572227b Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Wed, 3 Jun 2015 09:51:00 -0500
Subject: [PATCH 181/971] Don't set a default on the _become FieldAttribute.
Fixes #11136
---
lib/ansible/playbook/become.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/playbook/become.py b/lib/ansible/playbook/become.py
index daa8c80ba9..fca2853858 100644
--- a/lib/ansible/playbook/become.py
+++ b/lib/ansible/playbook/become.py
@@ -27,7 +27,7 @@ from ansible.playbook.attribute import Attribute, FieldAttribute
class Become:
# Privlege escalation
- _become = FieldAttribute(isa='bool', default=False)
+ _become = FieldAttribute(isa='bool')
_become_method = FieldAttribute(isa='string')
_become_user = FieldAttribute(isa='string')
_become_pass = FieldAttribute(isa='string')
From 89dceb503a171a595a68960961ac3cb098336da6 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Wed, 3 Jun 2015 10:02:27 -0500
Subject: [PATCH 182/971] Import missing MutableMapping class
---
lib/ansible/utils/module_docs.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/lib/ansible/utils/module_docs.py b/lib/ansible/utils/module_docs.py
index 1565bb3be8..9a7ee0ae33 100644
--- a/lib/ansible/utils/module_docs.py
+++ b/lib/ansible/utils/module_docs.py
@@ -23,6 +23,7 @@ import ast
import yaml
import traceback
+from collections import MutableMapping
from ansible.plugins import fragment_loader
# modules that are ok that they do not have documentation strings
From 2e39661a26d881f1ff5991ae46e5cbf45b91cfe9 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Wed, 3 Jun 2015 11:15:13 -0400
Subject: [PATCH 183/971] made with_ examples have explicit templating
---
docsite/rst/playbooks_loops.rst | 30 +++++++++++++++---------------
1 file changed, 15 insertions(+), 15 deletions(-)
diff --git a/docsite/rst/playbooks_loops.rst b/docsite/rst/playbooks_loops.rst
index 5456791f61..a76254a966 100644
--- a/docsite/rst/playbooks_loops.rst
+++ b/docsite/rst/playbooks_loops.rst
@@ -23,7 +23,7 @@ To save some typing, repeated tasks can be written in short-hand like so::
If you have defined a YAML list in a variables file, or the 'vars' section, you can also do::
- with_items: somelist
+ with_items: "{{somelist}}"
The above would be the equivalent of::
@@ -58,12 +58,12 @@ Loops can be nested as well::
- [ 'alice', 'bob' ]
- [ 'clientdb', 'employeedb', 'providerdb' ]
-As with the case of 'with_items' above, you can use previously defined variables. Just specify the variable's name without templating it with '{{ }}'::
+As with the case of 'with_items' above, you can use previously defined variables.::
- name: here, 'users' contains the above list of employees
mysql_user: name={{ item[0] }} priv={{ item[1] }}.*:ALL append_privs=yes password=foo
with_nested:
- - users
+ - "{{users}}"
- [ 'clientdb', 'employeedb', 'providerdb' ]
.. _looping_over_hashes:
@@ -89,7 +89,7 @@ And you want to print every user's name and phone number. You can loop through
tasks:
- name: Print phone records
debug: msg="User {{ item.key }} is {{ item.value.name }} ({{ item.value.telephone }})"
- with_dict: users
+ with_dict: "{{users}}"
.. _looping_over_fileglobs:
@@ -111,7 +111,7 @@ be used like this::
- copy: src={{ item }} dest=/etc/fooapp/ owner=root mode=600
with_fileglob:
- /playbooks/files/fooapp/*
-
+
.. note:: When using a relative path with ``with_fileglob`` in a role, Ansible resolves the path relative to the `roles//files` directory.
Looping over Parallel Sets of Data
@@ -130,21 +130,21 @@ And you want the set of '(a, 1)' and '(b, 2)' and so on. Use 'with_together' t
tasks:
- debug: msg="{{ item.0 }} and {{ item.1 }}"
with_together:
- - alpha
- - numbers
+ - "{{alpha}}"
+ - "{{numbers}}"
Looping over Subelements
````````````````````````
Suppose you want to do something like loop over a list of users, creating them, and allowing them to login by a certain set of
-SSH keys.
+SSH keys.
How might that be accomplished? Let's assume you had the following defined and loaded in via "vars_files" or maybe a "group_vars/all" file::
---
users:
- name: alice
- authorized:
+ authorized:
- /tmp/alice/onekey.pub
- /tmp/alice/twokey.pub
mysql:
@@ -171,7 +171,7 @@ How might that be accomplished? Let's assume you had the following defined and
It might happen like so::
- user: name={{ item.name }} state=present generate_ssh_key=yes
- with_items: users
+ with_items: "{{users}}"
- authorized_key: "user={{ item.0.name }} key='{{ lookup('file', item.1) }}'"
with_subelements:
@@ -329,7 +329,7 @@ Should you ever need to execute a command remotely, you would not use the above
- name: Do something with each result
shell: /usr/bin/something_else --param {{ item }}
- with_items: command_result.stdout_lines
+ with_items: "{{command_result.stdout_lines}}"
.. _indexed_lists:
@@ -345,7 +345,7 @@ It's uncommonly used::
- name: indexed loop demo
debug: msg="at array position {{ item.0 }} there is a value {{ item.1 }}"
- with_indexed_items: some_list
+ with_indexed_items: "{{some_list}}"
.. _flattening_a_list:
@@ -370,8 +370,8 @@ As you can see the formatting of packages in these lists is all over the place.
- name: flattened loop demo
yum: name={{ item }} state=installed
with_flattened:
- - packages_base
- - packages_apps
+ - "{{packages_base}}"
+ - "{{packages_apps}}"
That's how!
@@ -435,7 +435,7 @@ Subsequent loops over the registered variable to inspect the results may look li
fail:
msg: "The command ({{ item.cmd }}) did not have a 0 return code"
when: item.rc != 0
- with_items: echo.results
+ with_items: "{{echo.results}}"
.. _writing_your_own_iterators:
From d8c8ca11cfa0787bc14655439b080a9b7c4962e5 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 3 Jun 2015 08:45:10 -0700
Subject: [PATCH 184/971] Add compatibility for old version of six (present on
rhel7)
---
lib/ansible/parsing/vault/__init__.py | 14 +++++++++++++-
1 file changed, 13 insertions(+), 1 deletion(-)
diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py
index 40d02d3d59..6c2b7c9c62 100644
--- a/lib/ansible/parsing/vault/__init__.py
+++ b/lib/ansible/parsing/vault/__init__.py
@@ -36,7 +36,19 @@ from hashlib import sha256
from hashlib import md5
from binascii import hexlify
from binascii import unhexlify
-from six import binary_type, byte2int, PY2, text_type
+from six import binary_type, PY2, text_type
+
+try:
+ from six import byte2int
+except ImportError:
+ # bytes2int added in six-1.4.0
+ if PY2:
+ def byte2int(bs):
+ return ord(bs[0])
+ else:
+ import operator
+ byte2int = operator.itemgetter(0)
+
from ansible import constants as C
from ansible.utils.unicode import to_unicode, to_bytes
From c3caff5eebac3a9ccdbc242367d22d9372e77c5f Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 3 Jun 2015 10:24:35 -0700
Subject: [PATCH 185/971] Fix for six version 1.1.0 (rhel6).
---
lib/ansible/parsing/vault/__init__.py | 22 +++++++++++-----------
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py
index 6c2b7c9c62..4cd7d2e80b 100644
--- a/lib/ansible/parsing/vault/__init__.py
+++ b/lib/ansible/parsing/vault/__init__.py
@@ -36,18 +36,18 @@ from hashlib import sha256
from hashlib import md5
from binascii import hexlify
from binascii import unhexlify
-from six import binary_type, PY2, text_type
+from six import binary_type, PY3, text_type
try:
from six import byte2int
except ImportError:
# bytes2int added in six-1.4.0
- if PY2:
- def byte2int(bs):
- return ord(bs[0])
- else:
+ if PY3:
import operator
byte2int = operator.itemgetter(0)
+ else:
+ def byte2int(bs):
+ return ord(bs[0])
from ansible import constants as C
from ansible.utils.unicode import to_unicode, to_bytes
@@ -463,10 +463,10 @@ class VaultAES(object):
while not finished:
chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
if len(next_chunk) == 0:
- if PY2:
- padding_length = ord(chunk[-1])
- else:
+ if PY3:
padding_length = chunk[-1]
+ else:
+ padding_length = ord(chunk[-1])
chunk = chunk[:-padding_length]
finished = True
@@ -608,8 +608,8 @@ class VaultAES256(object):
result = 0
for x, y in zip(a, b):
- if PY2:
- result |= ord(x) ^ ord(y)
- else:
+ if PY3:
result |= x ^ y
+ else:
+ result |= ord(x) ^ ord(y)
return result == 0
From 1c8527044bd1fff05c2a716ede98b7a49ec93d93 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 3 Jun 2015 11:26:53 -0700
Subject: [PATCH 186/971] Fix error handling when parsing output from dynamic
inventory
---
lib/ansible/inventory/script.py | 17 ++++++++++++-----
1 file changed, 12 insertions(+), 5 deletions(-)
diff --git a/lib/ansible/inventory/script.py b/lib/ansible/inventory/script.py
index 9675d70f69..be97f5454c 100644
--- a/lib/ansible/inventory/script.py
+++ b/lib/ansible/inventory/script.py
@@ -23,6 +23,8 @@ import os
import subprocess
import sys
+from collections import Mapping
+
from ansible import constants as C
from ansible.errors import *
from ansible.inventory.host import Host
@@ -62,7 +64,16 @@ class InventoryScript:
all_hosts = {}
# not passing from_remote because data from CMDB is trusted
- self.raw = self._loader.load(self.data)
+ try:
+ self.raw = self._loader.load(self.data)
+ except Exception as e:
+ sys.stderr.write(err + "\n")
+ raise AnsibleError("failed to parse executable inventory script results: %s" % str(e))
+
+ if not isinstance(self.raw, Mapping):
+ sys.stderr.write(err + "\n")
+ raise AnsibleError("failed to parse executable inventory script results: data needs to be formatted as a json dict" )
+
self.raw = json_dict_bytes_to_unicode(self.raw)
all = Group('all')
@@ -70,10 +81,6 @@ class InventoryScript:
group = None
- if 'failed' in self.raw:
- sys.stderr.write(err + "\n")
- raise AnsibleError("failed to parse executable inventory script results: %s" % self.raw)
-
for (group_name, data) in self.raw.items():
# in Ansible 1.3 and later, a "_meta" subelement may contain
From 96836412aa2257a45730e6e133bc479040eb7d71 Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 3 Jun 2015 11:51:05 -0700
Subject: [PATCH 187/971] Make error messages tell which inventory script the
error came from
---
lib/ansible/inventory/script.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/lib/ansible/inventory/script.py b/lib/ansible/inventory/script.py
index be97f5454c..91549d78fb 100644
--- a/lib/ansible/inventory/script.py
+++ b/lib/ansible/inventory/script.py
@@ -68,11 +68,11 @@ class InventoryScript:
self.raw = self._loader.load(self.data)
except Exception as e:
sys.stderr.write(err + "\n")
- raise AnsibleError("failed to parse executable inventory script results: %s" % str(e))
+ raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(self.filename, str(e)))
if not isinstance(self.raw, Mapping):
sys.stderr.write(err + "\n")
- raise AnsibleError("failed to parse executable inventory script results: data needs to be formatted as a json dict" )
+ raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(self.filename))
self.raw = json_dict_bytes_to_unicode(self.raw)
From 9856a8f674a4590fd461eba938ff3cb8eb872994 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Wed, 3 Jun 2015 14:56:01 -0400
Subject: [PATCH 188/971] added missing imports to doc module
---
lib/ansible/utils/module_docs.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/utils/module_docs.py b/lib/ansible/utils/module_docs.py
index 9a7ee0ae33..e296c0c698 100644
--- a/lib/ansible/utils/module_docs.py
+++ b/lib/ansible/utils/module_docs.py
@@ -23,7 +23,7 @@ import ast
import yaml
import traceback
-from collections import MutableMapping
+from collections import MutableMapping, MutableSet, MutableSequence
from ansible.plugins import fragment_loader
# modules that are ok that they do not have documentation strings
From 94fa5e879484b988036a2e12c0a3bf1b3e7a351e Mon Sep 17 00:00:00 2001
From: Etienne CARRIERE
Date: Wed, 3 Jun 2015 21:19:11 +0200
Subject: [PATCH 189/971] Simplify Fully Qualified function
---
lib/ansible/module_utils/f5.py | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/lib/ansible/module_utils/f5.py b/lib/ansible/module_utils/f5.py
index d072c759e2..097a6370af 100644
--- a/lib/ansible/module_utils/f5.py
+++ b/lib/ansible/module_utils/f5.py
@@ -64,12 +64,9 @@ def disable_ssl_cert_validation():
# Fully Qualified name (with the partition)
def fq_name(partition,name):
- if name is None:
- return None
- if name[0] is '/':
- return name
- else:
+ if name is not None and not name.startswith('/'):
return '/%s/%s' % (partition,name)
+ return name
# Fully Qualified name (with partition) for a list
def fq_list_names(partition,list_names):
From c89f98168d0ba87c54bbc978928cb2d4f54afef2 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Wed, 3 Jun 2015 14:53:19 -0500
Subject: [PATCH 190/971] Add the hacking directory to v1
---
v1/hacking/README.md | 48 ++++
v1/hacking/authors.sh | 14 ++
v1/hacking/env-setup | 78 ++++++
v1/hacking/env-setup.fish | 67 +++++
v1/hacking/get_library.py | 29 +++
v1/hacking/module_formatter.py | 447 +++++++++++++++++++++++++++++++++
v1/hacking/templates/rst.j2 | 211 ++++++++++++++++
v1/hacking/test-module | 193 ++++++++++++++
v1/hacking/update.sh | 3 +
9 files changed, 1090 insertions(+)
create mode 100644 v1/hacking/README.md
create mode 100755 v1/hacking/authors.sh
create mode 100644 v1/hacking/env-setup
create mode 100644 v1/hacking/env-setup.fish
create mode 100755 v1/hacking/get_library.py
create mode 100755 v1/hacking/module_formatter.py
create mode 100644 v1/hacking/templates/rst.j2
create mode 100755 v1/hacking/test-module
create mode 100755 v1/hacking/update.sh
diff --git a/v1/hacking/README.md b/v1/hacking/README.md
new file mode 100644
index 0000000000..ae8db7e3a9
--- /dev/null
+++ b/v1/hacking/README.md
@@ -0,0 +1,48 @@
+'Hacking' directory tools
+=========================
+
+Env-setup
+---------
+
+The 'env-setup' script modifies your environment to allow you to run
+ansible from a git checkout using python 2.6+. (You may not use
+python 3 at this time).
+
+First, set up your environment to run from the checkout:
+
+ $ source ./hacking/env-setup
+
+You will need some basic prerequisites installed. If you do not already have them
+and do not wish to install them from your operating system package manager, you
+can install them from pip
+
+ $ easy_install pip # if pip is not already available
+ $ pip install pyyaml jinja2 nose passlib pycrypto
+
+From there, follow ansible instructions on docs.ansible.com as normal.
+
+Test-module
+-----------
+
+'test-module' is a simple program that allows module developers (or testers) to run
+a module outside of the ansible program, locally, on the current machine.
+
+Example:
+
+ $ ./hacking/test-module -m lib/ansible/modules/core/commands/shell -a "echo hi"
+
+This is a good way to insert a breakpoint into a module, for instance.
+
+Module-formatter
+----------------
+
+The module formatter is a script used to generate manpages and online
+module documentation. This is used by the system makefiles and rarely
+needs to be run directly.
+
+Authors
+-------
+'authors' is a simple script that generates a list of everyone who has
+contributed code to the ansible repository.
+
+
diff --git a/v1/hacking/authors.sh b/v1/hacking/authors.sh
new file mode 100755
index 0000000000..7c97840b2f
--- /dev/null
+++ b/v1/hacking/authors.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+# script from http://stackoverflow.com/questions/12133583
+set -e
+
+# Get a list of authors ordered by number of commits
+# and remove the commit count column
+AUTHORS=$(git --no-pager shortlog -nse | cut -f 2- | sort -f)
+if [ -z "$AUTHORS" ] ; then
+ echo "Authors list was empty"
+ exit 1
+fi
+
+# Display the authors list and write it to the file
+echo "$AUTHORS" | tee "$(git rev-parse --show-toplevel)/AUTHORS.TXT"
diff --git a/v1/hacking/env-setup b/v1/hacking/env-setup
new file mode 100644
index 0000000000..29f4828410
--- /dev/null
+++ b/v1/hacking/env-setup
@@ -0,0 +1,78 @@
+# usage: source hacking/env-setup [-q]
+# modifies environment for running Ansible from checkout
+
+# Default values for shell variables we use
+PYTHONPATH=${PYTHONPATH-""}
+PATH=${PATH-""}
+MANPATH=${MANPATH-""}
+verbosity=${1-info} # Defaults to `info' if unspecified
+
+if [ "$verbosity" = -q ]; then
+ verbosity=silent
+fi
+
+# When run using source as directed, $0 gets set to bash, so we must use $BASH_SOURCE
+if [ -n "$BASH_SOURCE" ] ; then
+ HACKING_DIR=$(dirname "$BASH_SOURCE")
+elif [ $(basename -- "$0") = "env-setup" ]; then
+ HACKING_DIR=$(dirname "$0")
+# Works with ksh93 but not pdksh
+elif [ -n "$KSH_VERSION" ] && echo $KSH_VERSION | grep -qv '^@(#)PD KSH'; then
+ HACKING_DIR=$(dirname "${.sh.file}")
+else
+ HACKING_DIR="$PWD/hacking"
+fi
+# The below is an alternative to readlink -fn which doesn't exist on OS X
+# Source: http://stackoverflow.com/a/1678636
+FULL_PATH=$(python -c "import os; print(os.path.realpath('$HACKING_DIR'))")
+ANSIBLE_HOME=$(dirname "$FULL_PATH")
+
+PREFIX_PYTHONPATH="$ANSIBLE_HOME"
+PREFIX_PATH="$ANSIBLE_HOME/bin"
+PREFIX_MANPATH="$ANSIBLE_HOME/docs/man"
+
+expr "$PYTHONPATH" : "${PREFIX_PYTHONPATH}.*" > /dev/null || export PYTHONPATH="$PREFIX_PYTHONPATH:$PYTHONPATH"
+expr "$PATH" : "${PREFIX_PATH}.*" > /dev/null || export PATH="$PREFIX_PATH:$PATH"
+expr "$MANPATH" : "${PREFIX_MANPATH}.*" > /dev/null || export MANPATH="$PREFIX_MANPATH:$MANPATH"
+
+#
+# Generate egg_info so that pkg_resources works
+#
+
+# Do the work in a function so we don't repeat ourselves later
+gen_egg_info()
+{
+ if [ -e "$PREFIX_PYTHONPATH/ansible.egg-info" ] ; then
+ rm -r "$PREFIX_PYTHONPATH/ansible.egg-info"
+ fi
+ python setup.py egg_info
+}
+
+if [ "$ANSIBLE_HOME" != "$PWD" ] ; then
+ current_dir="$PWD"
+else
+ current_dir="$ANSIBLE_HOME"
+fi
+cd "$ANSIBLE_HOME"
+if [ "$verbosity" = silent ] ; then
+ gen_egg_info > /dev/null 2>&1
+else
+ gen_egg_info
+fi
+cd "$current_dir"
+
+if [ "$verbosity" != silent ] ; then
+ cat <<- EOF
+
+ Setting up Ansible to run out of checkout...
+
+ PATH=$PATH
+ PYTHONPATH=$PYTHONPATH
+ MANPATH=$MANPATH
+
+ Remember, you may wish to specify your host file with -i
+
+ Done!
+
+ EOF
+fi
diff --git a/v1/hacking/env-setup.fish b/v1/hacking/env-setup.fish
new file mode 100644
index 0000000000..9deffb4e3d
--- /dev/null
+++ b/v1/hacking/env-setup.fish
@@ -0,0 +1,67 @@
+#!/usr/bin/env fish
+# usage: . ./hacking/env-setup [-q]
+# modifies environment for running Ansible from checkout
+set HACKING_DIR (dirname (status -f))
+set FULL_PATH (python -c "import os; print(os.path.realpath('$HACKING_DIR'))")
+set ANSIBLE_HOME (dirname $FULL_PATH)
+set PREFIX_PYTHONPATH $ANSIBLE_HOME/
+set PREFIX_PATH $ANSIBLE_HOME/bin
+set PREFIX_MANPATH $ANSIBLE_HOME/docs/man
+
+# Set PYTHONPATH
+if not set -q PYTHONPATH
+ set -gx PYTHONPATH $PREFIX_PYTHONPATH
+else
+ switch PYTHONPATH
+ case "$PREFIX_PYTHONPATH*"
+ case "*"
+ echo "Appending PYTHONPATH"
+ set -gx PYTHONPATH "$PREFIX_PYTHONPATH:$PYTHONPATH"
+ end
+end
+
+# Set PATH
+if not contains $PREFIX_PATH $PATH
+ set -gx PATH $PREFIX_PATH $PATH
+end
+
+# Set MANPATH
+if not contains $PREFIX_MANPATH $MANPATH
+ if not set -q MANPATH
+ set -gx MANPATH $PREFIX_MANPATH
+ else
+ set -gx MANPATH $PREFIX_MANPATH $MANPATH
+ end
+end
+
+set -gx ANSIBLE_LIBRARY $ANSIBLE_HOME/library
+
+# Generate egg_info so that pkg_resources works
+pushd $ANSIBLE_HOME
+python setup.py egg_info
+if test -e $PREFIX_PYTHONPATH/ansible*.egg-info
+ rm -r $PREFIX_PYTHONPATH/ansible*.egg-info
+end
+mv ansible*egg-info $PREFIX_PYTHONPATH
+popd
+
+
+if set -q argv
+ switch $argv
+ case '-q' '--quiet'
+ case '*'
+ echo ""
+ echo "Setting up Ansible to run out of checkout..."
+ echo ""
+ echo "PATH=$PATH"
+ echo "PYTHONPATH=$PYTHONPATH"
+ echo "ANSIBLE_LIBRARY=$ANSIBLE_LIBRARY"
+ echo "MANPATH=$MANPATH"
+ echo ""
+
+ echo "Remember, you may wish to specify your host file with -i"
+ echo ""
+ echo "Done!"
+ echo ""
+ end
+end
diff --git a/v1/hacking/get_library.py b/v1/hacking/get_library.py
new file mode 100755
index 0000000000..571183b688
--- /dev/null
+++ b/v1/hacking/get_library.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# (c) 2014, Will Thames
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+#
+
+import ansible.constants as C
+import sys
+
+def main():
+ print C.DEFAULT_MODULE_PATH
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/v1/hacking/module_formatter.py b/v1/hacking/module_formatter.py
new file mode 100755
index 0000000000..acddd70093
--- /dev/null
+++ b/v1/hacking/module_formatter.py
@@ -0,0 +1,447 @@
+#!/usr/bin/env python
+# (c) 2012, Jan-Piet Mens
+# (c) 2012-2014, Michael DeHaan and others
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+import os
+import glob
+import sys
+import yaml
+import codecs
+import json
+import ast
+import re
+import optparse
+import time
+import datetime
+import subprocess
+import cgi
+from jinja2 import Environment, FileSystemLoader
+
+from ansible.utils import module_docs
+from ansible.utils.vars import merge_hash
+
+#####################################################################################
+# constants and paths
+
+# if a module is added in a version of Ansible older than this, don't print the version added information
+# in the module documentation because everyone is assumed to be running something newer than this already.
+TO_OLD_TO_BE_NOTABLE = 1.0
+
+# Get parent directory of the directory this script lives in
+MODULEDIR=os.path.abspath(os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, 'lib', 'ansible', 'modules'
+))
+
+# The name of the DOCUMENTATION template
+EXAMPLE_YAML=os.path.abspath(os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, 'examples', 'DOCUMENTATION.yml'
+))
+
+_ITALIC = re.compile(r"I\(([^)]+)\)")
+_BOLD = re.compile(r"B\(([^)]+)\)")
+_MODULE = re.compile(r"M\(([^)]+)\)")
+_URL = re.compile(r"U\(([^)]+)\)")
+_CONST = re.compile(r"C\(([^)]+)\)")
+
+DEPRECATED = " (D)"
+NOTCORE = " (E)"
+#####################################################################################
+
+def rst_ify(text):
+ ''' convert symbols like I(this is in italics) to valid restructured text '''
+
+ t = _ITALIC.sub(r'*' + r"\1" + r"*", text)
+ t = _BOLD.sub(r'**' + r"\1" + r"**", t)
+ t = _MODULE.sub(r':ref:`' + r"\1 <\1>" + r"`", t)
+ t = _URL.sub(r"\1", t)
+ t = _CONST.sub(r'``' + r"\1" + r"``", t)
+
+ return t
+
+#####################################################################################
+
+def html_ify(text):
+ ''' convert symbols like I(this is in italics) to valid HTML '''
+
+ t = cgi.escape(text)
+    t = _ITALIC.sub("<em>" + r"\1" + "</em>", t)
+    t = _BOLD.sub("<b>" + r"\1" + "</b>", t)
+    t = _MODULE.sub("<span class='module'>" + r"\1" + "</span>", t)
+    t = _URL.sub("<a href='" + r"\1" + "'>" + r"\1" + "</a>", t)
+    t = _CONST.sub("<code>" + r"\1" + "</code>", t)
+
+ return t
+
+
+#####################################################################################
+
+def rst_fmt(text, fmt):
+ ''' helper for Jinja2 to do format strings '''
+
+ return fmt % (text)
+
+#####################################################################################
+
+def rst_xline(width, char="="):
+ ''' return a restructured text line of a given length '''
+
+ return char * width
+
+#####################################################################################
+
+def write_data(text, options, outputname, module):
+ ''' dumps module output to a file or the screen, as requested '''
+
+ if options.output_dir is not None:
+ fname = os.path.join(options.output_dir, outputname % module)
+ fname = fname.replace(".py","")
+ f = open(fname, 'w')
+ f.write(text.encode('utf-8'))
+ f.close()
+ else:
+ print text
+
+#####################################################################################
+
+
+def list_modules(module_dir, depth=0):
+ ''' returns a hash of categories, each category being a hash of module names to file paths '''
+
+ categories = dict(all=dict(),_aliases=dict())
+ if depth <= 3: # limit # of subdirs
+
+ files = glob.glob("%s/*" % module_dir)
+ for d in files:
+
+ category = os.path.splitext(os.path.basename(d))[0]
+ if os.path.isdir(d):
+
+ res = list_modules(d, depth + 1)
+ for key in res.keys():
+ if key in categories:
+ categories[key] = merge_hash(categories[key], res[key])
+ res.pop(key, None)
+
+ if depth < 2:
+ categories.update(res)
+ else:
+ category = module_dir.split("/")[-1]
+ if not category in categories:
+ categories[category] = res
+ else:
+ categories[category].update(res)
+ else:
+ module = category
+ category = os.path.basename(module_dir)
+ if not d.endswith(".py") or d.endswith('__init__.py'):
+ # windows powershell modules have documentation stubs in python docstring
+ # format (they are not executed) so skip the ps1 format files
+ continue
+ elif module.startswith("_") and os.path.islink(d):
+ source = os.path.splitext(os.path.basename(os.path.realpath(d)))[0]
+ module = module.replace("_","",1)
+ if not d in categories['_aliases']:
+ categories['_aliases'][source] = [module]
+ else:
+ categories['_aliases'][source].update(module)
+ continue
+
+ if not category in categories:
+ categories[category] = {}
+ categories[category][module] = d
+ categories['all'][module] = d
+
+ return categories
+
+#####################################################################################
+
+def generate_parser():
+ ''' generate an optparse parser '''
+
+ p = optparse.OptionParser(
+ version='%prog 1.0',
+ usage='usage: %prog [options] arg1 arg2',
+ description='Generate module documentation from metadata',
+ )
+
+ p.add_option("-A", "--ansible-version", action="store", dest="ansible_version", default="unknown", help="Ansible version number")
+ p.add_option("-M", "--module-dir", action="store", dest="module_dir", default=MODULEDIR, help="Ansible library path")
+ p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="hacking/templates", help="directory containing Jinja2 templates")
+ p.add_option("-t", "--type", action='store', dest='type', choices=['rst'], default='rst', help="Document type")
+ p.add_option("-v", "--verbose", action='store_true', default=False, help="Verbose")
+ p.add_option("-o", "--output-dir", action="store", dest="output_dir", default=None, help="Output directory for module files")
+ p.add_option("-I", "--includes-file", action="store", dest="includes_file", default=None, help="Create a file containing list of processed modules")
+ p.add_option('-V', action='version', help='Show version number and exit')
+ return p
+
+#####################################################################################
+
+def jinja2_environment(template_dir, typ):
+
+ env = Environment(loader=FileSystemLoader(template_dir),
+ variable_start_string="@{",
+ variable_end_string="}@",
+ trim_blocks=True,
+ )
+ env.globals['xline'] = rst_xline
+
+ if typ == 'rst':
+ env.filters['convert_symbols_to_format'] = rst_ify
+ env.filters['html_ify'] = html_ify
+ env.filters['fmt'] = rst_fmt
+ env.filters['xline'] = rst_xline
+ template = env.get_template('rst.j2')
+ outputname = "%s_module.rst"
+ else:
+ raise Exception("unknown module format type: %s" % typ)
+
+ return env, template, outputname
+
+#####################################################################################
+
+def process_module(module, options, env, template, outputname, module_map, aliases):
+
+ fname = module_map[module]
+ if isinstance(fname, dict):
+ return "SKIPPED"
+
+ basename = os.path.basename(fname)
+ deprecated = False
+
+ # ignore files with extensions
+ if not basename.endswith(".py"):
+ return
+ elif module.startswith("_"):
+ if os.path.islink(fname):
+ return # ignore, its an alias
+ deprecated = True
+ module = module.replace("_","",1)
+
+ print "rendering: %s" % module
+
+ # use ansible core library to parse out doc metadata YAML and plaintext examples
+ doc, examples, returndocs = module_docs.get_docstring(fname, verbose=options.verbose)
+
+ # crash if module is missing documentation and not explicitly hidden from docs index
+ if doc is None:
+ if module in module_docs.BLACKLIST_MODULES:
+ return "SKIPPED"
+ else:
+ sys.stderr.write("*** ERROR: MODULE MISSING DOCUMENTATION: %s, %s ***\n" % (fname, module))
+ sys.exit(1)
+
+ if deprecated and 'deprecated' not in doc:
+ sys.stderr.write("*** ERROR: DEPRECATED MODULE MISSING 'deprecated' DOCUMENTATION: %s, %s ***\n" % (fname, module))
+ sys.exit(1)
+
+ if "/core/" in fname:
+ doc['core'] = True
+ else:
+ doc['core'] = False
+
+ if module in aliases:
+ doc['aliases'] = aliases[module]
+
+ all_keys = []
+
+ if not 'version_added' in doc:
+ sys.stderr.write("*** ERROR: missing version_added in: %s ***\n" % module)
+ sys.exit(1)
+
+ added = 0
+ if doc['version_added'] == 'historical':
+ del doc['version_added']
+ else:
+ added = doc['version_added']
+
+ # don't show version added information if it's too old to be called out
+ if added:
+ added_tokens = str(added).split(".")
+ added = added_tokens[0] + "." + added_tokens[1]
+ added_float = float(added)
+ if added and added_float < TO_OLD_TO_BE_NOTABLE:
+ del doc['version_added']
+
+ if 'options' in doc:
+ for (k,v) in doc['options'].iteritems():
+ all_keys.append(k)
+
+ all_keys = sorted(all_keys)
+
+ doc['option_keys'] = all_keys
+ doc['filename'] = fname
+ doc['docuri'] = doc['module'].replace('_', '-')
+ doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
+ doc['ansible_version'] = options.ansible_version
+ doc['plainexamples'] = examples #plain text
+ if returndocs:
+ doc['returndocs'] = yaml.safe_load(returndocs)
+ else:
+ doc['returndocs'] = None
+
+ # here is where we build the table of contents...
+
+ text = template.render(doc)
+ write_data(text, options, outputname, module)
+ return doc['short_description']
+
+#####################################################################################
+
+def print_modules(module, category_file, deprecated, core, options, env, template, outputname, module_map, aliases):
+ modstring = module
+ modname = module
+ if module in deprecated:
+ modstring = modstring + DEPRECATED
+ modname = "_" + module
+ elif module not in core:
+ modstring = modstring + NOTCORE
+
+ result = process_module(modname, options, env, template, outputname, module_map, aliases)
+
+ if result != "SKIPPED":
+ category_file.write(" %s - %s <%s_module>\n" % (modstring, result, module))
+
+def process_category(category, categories, options, env, template, outputname):
+
+ module_map = categories[category]
+
+ aliases = {}
+ if '_aliases' in categories:
+ aliases = categories['_aliases']
+
+ category_file_path = os.path.join(options.output_dir, "list_of_%s_modules.rst" % category)
+ category_file = open(category_file_path, "w")
+ print "*** recording category %s in %s ***" % (category, category_file_path)
+
+ # TODO: start a new category file
+
+ category = category.replace("_"," ")
+ category = category.title()
+
+ modules = []
+ deprecated = []
+ core = []
+ for module in module_map.keys():
+
+ if isinstance(module_map[module], dict):
+ for mod in module_map[module].keys():
+ if mod.startswith("_"):
+ mod = mod.replace("_","",1)
+ deprecated.append(mod)
+ elif '/core/' in module_map[module][mod]:
+ core.append(mod)
+ else:
+ if module.startswith("_"):
+ module = module.replace("_","",1)
+ deprecated.append(module)
+ elif '/core/' in module_map[module]:
+ core.append(module)
+
+ modules.append(module)
+
+ modules.sort()
+
+ category_header = "%s Modules" % (category.title())
+ underscores = "`" * len(category_header)
+
+ category_file.write("""\
+%s
+%s
+
+.. toctree:: :maxdepth: 1
+
+""" % (category_header, underscores))
+ sections = []
+ for module in modules:
+ if module in module_map and isinstance(module_map[module], dict):
+ sections.append(module)
+ continue
+ else:
+ print_modules(module, category_file, deprecated, core, options, env, template, outputname, module_map, aliases)
+
+ sections.sort()
+ for section in sections:
+ category_file.write("\n%s\n%s\n\n" % (section.replace("_"," ").title(),'-' * len(section)))
+ category_file.write(".. toctree:: :maxdepth: 1\n\n")
+
+ section_modules = module_map[section].keys()
+ section_modules.sort()
+ #for module in module_map[section]:
+ for module in section_modules:
+ print_modules(module, category_file, deprecated, core, options, env, template, outputname, module_map[section], aliases)
+
+ category_file.write("""\n\n
+.. note::
+ - %s: This marks a module as deprecated, which means a module is kept for backwards compatibility but usage is discouraged. The module documentation details page may explain more about this rationale.
+ - %s: This marks a module as 'extras', which means it ships with ansible but may be a newer module and possibly (but not necessarily) less actively maintained than 'core' modules.
+   - Tickets filed on modules are filed to different repos than those on the main open source project. Core module tickets should be filed at `ansible/ansible-modules-core on GitHub <http://github.com/ansible/ansible-modules-core>`_, extras tickets to `ansible/ansible-modules-extras on GitHub <http://github.com/ansible/ansible-modules-extras>`_
+""" % (DEPRECATED, NOTCORE))
+ category_file.close()
+
+ # TODO: end a new category file
+
+#####################################################################################
+
+def validate_options(options):
+ ''' validate option parser options '''
+
+ if not options.module_dir:
+ print >>sys.stderr, "--module-dir is required"
+ sys.exit(1)
+ if not os.path.exists(options.module_dir):
+ print >>sys.stderr, "--module-dir does not exist: %s" % options.module_dir
+ sys.exit(1)
+ if not options.template_dir:
+ print "--template-dir must be specified"
+ sys.exit(1)
+
+#####################################################################################
+
+def main():
+
+ p = generate_parser()
+
+ (options, args) = p.parse_args()
+ validate_options(options)
+
+ env, template, outputname = jinja2_environment(options.template_dir, options.type)
+
+ categories = list_modules(options.module_dir)
+ last_category = None
+ category_names = categories.keys()
+ category_names.sort()
+
+ category_list_path = os.path.join(options.output_dir, "modules_by_category.rst")
+ category_list_file = open(category_list_path, "w")
+ category_list_file.write("Module Index\n")
+ category_list_file.write("============\n")
+ category_list_file.write("\n\n")
+ category_list_file.write(".. toctree::\n")
+ category_list_file.write(" :maxdepth: 1\n\n")
+
+ for category in category_names:
+ if category.startswith("_"):
+ continue
+ category_list_file.write(" list_of_%s_modules\n" % category)
+ process_category(category, categories, options, env, template, outputname)
+
+ category_list_file.close()
+
+if __name__ == '__main__':
+ main()
diff --git a/v1/hacking/templates/rst.j2 b/v1/hacking/templates/rst.j2
new file mode 100644
index 0000000000..f6f38e5910
--- /dev/null
+++ b/v1/hacking/templates/rst.j2
@@ -0,0 +1,211 @@
+.. _@{ module }@:
+
+{% if short_description %}
+{% set title = module + ' - ' + short_description|convert_symbols_to_format %}
+{% else %}
+{% set title = module %}
+{% endif %}
+{% set title_len = title|length %}
+
+@{ title }@
+@{ '+' * title_len }@
+
+.. contents::
+ :local:
+ :depth: 1
+
+{# ------------------------------------------
+ #
+ # Please note: this looks like a core dump
+ # but it isn't one.
+ #
+ --------------------------------------------#}
+
+{% if aliases is defined -%}
+Aliases: @{ ','.join(aliases) }@
+{% endif %}
+
+{% if deprecated is defined -%}
+DEPRECATED
+----------
+
+@{ deprecated }@
+{% endif %}
+
+Synopsis
+--------
+
+{% if version_added is defined -%}
+.. versionadded:: @{ version_added }@
+{% endif %}
+
+{% for desc in description -%}
+@{ desc | convert_symbols_to_format }@
+{% endfor %}
+
+{% if options -%}
+Options
+-------
+
+.. raw:: html
+
+
+
+ parameter |
+ required |
+ default |
+ choices |
+ comments |
+
+ {% for k in option_keys %}
+ {% set v = options[k] %}
+
+ @{ k }@ |
+ {% if v.get('required', False) %}yes{% else %}no{% endif %} |
+ {% if v['default'] %}@{ v['default'] }@{% endif %} |
+ {% if v.get('type', 'not_bool') == 'bool' %}
+ |
+ {% else %}
+ {% for choice in v.get('choices',[]) -%}- @{ choice }@
{% endfor -%} |
+ {% endif %}
+ {% for desc in v.description -%}@{ desc | html_ify }@{% endfor -%}{% if v['version_added'] %} (added in Ansible @{v['version_added']}@){% endif %} |
+
+ {% endfor %}
+
+{% endif %}
+
+{% if requirements %}
+{% for req in requirements %}
+
+.. note:: Requires @{ req | convert_symbols_to_format }@
+
+{% endfor %}
+{% endif %}
+
+{% if examples or plainexamples %}
+Examples
+--------
+
+.. raw:: html
+
+{% for example in examples %}
+ {% if example['description'] %}@{ example['description'] | html_ify }@
{% endif %}
+
+
+@{ example['code'] | escape | indent(4, True) }@
+
+
+{% endfor %}
+
+
+{% if plainexamples %}
+
+::
+
+@{ plainexamples | indent(4, True) }@
+{% endif %}
+{% endif %}
+
+
+{% if returndocs %}
+Return Values
+-------------
+
+Common return values are documented here :doc:`common_return_values`, the following are the fields unique to this module:
+
+.. raw:: html
+
+
+
+ name |
+ description |
+ returned |
+ type |
+ sample |
+
+
+ {% for entry in returndocs %}
+
+ @{ entry }@ |
+ @{ returndocs[entry].description }@ |
+ @{ returndocs[entry].returned }@ |
+ @{ returndocs[entry].type }@ |
+ @{ returndocs[entry].sample}@ |
+
+ {% if returndocs[entry].type == 'dictionary' %}
+ contains: |
+
+
+
+ name |
+ description |
+ returned |
+ type |
+ sample |
+
+
+ {% for sub in returndocs[entry].contains %}
+
+ @{ sub }@ |
+ @{ returndocs[entry].contains[sub].description }@ |
+ @{ returndocs[entry].contains[sub].returned }@ |
+ @{ returndocs[entry].contains[sub].type }@ |
+ @{ returndocs[entry].contains[sub].sample}@ |
+
+ {% endfor %}
+
+
+ |
+
+ {% endif %}
+ {% endfor %}
+
+
+
+{% endif %}
+
+{% if notes %}
+{% for note in notes %}
+.. note:: @{ note | convert_symbols_to_format }@
+{% endfor %}
+{% endif %}
+
+
+{% if not deprecated %}
+ {% if core %}
+
+This is a Core Module
+---------------------
+
+The source of this module is hosted on GitHub in the `ansible-modules-core <http://github.com/ansible/ansible-modules-core>`_ repo.
+
+If you believe you have found a bug in this module, and are already running the latest stable or development version of Ansible, first look in the `issue tracker at github.com/ansible/ansible-modules-core <http://github.com/ansible/ansible-modules-core/issues>`_ to see if a bug has already been filed. If not, we would be grateful if you would file one.
+
+Should you have a question rather than a bug report, inquiries are welcome on the `ansible-project google group <http://groups.google.com/group/ansible-project>`_ or on Ansible's "#ansible" channel, located on irc.freenode.net. Development oriented topics should instead use the similar `ansible-devel google group <http://groups.google.com/group/ansible-devel>`_.
+
+Documentation updates for this module can also be edited directly by submitting a pull request to the module source code, just look for the "DOCUMENTATION" block in the source tree.
+
+This is a "core" ansible module, which means it will receive slightly higher priority for all requests than those in the "extras" repos.
+
+ {% else %}
+
+This is an Extras Module
+------------------------
+
+The source of this module is hosted on GitHub in the `ansible-modules-extras <http://github.com/ansible/ansible-modules-extras>`_ repo.
+
+If you believe you have found a bug in this module, and are already running the latest stable or development version of Ansible, first look in the `issue tracker at github.com/ansible/ansible-modules-extras <http://github.com/ansible/ansible-modules-extras/issues>`_ to see if a bug has already been filed. If not, we would be grateful if you would file one.
+
+Should you have a question rather than a bug report, inquiries are welcome on the `ansible-project google group <http://groups.google.com/group/ansible-project>`_ or on Ansible's "#ansible" channel, located on irc.freenode.net. Development oriented topics should instead use the similar `ansible-devel google group <http://groups.google.com/group/ansible-devel>`_.
+
+Documentation updates for this module can also be edited directly by submitting a pull request to the module source code, just look for the "DOCUMENTATION" block in the source tree.
+
+Note that this module is designated a "extras" module. Non-core modules are still fully usable, but may receive slightly lower response rates for issues and pull requests.
+Popular "extras" modules may be promoted to core modules over time.
+
+ {% endif %}
+{% endif %}
+
+For help in developing on modules, should you be so inclined, please read :doc:`community`, :doc:`developing_test_pr` and :doc:`developing_modules`.
+
+
diff --git a/v1/hacking/test-module b/v1/hacking/test-module
new file mode 100755
index 0000000000..c226f32e88
--- /dev/null
+++ b/v1/hacking/test-module
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+
+# (c) 2012, Michael DeHaan
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# this script is for testing modules without running through the
+# entire guts of ansible, and is very helpful for when developing
+# modules
+#
+# example:
+# test-module -m ../library/commands/command -a "/bin/sleep 3"
+# test-module -m ../library/system/service -a "name=httpd ensure=restarted"
+# test-module -m ../library/system/service -a "name=httpd ensure=restarted" --debugger /usr/bin/pdb
+# test-module -m ../library/file/lineinfile -a "dest=/etc/exports line='/srv/home hostname1(rw,sync)'" --check
+
+import sys
+import base64
+import os
+import subprocess
+import traceback
+import optparse
+import ansible.utils as utils
+import ansible.module_common as module_common
+import ansible.constants as C
+
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
+def parse():
+ """parse command line
+
+ :return : (options, args)"""
+ parser = optparse.OptionParser()
+
+ parser.usage = "%prog -[options] (-h for help)"
+
+ parser.add_option('-m', '--module-path', dest='module_path',
+ help="REQUIRED: full path of module source to execute")
+ parser.add_option('-a', '--args', dest='module_args', default="",
+ help="module argument string")
+ parser.add_option('-D', '--debugger', dest='debugger',
+ help="path to python debugger (e.g. /usr/bin/pdb)")
+ parser.add_option('-I', '--interpreter', dest='interpreter',
+ help="path to interpreter to use for this module (e.g. ansible_python_interpreter=/usr/bin/python)",
+ metavar='INTERPRETER_TYPE=INTERPRETER_PATH')
+ parser.add_option('-c', '--check', dest='check', action='store_true',
+ help="run the module in check mode")
+ options, args = parser.parse_args()
+ if not options.module_path:
+ parser.print_help()
+ sys.exit(1)
+ else:
+ return options, args
+
+def write_argsfile(argstring, json=False):
+ """ Write args to a file for old-style module's use. """
+ argspath = os.path.expanduser("~/.ansible_test_module_arguments")
+ argsfile = open(argspath, 'w')
+ if json:
+ args = utils.parse_kv(argstring)
+ argstring = utils.jsonify(args)
+ argsfile.write(argstring)
+ argsfile.close()
+ return argspath
+
+def boilerplate_module(modfile, args, interpreter, check):
+ """ simulate what ansible does with new style modules """
+
+ #module_fh = open(modfile)
+ #module_data = module_fh.read()
+ #module_fh.close()
+
+ replacer = module_common.ModuleReplacer()
+
+ #included_boilerplate = module_data.find(module_common.REPLACER) != -1 or module_data.find("import ansible.module_utils") != -1
+
+ complex_args = {}
+ if args.startswith("@"):
+ # Argument is a YAML file (JSON is a subset of YAML)
+ complex_args = utils.combine_vars(complex_args, utils.parse_yaml_from_file(args[1:]))
+ args=''
+ elif args.startswith("{"):
+ # Argument is a YAML document (not a file)
+ complex_args = utils.combine_vars(complex_args, utils.parse_yaml(args))
+ args=''
+
+ inject = {}
+ if interpreter:
+ if '=' not in interpreter:
+ print 'interpreter must by in the form of ansible_python_interpreter=/usr/bin/python'
+ sys.exit(1)
+ interpreter_type, interpreter_path = interpreter.split('=')
+ if not interpreter_type.startswith('ansible_'):
+ interpreter_type = 'ansible_%s' % interpreter_type
+ if not interpreter_type.endswith('_interpreter'):
+ interpreter_type = '%s_interpreter' % interpreter_type
+ inject[interpreter_type] = interpreter_path
+
+ if check:
+ complex_args['CHECKMODE'] = True
+
+ (module_data, module_style, shebang) = replacer.modify_module(
+ modfile,
+ complex_args,
+ args,
+ inject
+ )
+
+ modfile2_path = os.path.expanduser("~/.ansible_module_generated")
+ print "* including generated source, if any, saving to: %s" % modfile2_path
+ print "* this may offset any line numbers in tracebacks/debuggers!"
+ modfile2 = open(modfile2_path, 'w')
+ modfile2.write(module_data)
+ modfile2.close()
+ modfile = modfile2_path
+
+ return (modfile2_path, module_style)
+
+def runtest( modfile, argspath):
+ """Test run a module, piping it's output for reporting."""
+
+ os.system("chmod +x %s" % modfile)
+
+ invoke = "%s" % (modfile)
+ if argspath is not None:
+ invoke = "%s %s" % (modfile, argspath)
+
+ cmd = subprocess.Popen(invoke, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out, err) = cmd.communicate()
+
+ try:
+ print "***********************************"
+ print "RAW OUTPUT"
+ print out
+ print err
+ results = utils.parse_json(out)
+ except:
+ print "***********************************"
+ print "INVALID OUTPUT FORMAT"
+ print out
+ traceback.print_exc()
+ sys.exit(1)
+
+ print "***********************************"
+ print "PARSED OUTPUT"
+ print utils.jsonify(results,format=True)
+
+def rundebug(debugger, modfile, argspath):
+ """Run interactively with console debugger."""
+
+ if argspath is not None:
+ subprocess.call("%s %s %s" % (debugger, modfile, argspath), shell=True)
+ else:
+ subprocess.call("%s %s" % (debugger, modfile), shell=True)
+
+def main():
+
+ options, args = parse()
+ (modfile, module_style) = boilerplate_module(options.module_path, options.module_args, options.interpreter, options.check)
+
+ argspath=None
+ if module_style != 'new':
+ if module_style == 'non_native_want_json':
+ argspath = write_argsfile(options.module_args, json=True)
+ elif module_style == 'old':
+ argspath = write_argsfile(options.module_args, json=False)
+ else:
+ raise Exception("internal error, unexpected module style: %s" % module_style)
+ if options.debugger:
+ rundebug(options.debugger, modfile, argspath)
+ else:
+ runtest(modfile, argspath)
+
+if __name__ == "__main__":
+ main()
+
diff --git a/v1/hacking/update.sh b/v1/hacking/update.sh
new file mode 100755
index 0000000000..5979dd0ab2
--- /dev/null
+++ b/v1/hacking/update.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+git pull --rebase
+git submodule update --init --recursive
From 7dd3ef7b60b09fb5c4a9ada0e96be87c5edd59ae Mon Sep 17 00:00:00 2001
From: Toshio Kuratomi
Date: Wed, 3 Jun 2015 13:27:31 -0700
Subject: [PATCH 191/971] Older python-six from early RHEL and ubuntu do not
have add_metaclass but do have with_metaclass
---
lib/ansible/plugins/cache/base.py | 5 ++---
lib/ansible/plugins/connections/__init__.py | 5 ++---
lib/ansible/plugins/inventory/__init__.py | 5 ++---
3 files changed, 6 insertions(+), 9 deletions(-)
diff --git a/lib/ansible/plugins/cache/base.py b/lib/ansible/plugins/cache/base.py
index 767964b281..e903c935e4 100644
--- a/lib/ansible/plugins/cache/base.py
+++ b/lib/ansible/plugins/cache/base.py
@@ -20,11 +20,10 @@ __metaclass__ = type
from abc import ABCMeta, abstractmethod
-from six import add_metaclass
+from six import with_metaclass
-@add_metaclass(ABCMeta)
-class BaseCacheModule:
+class BaseCacheModule(with_metaclass(ABCMeta, object)):
@abstractmethod
def get(self, key):
diff --git a/lib/ansible/plugins/connections/__init__.py b/lib/ansible/plugins/connections/__init__.py
index 70807b08f6..897bc58982 100644
--- a/lib/ansible/plugins/connections/__init__.py
+++ b/lib/ansible/plugins/connections/__init__.py
@@ -22,7 +22,7 @@ __metaclass__ = type
from abc import ABCMeta, abstractmethod, abstractproperty
-from six import add_metaclass
+from six import with_metaclass
from ansible import constants as C
from ansible.errors import AnsibleError
@@ -34,8 +34,7 @@ from ansible.utils.display import Display
__all__ = ['ConnectionBase']
-@add_metaclass(ABCMeta)
-class ConnectionBase:
+class ConnectionBase(with_metaclass(ABCMeta, object)):
'''
A base class for connections to contain common code.
'''
diff --git a/lib/ansible/plugins/inventory/__init__.py b/lib/ansible/plugins/inventory/__init__.py
index 03fd89429b..74dbccc1bb 100644
--- a/lib/ansible/plugins/inventory/__init__.py
+++ b/lib/ansible/plugins/inventory/__init__.py
@@ -23,10 +23,9 @@ __metaclass__ = type
from abc import ABCMeta, abstractmethod
-from six import add_metaclass
+from six import with_metaclass
-@add_metaclass(ABCMeta)
-class InventoryParser:
+class InventoryParser(with_metaclass(ABCMeta, object)):
'''Abstract Base Class for retrieving inventory information
Any InventoryParser functions by taking an inven_source. The caller then
From 337b1dc45c3bc101e13357bf3a4e21dd62546b14 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Wed, 3 Jun 2015 20:55:55 -0400
Subject: [PATCH 192/971] minor doc fixes
---
docsite/rst/intro_configuration.rst | 4 ++--
docsite/rst/playbooks_filters.rst | 1 +
docsite/rst/playbooks_special_topics.rst | 1 +
3 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/docsite/rst/intro_configuration.rst b/docsite/rst/intro_configuration.rst
index 2ff53c2248..ca5d581779 100644
--- a/docsite/rst/intro_configuration.rst
+++ b/docsite/rst/intro_configuration.rst
@@ -309,7 +309,7 @@ The valid values are either 'replace' (the default) or 'merge'.
hostfile
========
-This is a deprecated setting since 1.9, please look at :ref:`inventory` for the new setting.
+This is a deprecated setting since 1.9, please look at :ref:`inventory_file` for the new setting.
.. _host_key_checking:
@@ -321,7 +321,7 @@ implications and wish to disable it, you may do so here by setting the value to
host_key_checking=True
-.. _inventory:
+.. _inventory_file:
inventory
=========
diff --git a/docsite/rst/playbooks_filters.rst b/docsite/rst/playbooks_filters.rst
index ef6185f951..0cb42213b4 100644
--- a/docsite/rst/playbooks_filters.rst
+++ b/docsite/rst/playbooks_filters.rst
@@ -3,6 +3,7 @@ Jinja2 filters
.. contents:: Topics
+
Filters in Jinja2 are a way of transforming template expressions from one kind of data into another. Jinja2
ships with many of these. See `builtin filters`_ in the official Jinja2 template documentation.
diff --git a/docsite/rst/playbooks_special_topics.rst b/docsite/rst/playbooks_special_topics.rst
index c57f5796c9..74974cad10 100644
--- a/docsite/rst/playbooks_special_topics.rst
+++ b/docsite/rst/playbooks_special_topics.rst
@@ -7,6 +7,7 @@ and adopt these only if they seem relevant or useful to your environment.
.. toctree::
:maxdepth: 1
+ become
playbooks_acceleration
playbooks_async
playbooks_checkmode
From 0826106441d15820d086c1c9eaf6242aa80e4406 Mon Sep 17 00:00:00 2001
From: Brian Coca
Date: Wed, 3 Jun 2015 22:19:26 -0400
Subject: [PATCH 193/971] minor docs reformat - clearer 'version added' for
module options, now it sits under the option name - made notes a section, so
it now appears in toc - moved requirements and made it a list, more prominent
and more readable
---
hacking/templates/rst.j2 | 31 ++++++++++++++++++++-----------
1 file changed, 20 insertions(+), 11 deletions(-)
diff --git a/hacking/templates/rst.j2 b/hacking/templates/rst.j2
index f6f38e5910..a30e16e41f 100644
--- a/hacking/templates/rst.j2
+++ b/hacking/templates/rst.j2
@@ -43,6 +43,17 @@ Synopsis
@{ desc | convert_symbols_to_format }@
{% endfor %}
+
+{% if requirements %}
+Requirements
+------------
+
+{% for req in requirements %}
+ * @{ req | convert_symbols_to_format }@
+{% endfor %}
+{% endif %}
+
+
{% if options -%}
Options
-------
@@ -60,7 +71,7 @@ Options
{% for k in option_keys %}
{% set v = options[k] %}
- @{ k }@ |
+ @{ k }@
{% if v['version_added'] %} (added in @{v['version_added']}@){% endif %} |
{% if v.get('required', False) %}yes{% else %}no{% endif %} |
{% if v['default'] %}@{ v['default'] }@{% endif %} |
{% if v.get('type', 'not_bool') == 'bool' %}
@@ -68,21 +79,16 @@ Options
{% else %}
{% for choice in v.get('choices',[]) -%}- @{ choice }@
{% endfor -%} |
{% endif %}
- {% for desc in v.description -%}@{ desc | html_ify }@{% endfor -%}{% if v['version_added'] %} (added in Ansible @{v['version_added']}@){% endif %} |
+ {% for desc in v.description -%}@{ desc | html_ify }@{% endfor -%} |
{% endfor %}
+
{% endif %}
-{% if requirements %}
-{% for req in requirements %}
-.. note:: Requires @{ req | convert_symbols_to_format }@
-{% endfor %}
-{% endif %}
-
-{% if examples or plainexamples %}
+{% if examples or plainexamples -%}
Examples
--------
@@ -107,7 +113,7 @@ Examples
{% endif %}
-{% if returndocs %}
+{% if returndocs -%}
Return Values
-------------
@@ -164,7 +170,10 @@ Common return values are documented here :doc:`common_return_values`, the follow
{% endif %}
-{% if notes %}
+{% if notes -%}
+Notes
+-----
+
{% for note in notes %}
.. note:: @{ note | convert_symbols_to_format }@
{% endfor %}
From efc3d2931edc583f44c1644ab3c1d3afb29c894a Mon Sep 17 00:00:00 2001
From: joshainglis
Date: Thu, 4 Jun 2015 17:07:08 +1000
Subject: [PATCH 194/971] Fixed typo
---
plugins/inventory/ovirt.ini | 34 +++++
plugins/inventory/ovirt.py | 287 ++++++++++++++++++++++++++++++++++++
2 files changed, 321 insertions(+)
create mode 100644 plugins/inventory/ovirt.ini
create mode 100755 plugins/inventory/ovirt.py
diff --git a/plugins/inventory/ovirt.ini b/plugins/inventory/ovirt.ini
new file mode 100644
index 0000000000..2ea05dc55e
--- /dev/null
+++ b/plugins/inventory/ovirt.ini
@@ -0,0 +1,34 @@
+#!/usr/bin/python
+# Copyright 2013 Google Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+
+# Author: Josha Inglis based on the gce.ini by Eric Johnson
+
+[ovirt]
+# ovirt Service Account configuration information can be stored in the
+# libcloud 'secrets.py' file. Ideally, the 'secrets.py' file will already
+# exist in your PYTHONPATH and be picked up automatically with an import
+# statement in the inventory script. However, you can specify an absolute
+# path to the secrets.py file with 'libcloud_secrets' parameter.
+ovirt_api_secrets =
+
+# If you are not going to use a 'secrets.py' file, you can set the necessary
+# authorization parameters here.
+ovirt_url =
+ovirt_username =
+ovirt_password =
diff --git a/plugins/inventory/ovirt.py b/plugins/inventory/ovirt.py
new file mode 100755
index 0000000000..6ce28bc2f3
--- /dev/null
+++ b/plugins/inventory/ovirt.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+# Copyright 2015 IIX Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+ovirt external inventory script
+=================================
+
+Generates inventory that Ansible can understand by making API requests to
+oVirt via the ovirt-engine-sdk-python library. Full install/configuration
+instructions for the ovirt* modules can be found in the comments of
+ansible/test/ovirt_tests.py.
+
+When run against a specific host, this script returns the following variables
+based on the data obtained from the ovirt_sdk Node object:
+ - ovirt_uuid
+ - ovirt_id
+ - ovirt_image
+ - ovirt_machine_type
+ - ovirt_ips
+ - ovirt_name
+ - ovirt_description
+ - ovirt_status
+ - ovirt_zone
+ - ovirt_tags
+ - ovirt_stats
+
+When run in --list mode, instances are grouped by the following categories:
+
+ - zone:
+ zone group name.
+ - instance tags:
+ An entry is created for each tag. For example, if you have two instances
+ with a common tag called 'foo', they will both be grouped together under
+ the 'tag_foo' name.
+ - network name:
+ the name of the network is appended to 'network_' (e.g. the 'default'
+ network will result in a group named 'network_default')
+ - running status:
+ group name prefixed with 'status_' (e.g. status_up, status_down,..)
+
+Examples:
+ Execute uname on all instances in the us-central1-a zone
+ $ ansible -i ovirt.py us-central1-a -m shell -a "/bin/uname -a"
+
+ Use the ovirt inventory script to print out instance specific information
+ $ plugins/inventory/ovirt.py --host my_instance
+
+Author: Josha Inglis based on the gce.py by Eric Johnson
+Version: 0.0.1
+"""
+
+USER_AGENT_PRODUCT = "Ansible-ovirt_inventory_plugin"
+USER_AGENT_VERSION = "v1"
+
+import sys
+import os
+import argparse
+import ConfigParser
+from collections import defaultdict
+
+try:
+ import json
+except ImportError:
+ # noinspection PyUnresolvedReferences,PyPackageRequirements
+ import simplejson as json
+
+try:
+ # noinspection PyUnresolvedReferences
+ from ovirtsdk.api import API
+ # noinspection PyUnresolvedReferences
+ from ovirtsdk.xml import params
+except ImportError:
+ print("ovirt inventory script requires ovirt-engine-sdk-python")
+ sys.exit(1)
+
+
+class OVirtInventory(object):
+ def __init__(self):
+ # Read settings and parse CLI arguments
+ self.args = self.parse_cli_args()
+ self.driver = self.get_ovirt_driver()
+
+ # Just display data for specific host
+ if self.args.host:
+ print self.json_format_dict(
+ self.node_to_dict(self.get_instance(self.args.host)),
+ pretty=self.args.pretty
+ )
+ sys.exit(0)
+
+ # Otherwise, assume user wants all instances grouped
+ print(
+ self.json_format_dict(
+ data=self.group_instances(),
+ pretty=self.args.pretty
+ )
+ )
+ sys.exit(0)
+
+ @staticmethod
+ def get_ovirt_driver():
+ """
+ Determine the ovirt authorization settings and return a ovirt_sdk driver.
+
+ :rtype : ovirtsdk.api.API
+ """
+ kwargs = {}
+
+ ovirt_ini_default_path = os.path.join(
+ os.path.dirname(os.path.realpath(__file__)), "ovirt.ini")
+ ovirt_ini_path = os.environ.get('OVIRT_INI_PATH', ovirt_ini_default_path)
+
+ # Create a ConfigParser.
+ # This provides empty defaults to each key, so that environment
+ # variable configuration (as opposed to INI configuration) is able
+ # to work.
+ config = ConfigParser.SafeConfigParser(defaults={
+ 'ovirt_url': '',
+ 'ovirt_username': '',
+ 'ovirt_password': '',
+ 'ovirt_api_secrets': '',
+ })
+ if 'ovirt' not in config.sections():
+ config.add_section('ovirt')
+ config.read(ovirt_ini_path)
+
+ # Attempt to get ovirt params from a configuration file, if one
+ # exists.
+ secrets_path = config.get('ovirt', 'ovirt_api_secrets')
+ secrets_found = False
+ try:
+ # noinspection PyUnresolvedReferences,PyPackageRequirements
+ import secrets
+
+ kwargs = getattr(secrets, 'OVIRT_KEYWORD_PARAMS', {})
+ secrets_found = True
+ except ImportError:
+ pass
+
+ if not secrets_found and secrets_path:
+ if not secrets_path.endswith('secrets.py'):
+ err = "Must specify ovirt_sdk secrets file as /absolute/path/to/secrets.py"
+ print(err)
+ sys.exit(1)
+ sys.path.append(os.path.dirname(secrets_path))
+ try:
+ # noinspection PyUnresolvedReferences,PyPackageRequirements
+ import secrets
+
+ kwargs = getattr(secrets, 'OVIRT_KEYWORD_PARAMS', {})
+ except ImportError:
+ pass
+ if not secrets_found:
+ kwargs = {
+ 'url': config.get('ovirt', 'ovirt_url'),
+ 'username': config.get('ovirt', 'ovirt_username'),
+ 'password': config.get('ovirt', 'ovirt_password'),
+ }
+
+ # If the appropriate environment variables are set, they override
+ # other configuration; process those into our args and kwargs.
+ kwargs['url'] = os.environ.get('OVIRT_URL')
+ kwargs['username'] = os.environ.get('OVIRT_EMAIL')
+ kwargs['password'] = os.environ.get('OVIRT_PASS')
+
+ # Retrieve and return the ovirt driver.
+ return API(insecure=True, **kwargs)
+
+ @staticmethod
+ def parse_cli_args():
+ """
+ Command line argument processing
+
+ :rtype : argparse.Namespace
+ """
+
+ parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on ovirt')
+ parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
+ parser.add_argument('--host', action='store', help='Get all information about an instance')
+ parser.add_argument('--pretty', action='store_true', default=False, help='Pretty format (default: False)')
+ return parser.parse_args()
+
+ def node_to_dict(self, inst):
+ """
+ :type inst: params.VM
+ """
+ if inst is None:
+ return {}
+
+ inst.get_custom_properties()
+ ips = [ip.get_address() for ip in inst.get_guest_info().get_ips().get_ip()] \
+ if inst.get_guest_info() is not None else []
+ stats = {y.get_name(): y.get_values().get_value()[0].get_datum() for y in inst.get_statistics().list()}
+
+ return {
+ 'ovirt_uuid': inst.get_id(),
+ 'ovirt_id': inst.get_id(),
+ 'ovirt_image': inst.get_os().get_type(),
+ 'ovirt_machine_type': inst.get_instance_type(),
+ 'ovirt_ips': ips,
+ 'ovirt_name': inst.get_name(),
+ 'ovirt_description': inst.get_description(),
+ 'ovirt_status': inst.get_status().get_state(),
+ 'ovirt_zone': inst.get_cluster().get_id(),
+ 'ovirt_tags': self.get_tags(inst),
+ 'ovirt_stats': stats,
+ # Hosts don't have a public name, so we add an IP
+ 'ansible_ssh_host': ips[0] if len(ips) > 0 else None
+ }
+
+ @staticmethod
+ def get_tags(inst):
+ """
+ :type inst: params.VM
+ """
+ return [x.get_name() for x in inst.get_tags().list()]
+
+ # noinspection PyBroadException,PyUnusedLocal
+ def get_instance(self, instance_name):
+ """Gets details about a specific instance """
+ try:
+ return self.driver.vms.get(name=instance_name)
+ except Exception as e:
+ return None
+
+ def group_instances(self):
+ """Group all instances"""
+ groups = defaultdict(list)
+ meta = {"hostvars": {}}
+
+ for node in self.driver.vms.list():
+ assert isinstance(node, params.VM)
+ name = node.get_name()
+
+ meta["hostvars"][name] = self.node_to_dict(node)
+
+ zone = node.get_cluster().get_name()
+ groups[zone].append(name)
+
+ tags = self.get_tags(node)
+ for t in tags:
+ tag = 'tag_%s' % t
+ groups[tag].append(name)
+
+ nets = [x.get_name() for x in node.get_nics().list()]
+ for net in nets:
+ net = 'network_%s' % net
+ groups[net].append(name)
+
+ status = node.get_status().get_state()
+ stat = 'status_%s' % status.lower()
+ if stat in groups:
+ groups[stat].append(name)
+ else:
+ groups[stat] = [name]
+
+ groups["_meta"] = meta
+
+ return groups
+
+ @staticmethod
+ def json_format_dict(data, pretty=False):
+ """ Converts a dict to a JSON object and dumps it as a formatted
+ string """
+
+ if pretty:
+ return json.dumps(data, sort_keys=True, indent=2)
+ else:
+ return json.dumps(data)
+
+# Run the script
+OVirtInventory()
From 76923915685be979a265efd291c4504f120406eb Mon Sep 17 00:00:00 2001
From: joshainglis
Date: Thu, 4 Jun 2015 17:35:10 +1000
Subject: [PATCH 195/971] Removed some text
---
plugins/inventory/ovirt.py | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/plugins/inventory/ovirt.py b/plugins/inventory/ovirt.py
index 6ce28bc2f3..bccd83de86 100755
--- a/plugins/inventory/ovirt.py
+++ b/plugins/inventory/ovirt.py
@@ -21,9 +21,7 @@ ovirt external inventory script
=================================
Generates inventory that Ansible can understand by making API requests to
-oVirt via the ovirt-engine-sdk-python library. Full install/configuration
-instructions for the ovirt* modules can be found in the comments of
-ansible/test/ovirt_tests.py.
+oVirt via the ovirt-engine-sdk-python library.
When run against a specific host, this script returns the following variables
based on the data obtained from the ovirt_sdk Node object:
From 23460e64800d762a831449cbbbaedd2fab16fa6a Mon Sep 17 00:00:00 2001
From: joshainglis
Date: Thu, 4 Jun 2015 17:59:53 +1000
Subject: [PATCH 196/971] Removed a dictionary comprehension for python 2.6
support
---
plugins/inventory/ovirt.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/plugins/inventory/ovirt.py b/plugins/inventory/ovirt.py
index bccd83de86..4cb4b09eae 100755
--- a/plugins/inventory/ovirt.py
+++ b/plugins/inventory/ovirt.py
@@ -203,7 +203,9 @@ class OVirtInventory(object):
inst.get_custom_properties()
ips = [ip.get_address() for ip in inst.get_guest_info().get_ips().get_ip()] \
if inst.get_guest_info() is not None else []
- stats = {y.get_name(): y.get_values().get_value()[0].get_datum() for y in inst.get_statistics().list()}
+ stats = {}
+ for stat in inst.get_statistics().list():
+ stats[stat.get_name()] = stat.get_values().get_value()[0].get_datum()
return {
'ovirt_uuid': inst.get_id(),
From 6a97e49a06effe5d650fe31a1eae2d98fdddc58e Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Thu, 4 Jun 2015 08:15:25 -0500
Subject: [PATCH 197/971] Re-introduce ssh connection private key support
---
lib/ansible/plugins/connections/ssh.py | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index b3ada343c0..1d79cb4e90 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -95,11 +95,8 @@ class Connection(ConnectionBase):
if self._connection_info.port is not None:
self._common_args += ("-o", "Port={0}".format(self._connection_info.port))
- # FIXME: need to get this from connection info
- #if self.private_key_file is not None:
- # self._common_args += ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(self.private_key_file)))
- #elif self.runner.private_key_file is not None:
- # self._common_args += ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(self.runner.private_key_file)))
+ if self._connection_info.private_key_file is not None:
+ self._common_args += ("-o", "IdentityFile=\"{0}\"".format(os.path.expanduser(self._connection_info.private_key_file)))
if self._connection_info.password:
self._common_args += ("-o", "GSSAPIAuthentication=no",
"-o", "PubkeyAuthentication=no")
From 23cbfc17e5eca7dc9393260dbe43011f73b65a4d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Javier=20Mart=C3=ADnez?=
Date: Thu, 4 Jun 2015 17:52:37 +0200
Subject: [PATCH 198/971] Fixed Github examples directory URL
---
docsite/rst/YAMLSyntax.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docsite/rst/YAMLSyntax.rst b/docsite/rst/YAMLSyntax.rst
index d3eb843523..76683f6ba3 100644
--- a/docsite/rst/YAMLSyntax.rst
+++ b/docsite/rst/YAMLSyntax.rst
@@ -107,7 +107,7 @@ with a "{", YAML will think it is a dictionary, so you must quote it, like so::
Learn what playbooks can do and how to write/run them.
    `YAMLLint <http://yamllint.com/>`_
YAML Lint (online) helps you debug YAML syntax if you are having problems
 -   `Github examples directory <https://github.com/ansible/ansible/tree/devel/examples/playbooks>`_
 +   `Github examples directory <https://github.com/ansible/ansible-examples>`_
Complete playbook files from the github project source
    `Mailing List <http://groups.google.com/group/ansible-project>`_
Questions? Help? Ideas? Stop by the list on Google Groups
From ccb8bcebd3a86ce6d30621cc85e32762b53dfe9a Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Thu, 4 Jun 2015 11:34:56 -0500
Subject: [PATCH 199/971] Resync the v1 directory with v1_last. Fixes #11162
---
v1/ansible/constants.py | 8 +-
v1/ansible/inventory/__init__.py | 4 +-
v1/ansible/module_utils/basic.py | 147 ++++++++++++--------
v1/ansible/module_utils/cloudstack.py | 2 -
v1/ansible/module_utils/facts.py | 48 ++++++-
v1/ansible/module_utils/powershell.ps1 | 4 +-
v1/ansible/module_utils/urls.py | 49 ++++---
v1/ansible/runner/connection_plugins/ssh.py | 67 ++-------
v1/ansible/utils/__init__.py | 8 +-
v1/ansible/utils/module_docs.py | 11 +-
10 files changed, 200 insertions(+), 148 deletions(-)
diff --git a/v1/ansible/constants.py b/v1/ansible/constants.py
index a9b4f40bb8..2cdc08d8ce 100644
--- a/v1/ansible/constants.py
+++ b/v1/ansible/constants.py
@@ -134,7 +134,10 @@ DEFAULT_SU_FLAGS = get_config(p, DEFAULTS, 'su_flags', 'ANSIBLE_SU_FLAG
DEFAULT_SU_USER = get_config(p, DEFAULTS, 'su_user', 'ANSIBLE_SU_USER', 'root')
DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_SU_PASS', False, boolean=True)
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
-DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
+DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
+
+# selinux
+DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf', islist=True)
#TODO: get rid of ternary chain mess
BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
@@ -176,6 +179,9 @@ DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks'
DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
+RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
+RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
+
# CONNECTION RELATED
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', "%(directory)s/ansible-ssh-%%h-%%p-%%r")
diff --git a/v1/ansible/inventory/__init__.py b/v1/ansible/inventory/__init__.py
index 2048046d3c..f012246e22 100644
--- a/v1/ansible/inventory/__init__.py
+++ b/v1/ansible/inventory/__init__.py
@@ -36,7 +36,7 @@ class Inventory(object):
Host inventory for ansible.
"""
- __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
+ __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
'_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']
@@ -53,7 +53,7 @@ class Inventory(object):
self._vars_per_host = {}
self._vars_per_group = {}
self._hosts_cache = {}
- self._groups_list = {}
+ self._groups_list = {}
self._pattern_cache = {}
# to be set by calling set_playbook_basedir by playbook code
diff --git a/v1/ansible/module_utils/basic.py b/v1/ansible/module_utils/basic.py
index 54a1a9cfff..e772a12efc 100644
--- a/v1/ansible/module_utils/basic.py
+++ b/v1/ansible/module_utils/basic.py
@@ -38,6 +38,8 @@ BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
+SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
+
# ansible modules can be written in any language. To simplify
# development of Python modules, the functions available here
# can be inserted in any module source automatically by including
@@ -181,7 +183,8 @@ def get_distribution():
''' return the distribution name '''
if platform.system() == 'Linux':
try:
- distribution = platform.linux_distribution()[0].capitalize()
+ supported_dists = platform._supported_dists + ('arch',)
+ distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
if not distribution and os.path.isfile('/etc/system-release'):
distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
if 'Amazon' in distribution:
@@ -334,7 +337,8 @@ class AnsibleModule(object):
def __init__(self, argument_spec, bypass_checks=False, no_log=False,
check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
- required_one_of=None, add_file_common_args=False, supports_check_mode=False):
+ required_one_of=None, add_file_common_args=False, supports_check_mode=False,
+ required_if=None):
'''
common code for quickly building an ansible module in Python
@@ -382,6 +386,7 @@ class AnsibleModule(object):
self._check_argument_types()
self._check_required_together(required_together)
self._check_required_one_of(required_one_of)
+ self._check_required_if(required_if)
self._set_defaults(pre=False)
if not self.no_log:
@@ -528,10 +533,10 @@ class AnsibleModule(object):
path = os.path.dirname(path)
return path
- def is_nfs_path(self, path):
+ def is_special_selinux_path(self, path):
"""
- Returns a tuple containing (True, selinux_context) if the given path
- is on a NFS mount point, otherwise the return will be (False, None).
+ Returns a tuple containing (True, selinux_context) if the given path is on a
+ NFS or other 'special' fs mount point, otherwise the return will be (False, None).
"""
try:
f = open('/proc/mounts', 'r')
@@ -542,9 +547,13 @@ class AnsibleModule(object):
path_mount_point = self.find_mount_point(path)
for line in mount_data:
(device, mount_point, fstype, options, rest) = line.split(' ', 4)
- if path_mount_point == mount_point and 'nfs' in fstype:
- nfs_context = self.selinux_context(path_mount_point)
- return (True, nfs_context)
+
+ if path_mount_point == mount_point:
+ for fs in SELINUX_SPECIAL_FS.split(','):
+ if fs in fstype:
+ special_context = self.selinux_context(path_mount_point)
+ return (True, special_context)
+
return (False, None)
def set_default_selinux_context(self, path, changed):
@@ -562,9 +571,9 @@ class AnsibleModule(object):
# Iterate over the current context instead of the
# argument context, which may have selevel.
- (is_nfs, nfs_context) = self.is_nfs_path(path)
- if is_nfs:
- new_context = nfs_context
+ (is_special_se, sp_context) = self.is_special_selinux_path(path)
+ if is_special_se:
+ new_context = sp_context
else:
for i in range(len(cur_context)):
if len(context) > i:
@@ -861,6 +870,7 @@ class AnsibleModule(object):
locale.setlocale(locale.LC_ALL, 'C')
os.environ['LANG'] = 'C'
os.environ['LC_CTYPE'] = 'C'
+ os.environ['LC_MESSAGES'] = 'C'
except Exception, e:
self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
@@ -950,6 +960,20 @@ class AnsibleModule(object):
if len(missing) > 0:
self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
+ def _check_required_if(self, spec):
+ ''' ensure that parameters which conditionally required are present '''
+ if spec is None:
+ return
+ for (key, val, requirements) in spec:
+ missing = []
+ if key in self.params and self.params[key] == val:
+ for check in requirements:
+ count = self._count_terms(check)
+ if count == 0:
+ missing.append(check)
+ if len(missing) > 0:
+ self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
+
def _check_argument_values(self):
''' ensure all arguments have the requested values, and there are no stray arguments '''
for (k,v) in self.argument_spec.iteritems():
@@ -1009,57 +1033,60 @@ class AnsibleModule(object):
value = self.params[k]
is_invalid = False
- if wanted == 'str':
- if not isinstance(value, basestring):
- self.params[k] = str(value)
- elif wanted == 'list':
- if not isinstance(value, list):
- if isinstance(value, basestring):
- self.params[k] = value.split(",")
- elif isinstance(value, int) or isinstance(value, float):
- self.params[k] = [ str(value) ]
- else:
- is_invalid = True
- elif wanted == 'dict':
- if not isinstance(value, dict):
- if isinstance(value, basestring):
- if value.startswith("{"):
- try:
- self.params[k] = json.loads(value)
- except:
- (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
- if exc is not None:
- self.fail_json(msg="unable to evaluate dictionary for %s" % k)
- self.params[k] = result
- elif '=' in value:
- self.params[k] = dict([x.strip().split("=", 1) for x in value.split(",")])
+ try:
+ if wanted == 'str':
+ if not isinstance(value, basestring):
+ self.params[k] = str(value)
+ elif wanted == 'list':
+ if not isinstance(value, list):
+ if isinstance(value, basestring):
+ self.params[k] = value.split(",")
+ elif isinstance(value, int) or isinstance(value, float):
+ self.params[k] = [ str(value) ]
else:
- self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
- else:
- is_invalid = True
- elif wanted == 'bool':
- if not isinstance(value, bool):
- if isinstance(value, basestring):
- self.params[k] = self.boolean(value)
- else:
- is_invalid = True
- elif wanted == 'int':
- if not isinstance(value, int):
- if isinstance(value, basestring):
- self.params[k] = int(value)
- else:
- is_invalid = True
- elif wanted == 'float':
- if not isinstance(value, float):
- if isinstance(value, basestring):
- self.params[k] = float(value)
- else:
- is_invalid = True
- else:
- self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
+ is_invalid = True
+ elif wanted == 'dict':
+ if not isinstance(value, dict):
+ if isinstance(value, basestring):
+ if value.startswith("{"):
+ try:
+ self.params[k] = json.loads(value)
+ except:
+ (result, exc) = self.safe_eval(value, dict(), include_exceptions=True)
+ if exc is not None:
+ self.fail_json(msg="unable to evaluate dictionary for %s" % k)
+ self.params[k] = result
+ elif '=' in value:
+ self.params[k] = dict([x.strip().split("=", 1) for x in value.split(",")])
+ else:
+ self.fail_json(msg="dictionary requested, could not parse JSON or key=value")
+ else:
+ is_invalid = True
+ elif wanted == 'bool':
+ if not isinstance(value, bool):
+ if isinstance(value, basestring):
+ self.params[k] = self.boolean(value)
+ else:
+ is_invalid = True
+ elif wanted == 'int':
+ if not isinstance(value, int):
+ if isinstance(value, basestring):
+ self.params[k] = int(value)
+ else:
+ is_invalid = True
+ elif wanted == 'float':
+ if not isinstance(value, float):
+ if isinstance(value, basestring):
+ self.params[k] = float(value)
+ else:
+ is_invalid = True
+ else:
+ self.fail_json(msg="implementation error: unknown type %s requested for %s" % (wanted, k))
- if is_invalid:
- self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
+ if is_invalid:
+ self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
+ except ValueError, e:
+ self.fail_json(msg="value of argument %s is not of type %s and we were unable to automatically convert" % (k, wanted))
def _set_defaults(self, pre=True):
for (k,v) in self.argument_spec.iteritems():
diff --git a/v1/ansible/module_utils/cloudstack.py b/v1/ansible/module_utils/cloudstack.py
index 82306b9a0b..e887367c2f 100644
--- a/v1/ansible/module_utils/cloudstack.py
+++ b/v1/ansible/module_utils/cloudstack.py
@@ -64,14 +64,12 @@ class AnsibleCloudStack:
api_secret = self.module.params.get('secret_key')
api_url = self.module.params.get('api_url')
api_http_method = self.module.params.get('api_http_method')
- api_timeout = self.module.params.get('api_timeout')
if api_key and api_secret and api_url:
self.cs = CloudStack(
endpoint=api_url,
key=api_key,
secret=api_secret,
- timeout=api_timeout,
method=api_http_method
)
else:
diff --git a/v1/ansible/module_utils/facts.py b/v1/ansible/module_utils/facts.py
index b223c5f5f7..1162e05b9c 100644
--- a/v1/ansible/module_utils/facts.py
+++ b/v1/ansible/module_utils/facts.py
@@ -99,8 +99,9 @@ class Facts(object):
('/etc/os-release', 'SuSE'),
('/etc/gentoo-release', 'Gentoo'),
('/etc/os-release', 'Debian'),
+ ('/etc/lsb-release', 'Mandriva'),
('/etc/os-release', 'NA'),
- ('/etc/lsb-release', 'Mandriva'))
+ )
SELINUX_MODE_DICT = { 1: 'enforcing', 0: 'permissive', -1: 'disabled' }
# A list of dicts. If there is a platform with more than one
@@ -416,11 +417,13 @@ class Facts(object):
self.facts['distribution_version'] = self.facts['distribution_version'] + '.' + release.group(1)
elif name == 'Debian':
data = get_file_content(path)
- if 'Debian' in data or 'Raspbian' in data:
+ if 'Ubuntu' in data:
+ break # Ubuntu gets correct info from python functions
+ elif 'Debian' in data or 'Raspbian' in data:
release = re.search("PRETTY_NAME=[^(]+ \(?([^)]+?)\)", data)
if release:
self.facts['distribution_release'] = release.groups()[0]
- break
+ break
elif name == 'Mandriva':
data = get_file_content(path)
if 'Mandriva' in data:
@@ -2160,7 +2163,7 @@ class DarwinNetwork(GenericBsdIfconfigNetwork, Network):
current_if['media'] = 'Unknown' # Mac does not give us this
current_if['media_select'] = words[1]
if len(words) > 2:
- current_if['media_type'] = words[2][1:]
+ current_if['media_type'] = words[2][1:-1]
if len(words) > 3:
current_if['media_options'] = self.get_options(words[3])
@@ -2545,6 +2548,43 @@ class LinuxVirtual(Virtual):
self.facts['virtualization_role'] = 'NA'
return
+class FreeBSDVirtual(Virtual):
+ """
+ This is a FreeBSD-specific subclass of Virtual. It defines
+ - virtualization_type
+ - virtualization_role
+ """
+ platform = 'FreeBSD'
+
+ def __init__(self):
+ Virtual.__init__(self)
+
+ def populate(self):
+ self.get_virtual_facts()
+ return self.facts
+
+ def get_virtual_facts(self):
+ self.facts['virtualization_type'] = ''
+ self.facts['virtualization_role'] = ''
+
+class OpenBSDVirtual(Virtual):
+ """
+ This is a OpenBSD-specific subclass of Virtual. It defines
+ - virtualization_type
+ - virtualization_role
+ """
+ platform = 'OpenBSD'
+
+ def __init__(self):
+ Virtual.__init__(self)
+
+ def populate(self):
+ self.get_virtual_facts()
+ return self.facts
+
+ def get_virtual_facts(self):
+ self.facts['virtualization_type'] = ''
+ self.facts['virtualization_role'] = ''
class HPUXVirtual(Virtual):
"""
diff --git a/v1/ansible/module_utils/powershell.ps1 b/v1/ansible/module_utils/powershell.ps1
index ee7d3ddeca..9606f47783 100644
--- a/v1/ansible/module_utils/powershell.ps1
+++ b/v1/ansible/module_utils/powershell.ps1
@@ -65,7 +65,7 @@ Function Exit-Json($obj)
$obj = New-Object psobject
}
- echo $obj | ConvertTo-Json -Depth 99
+ echo $obj | ConvertTo-Json -Compress -Depth 99
Exit
}
@@ -89,7 +89,7 @@ Function Fail-Json($obj, $message = $null)
Set-Attr $obj "msg" $message
Set-Attr $obj "failed" $true
- echo $obj | ConvertTo-Json -Depth 99
+ echo $obj | ConvertTo-Json -Compress -Depth 99
Exit 1
}
diff --git a/v1/ansible/module_utils/urls.py b/v1/ansible/module_utils/urls.py
index d56cc89395..18317e86ae 100644
--- a/v1/ansible/module_utils/urls.py
+++ b/v1/ansible/module_utils/urls.py
@@ -50,6 +50,15 @@ try:
except:
HAS_SSL=False
+HAS_MATCH_HOSTNAME = True
+try:
+ from ssl import match_hostname, CertificateError
+except ImportError:
+ try:
+ from backports.ssl_match_hostname import match_hostname, CertificateError
+ except ImportError:
+ HAS_MATCH_HOSTNAME = False
+
import httplib
import os
import re
@@ -293,11 +302,13 @@ class SSLValidationHandler(urllib2.BaseHandler):
connect_result = s.recv(4096)
self.validate_proxy_response(connect_result)
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+ match_hostname(ssl_s.getpeercert(), self.hostname)
else:
self.module.fail_json(msg='Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
else:
s.connect((self.hostname, self.port))
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+ match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
#ssl_s.unwrap()
s.close()
@@ -311,6 +322,9 @@ class SSLValidationHandler(urllib2.BaseHandler):
'Use validate_certs=no or make sure your managed systems have a valid CA certificate installed. ' + \
'Paths checked for this platform: %s' % ", ".join(paths_checked)
)
+ except CertificateError:
+ self.module.fail_json(msg="SSL Certificate does not belong to %s. Make sure the url has a certificate that belongs to it or use validate_certs=no (insecure)" % self.hostname)
+
try:
# cleanup the temp file created, don't worry
# if it fails for some reason
@@ -363,28 +377,29 @@ def fetch_url(module, url, data=None, headers=None, method=None,
# FIXME: change the following to use the generic_urlparse function
# to remove the indexed references for 'parsed'
parsed = urlparse.urlparse(url)
- if parsed[0] == 'https':
- if not HAS_SSL and validate_certs:
+ if parsed[0] == 'https' and validate_certs:
+ if not HAS_SSL:
if distribution == 'Redhat':
module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended. You can also install python-ssl from EPEL')
else:
module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended')
+ if not HAS_MATCH_HOSTNAME:
+ module.fail_json(msg='Available SSL validation does not check that the certificate matches the hostname. You can install backports.ssl_match_hostname or update your managed machine to python-2.7.9 or newer. You could also use validate_certs=no, however this is unsafe and not recommended')
- elif validate_certs:
- # do the cert validation
- netloc = parsed[1]
- if '@' in netloc:
- netloc = netloc.split('@', 1)[1]
- if ':' in netloc:
- hostname, port = netloc.split(':', 1)
- port = int(port)
- else:
- hostname = netloc
- port = 443
- # create the SSL validation handler and
- # add it to the list of handlers
- ssl_handler = SSLValidationHandler(module, hostname, port)
- handlers.append(ssl_handler)
+ # do the cert validation
+ netloc = parsed[1]
+ if '@' in netloc:
+ netloc = netloc.split('@', 1)[1]
+ if ':' in netloc:
+ hostname, port = netloc.split(':', 1)
+ port = int(port)
+ else:
+ hostname = netloc
+ port = 443
+ # create the SSL validation handler and
+ # add it to the list of handlers
+ ssl_handler = SSLValidationHandler(module, hostname, port)
+ handlers.append(ssl_handler)
if parsed[0] != 'ftp':
username = module.params.get('url_username', '')
diff --git a/v1/ansible/runner/connection_plugins/ssh.py b/v1/ansible/runner/connection_plugins/ssh.py
index ff7e8e03c8..036175f6a9 100644
--- a/v1/ansible/runner/connection_plugins/ssh.py
+++ b/v1/ansible/runner/connection_plugins/ssh.py
@@ -16,22 +16,21 @@
# along with Ansible. If not, see .
#
-import fcntl
-import gettext
-import hmac
import os
-import pipes
-import pty
-import pwd
-import random
import re
-import select
-import shlex
import subprocess
-import time
+import shlex
+import pipes
+import random
+import select
+import fcntl
+import hmac
+import pwd
+import gettext
+import pty
from hashlib import sha1
import ansible.constants as C
-from ansible.callbacks import vvv, vv
+from ansible.callbacks import vvv
from ansible import errors
from ansible import utils
@@ -257,51 +256,7 @@ class Connection(object):
vvv("EXEC previous known host file not found for %s" % host)
return True
- def exec_command(self, *args, **kwargs):
- """ Wrapper around _exec_command to retry in the case of an ssh
- failure
-
- Will retry if:
- * an exception is caught
- * ssh returns 255
-
- Will not retry if
- * remaining_tries is <2
- * retries limit reached
- """
- remaining_tries = C.get_config(
- C.p, 'ssh_connection', 'retries',
- 'ANSIBLE_SSH_RETRIES', 3, integer=True) + 1
- cmd_summary = "%s %s..." % (args[0], str(kwargs)[:200])
- for attempt in xrange(remaining_tries):
- pause = 2 ** attempt - 1
- if pause > 30:
- pause = 30
- time.sleep(pause)
- try:
- return_tuple = self._exec_command(*args, **kwargs)
- except Exception as e:
- msg = ("ssh_retry: attempt: %d, caught exception(%s) from cmd "
- "(%s).") % (attempt, e, cmd_summary)
- vv(msg)
- if attempt == remaining_tries - 1:
- raise e
- else:
- continue
- # 0 = success
- # 1-254 = remote command return code
- # 255 = failure from the ssh command itself
- if return_tuple[0] != 255:
- break
- else:
- msg = ('ssh_retry: attempt: %d, ssh return code is 255. cmd '
- '(%s).') % (attempt, cmd_summary)
- vv(msg)
-
- return return_tuple
-
-
- def _exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
+ def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
diff --git a/v1/ansible/utils/__init__.py b/v1/ansible/utils/__init__.py
index 7ed07a54c8..eb6fa2a712 100644
--- a/v1/ansible/utils/__init__.py
+++ b/v1/ansible/utils/__init__.py
@@ -1024,9 +1024,9 @@ def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
if runas_opts:
# priv user defaults to root later on to enable detecting when this option was given here
- parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
+ parser.add_option('-K', '--ask-sudo-pass', default=constants.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
help='ask for sudo password (deprecated, use become)')
- parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
+ parser.add_option('--ask-su-pass', default=constants.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
help='ask for su password (deprecated, use become)')
parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true", dest='sudo',
help="run operations with sudo (nopasswd) (deprecated, use become)")
@@ -1617,7 +1617,9 @@ def _load_vars_from_folder(folder_path, results, vault_password=None):
names.sort()
# do not parse hidden files or dirs, e.g. .svn/
- paths = [os.path.join(folder_path, name) for name in names if not name.startswith('.')]
+ paths = [os.path.join(folder_path, name) for name in names
+ if not name.startswith('.')
+ and os.path.splitext(name)[1] in C.YAML_FILENAME_EXTENSIONS]
for path in paths:
_found, results = _load_vars_from_path(path, results, vault_password=vault_password)
return results
diff --git a/v1/ansible/utils/module_docs.py b/v1/ansible/utils/module_docs.py
index ee99af2cb5..c692057172 100644
--- a/v1/ansible/utils/module_docs.py
+++ b/v1/ansible/utils/module_docs.py
@@ -23,6 +23,8 @@ import ast
import yaml
import traceback
+from collections import MutableMapping, MutableSet, MutableSequence
+
from ansible import utils
# modules that are ok that they do not have documentation strings
@@ -86,7 +88,14 @@ def get_docstring(filename, verbose=False):
if not doc.has_key(key):
doc[key] = value
else:
- doc[key].update(value)
+ if isinstance(doc[key], MutableMapping):
+ doc[key].update(value)
+ elif isinstance(doc[key], MutableSet):
+ doc[key].add(value)
+ elif isinstance(doc[key], MutableSequence):
+ doc[key] = sorted(frozenset(doc[key] + value))
+ else:
+ raise Exception("Attempt to extend a documentation fragment of unknown type")
if 'EXAMPLES' in (t.id for t in child.targets):
plainexamples = child.value.s[1:] # Skip first empty line
From f3f3fb7c491effe9e61ae5a429ac796558c2963a Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 4 Jun 2015 13:54:39 -0400
Subject: [PATCH 200/971] Fixing vars_prompt
---
lib/ansible/executor/task_queue_manager.py | 64 ++++++++++++++++++++++
lib/ansible/playbook/play.py | 5 +-
lib/ansible/plugins/callback/__init__.py | 2 -
lib/ansible/plugins/callback/default.py | 4 +-
4 files changed, 70 insertions(+), 5 deletions(-)
diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py
index a875c310d5..b8ca427370 100644
--- a/lib/ansible/executor/task_queue_manager.py
+++ b/lib/ansible/executor/task_queue_manager.py
@@ -19,6 +19,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import getpass
import multiprocessing
import os
import socket
@@ -150,6 +151,50 @@ class TaskQueueManager:
return loaded_plugins
+ def _do_var_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
+
+ if prompt and default is not None:
+ msg = "%s [%s]: " % (prompt, default)
+ elif prompt:
+ msg = "%s: " % prompt
+ else:
+ msg = 'input for %s: ' % varname
+
+ def do_prompt(prompt, private):
+ if sys.stdout.encoding:
+ msg = prompt.encode(sys.stdout.encoding)
+ else:
+ # when piping the output, or at other times when stdout
+ # may not be the standard file descriptor, the stdout
+ # encoding may not be set, so default to something sane
+ msg = prompt.encode(locale.getpreferredencoding())
+ if private:
+ return getpass.getpass(msg)
+ return raw_input(msg)
+
+ if confirm:
+ while True:
+ result = do_prompt(msg, private)
+ second = do_prompt("confirm " + msg, private)
+ if result == second:
+ break
+ display("***** VALUES ENTERED DO NOT MATCH ****")
+ else:
+ result = do_prompt(msg, private)
+
+ # if result is false and default is not None
+ if not result and default is not None:
+ result = default
+
+ # FIXME: make this work with vault or whatever this old method was
+ #if encrypt:
+ # result = utils.do_encrypt(result, encrypt, salt_size, salt)
+
+ # handle utf-8 chars
+ # FIXME: make this work
+ #result = to_unicode(result, errors='strict')
+ return result
+
def run(self, play):
'''
Iterates over the roles/tasks in a play, using the given (or default)
@@ -159,6 +204,25 @@ class TaskQueueManager:
are done with the current task).
'''
+ if play.vars_prompt:
+ for var in play.vars_prompt:
+ if 'name' not in var:
+ raise AnsibleError("'vars_prompt' item is missing 'name:'", obj=play._ds)
+
+ vname = var['name']
+ prompt = var.get("prompt", vname)
+ default = var.get("default", None)
+ private = var.get("private", True)
+
+ confirm = var.get("confirm", False)
+ encrypt = var.get("encrypt", None)
+ salt_size = var.get("salt_size", None)
+ salt = var.get("salt", None)
+
+ if vname not in play.vars:
+ self.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default)
+ play.vars[vname] = self._do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default)
+
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=False)
diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py
index c891571a98..49a986555c 100644
--- a/lib/ansible/playbook/play.py
+++ b/lib/ansible/playbook/play.py
@@ -62,7 +62,7 @@ class Play(Base, Taggable, Become):
# Variable Attributes
_vars_files = FieldAttribute(isa='list', default=[])
- _vars_prompt = FieldAttribute(isa='dict', default=dict())
+ _vars_prompt = FieldAttribute(isa='list', default=[])
_vault_password = FieldAttribute(isa='string')
# Block (Task) Lists Attributes
@@ -116,6 +116,9 @@ class Play(Base, Taggable, Become):
ds['remote_user'] = ds['user']
del ds['user']
+ if 'vars_prompt' in ds and not isinstance(ds['vars_prompt'], list):
+ ds['vars_prompt'] = [ ds['vars_prompt'] ]
+
return super(Play, self).preprocess_data(ds)
def _load_vars(self, attr, ds):
diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py
index 2c2e7e74c6..c03f6981d9 100644
--- a/lib/ansible/plugins/callback/__init__.py
+++ b/lib/ansible/plugins/callback/__init__.py
@@ -19,8 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-#from ansible.utils.display import Display
-
__all__ = ["CallbackBase"]
class CallbackBase:
diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py
index de6548ef18..5b50b49cc8 100644
--- a/lib/ansible/plugins/callback/default.py
+++ b/lib/ansible/plugins/callback/default.py
@@ -110,8 +110,8 @@ class CallbackModule(CallbackBase):
def v2_playbook_on_handler_task_start(self, task):
self._display.banner("RUNNING HANDLER [%s]" % task.get_name().strip())
- def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
- pass
+ #def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
+ # pass
def v2_playbook_on_setup(self):
pass
From 9754c67138f77264652606ac26d6e220903dd258 Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Wed, 13 May 2015 10:58:46 -0500
Subject: [PATCH 201/971] Use a decorator to ensure jit connection, instead of
an explicit call to _connect
---
lib/ansible/executor/task_executor.py | 1 -
lib/ansible/plugins/connections/__init__.py | 12 +++++++++++-
lib/ansible/plugins/connections/paramiko_ssh.py | 8 ++++++--
lib/ansible/plugins/connections/ssh.py | 6 +++++-
lib/ansible/plugins/connections/winrm.py | 6 +++++-
5 files changed, 27 insertions(+), 6 deletions(-)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 69cbb63f47..8de8f7027a 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -210,7 +210,6 @@ class TaskExecutor:
# get the connection and the handler for this execution
self._connection = self._get_connection(variables)
self._connection.set_host_overrides(host=self._host)
- self._connection._connect()
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
diff --git a/lib/ansible/plugins/connections/__init__.py b/lib/ansible/plugins/connections/__init__.py
index 897bc58982..da0775530d 100644
--- a/lib/ansible/plugins/connections/__init__.py
+++ b/lib/ansible/plugins/connections/__init__.py
@@ -22,6 +22,7 @@ __metaclass__ = type
from abc import ABCMeta, abstractmethod, abstractproperty
+from functools import wraps
from six import with_metaclass
from ansible import constants as C
@@ -32,7 +33,16 @@ from ansible.errors import AnsibleError
# which may want to output display/logs too
from ansible.utils.display import Display
-__all__ = ['ConnectionBase']
+__all__ = ['ConnectionBase', 'ensure_connect']
+
+
+def ensure_connect(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ self._connect()
+ return func(self, *args, **kwargs)
+ return wrapped
+
class ConnectionBase(with_metaclass(ABCMeta, object)):
'''
diff --git a/lib/ansible/plugins/connections/paramiko_ssh.py b/lib/ansible/plugins/connections/paramiko_ssh.py
index 0d7a82c34b..8beaecf492 100644
--- a/lib/ansible/plugins/connections/paramiko_ssh.py
+++ b/lib/ansible/plugins/connections/paramiko_ssh.py
@@ -41,7 +41,7 @@ from binascii import hexlify
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.plugins.connections import ConnectionBase
+from ansible.plugins.connections import ConnectionBase, ensure_connect
from ansible.utils.path import makedirs_safe
AUTHENTICITY_MSG="""
@@ -61,6 +61,7 @@ with warnings.catch_warnings():
except ImportError:
pass
+
class MyAddPolicy(object):
"""
Based on AutoAddPolicy in paramiko so we can determine when keys are added
@@ -188,6 +189,7 @@ class Connection(ConnectionBase):
return ssh
+ @ensure_connect
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
@@ -248,6 +250,7 @@ class Connection(ConnectionBase):
return (chan.recv_exit_status(), '', no_prompt_out + stdout, no_prompt_out + stderr)
+ @ensure_connect
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
@@ -272,9 +275,10 @@ class Connection(ConnectionBase):
if cache_key in SFTP_CONNECTION_CACHE:
return SFTP_CONNECTION_CACHE[cache_key]
else:
- result = SFTP_CONNECTION_CACHE[cache_key] = self.connect().ssh.open_sftp()
+ result = SFTP_CONNECTION_CACHE[cache_key] = self._connect().ssh.open_sftp()
return result
+ @ensure_connect
def fetch_file(self, in_path, out_path):
''' save a remote file to the specified path '''
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index b3ada343c0..5a435093d0 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -34,7 +34,8 @@ from hashlib import sha1
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.plugins.connections import ConnectionBase
+from ansible.plugins.connections import ConnectionBase, ensure_connect
+
class Connection(ConnectionBase):
''' ssh based connections '''
@@ -269,6 +270,7 @@ class Connection(ConnectionBase):
self._display.vvv("EXEC previous known host file not found for {0}".format(host))
return True
+ @ensure_connect
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
@@ -390,6 +392,7 @@ class Connection(ConnectionBase):
return (p.returncode, '', no_prompt_out + stdout, no_prompt_err + stderr)
+ @ensure_connect
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
self._display.vvv("PUT {0} TO {1}".format(in_path, out_path), host=self._connection_info.remote_addr)
@@ -425,6 +428,7 @@ class Connection(ConnectionBase):
if returncode != 0:
raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(out_path, stdout, stderr))
+ @ensure_connect
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
self._display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self._connection_info.remote_addr)
diff --git a/lib/ansible/plugins/connections/winrm.py b/lib/ansible/plugins/connections/winrm.py
index f16da0f6e6..ee28749189 100644
--- a/lib/ansible/plugins/connections/winrm.py
+++ b/lib/ansible/plugins/connections/winrm.py
@@ -42,10 +42,11 @@ except ImportError:
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.plugins.connections import ConnectionBase
+from ansible.plugins.connections import ConnectionBase, ensure_connect
from ansible.plugins import shell_loader
from ansible.utils.path import makedirs_safe
+
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
@@ -151,6 +152,7 @@ class Connection(ConnectionBase):
self.protocol = self._winrm_connect()
return self
+ @ensure_connect
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
cmd = cmd.encode('utf-8')
@@ -172,6 +174,7 @@ class Connection(ConnectionBase):
raise AnsibleError("failed to exec cmd %s" % cmd)
return (result.status_code, '', result.std_out.encode('utf-8'), result.std_err.encode('utf-8'))
+ @ensure_connect
def put_file(self, in_path, out_path):
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
if not os.path.exists(in_path):
@@ -210,6 +213,7 @@ class Connection(ConnectionBase):
traceback.print_exc()
raise AnsibleError("failed to transfer file to %s" % out_path)
+ @ensure_connect
def fetch_file(self, in_path, out_path):
out_path = out_path.replace('\\', '/')
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
From bce281014cfc8aaa2675c129ca3117a360041e5c Mon Sep 17 00:00:00 2001
From: Matt Martz
Date: Thu, 4 Jun 2015 13:27:18 -0500
Subject: [PATCH 202/971] Decorate the ConnectionBase methods, switch to
calling super from individual connection classes
---
lib/ansible/plugins/connections/__init__.py | 3 +++
lib/ansible/plugins/connections/local.py | 7 +++++++
lib/ansible/plugins/connections/paramiko_ssh.py | 11 +++++++----
lib/ansible/plugins/connections/ssh.py | 13 +++++++++----
lib/ansible/plugins/connections/winrm.py | 10 ++++++----
5 files changed, 32 insertions(+), 12 deletions(-)
diff --git a/lib/ansible/plugins/connections/__init__.py b/lib/ansible/plugins/connections/__init__.py
index da0775530d..1d3a2bdeed 100644
--- a/lib/ansible/plugins/connections/__init__.py
+++ b/lib/ansible/plugins/connections/__init__.py
@@ -92,16 +92,19 @@ class ConnectionBase(with_metaclass(ABCMeta, object)):
"""Connect to the host we've been initialized with"""
pass
+ @ensure_connect
@abstractmethod
def exec_command(self, cmd, tmp_path, executable=None, in_data=None):
"""Run a command on the remote host"""
pass
+ @ensure_connect
@abstractmethod
def put_file(self, in_path, out_path):
"""Transfer a file from local to remote"""
pass
+ @ensure_connect
@abstractmethod
def fetch_file(self, in_path, out_path):
"""Fetch a file from remote to local"""
diff --git a/lib/ansible/plugins/connections/local.py b/lib/ansible/plugins/connections/local.py
index 1dc6076b0d..85bc51de0a 100644
--- a/lib/ansible/plugins/connections/local.py
+++ b/lib/ansible/plugins/connections/local.py
@@ -49,6 +49,8 @@ class Connection(ConnectionBase):
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
''' run a command on the local host '''
+ super(Connection, self).exec_command(cmd, tmp_path, executable=executable, in_data=in_data)
+
debug("in local.exec_command()")
# su requires to be run from a terminal, and therefore isn't supported here (yet?)
#if self._connection_info.su:
@@ -108,6 +110,8 @@ class Connection(ConnectionBase):
def put_file(self, in_path, out_path):
''' transfer a file from local to local '''
+ super(Connection, self).put_file(in_path, out_path)
+
#vvv("PUT {0} TO {1}".format(in_path, out_path), host=self.host)
self._display.vvv("{0} PUT {1} TO {2}".format(self._connection_info.remote_addr, in_path, out_path))
if not os.path.exists(in_path):
@@ -123,6 +127,9 @@ class Connection(ConnectionBase):
def fetch_file(self, in_path, out_path):
''' fetch a file from local to local -- for copatibility '''
+
+ super(Connection, self).fetch_file(in_path, out_path)
+
#vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self.host)
self._display.vvv("{0} FETCH {1} TO {2}".format(self._connection_info.remote_addr, in_path, out_path))
self.put_file(in_path, out_path)
diff --git a/lib/ansible/plugins/connections/paramiko_ssh.py b/lib/ansible/plugins/connections/paramiko_ssh.py
index 8beaecf492..5a5259c5fc 100644
--- a/lib/ansible/plugins/connections/paramiko_ssh.py
+++ b/lib/ansible/plugins/connections/paramiko_ssh.py
@@ -41,7 +41,7 @@ from binascii import hexlify
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.plugins.connections import ConnectionBase, ensure_connect
+from ansible.plugins.connections import ConnectionBase
from ansible.utils.path import makedirs_safe
AUTHENTICITY_MSG="""
@@ -189,10 +189,11 @@ class Connection(ConnectionBase):
return ssh
- @ensure_connect
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
+ super(Connection, self).exec_command(cmd, tmp_path, executable=executable, in_data=in_data)
+
if in_data:
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
@@ -250,10 +251,11 @@ class Connection(ConnectionBase):
return (chan.recv_exit_status(), '', no_prompt_out + stdout, no_prompt_out + stderr)
- @ensure_connect
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
+ super(Connection, self).put_file(in_path, out_path)
+
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
if not os.path.exists(in_path):
@@ -278,10 +280,11 @@ class Connection(ConnectionBase):
result = SFTP_CONNECTION_CACHE[cache_key] = self._connect().ssh.open_sftp()
return result
- @ensure_connect
def fetch_file(self, in_path, out_path):
''' save a remote file to the specified path '''
+ super(Connection, self).fetch_file(in_path, out_path)
+
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
try:
diff --git a/lib/ansible/plugins/connections/ssh.py b/lib/ansible/plugins/connections/ssh.py
index 5a435093d0..e2251ca5b0 100644
--- a/lib/ansible/plugins/connections/ssh.py
+++ b/lib/ansible/plugins/connections/ssh.py
@@ -34,7 +34,7 @@ from hashlib import sha1
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.plugins.connections import ConnectionBase, ensure_connect
+from ansible.plugins.connections import ConnectionBase
class Connection(ConnectionBase):
@@ -270,10 +270,11 @@ class Connection(ConnectionBase):
self._display.vvv("EXEC previous known host file not found for {0}".format(host))
return True
- @ensure_connect
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
+ super(Connection, self).exec_command(cmd, tmp_path, executable=executable, in_data=in_data)
+
ssh_cmd = self._password_cmd()
ssh_cmd += ("ssh", "-C")
if not in_data:
@@ -392,9 +393,11 @@ class Connection(ConnectionBase):
return (p.returncode, '', no_prompt_out + stdout, no_prompt_err + stderr)
- @ensure_connect
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
+
+ super(Connection, self).put_file(in_path, out_path)
+
self._display.vvv("PUT {0} TO {1}".format(in_path, out_path), host=self._connection_info.remote_addr)
if not os.path.exists(in_path):
raise AnsibleFileNotFound("file or module does not exist: {0}".format(in_path))
@@ -428,9 +431,11 @@ class Connection(ConnectionBase):
if returncode != 0:
raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(out_path, stdout, stderr))
- @ensure_connect
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
+
+ super(Connection, self).fetch_file(in_path, out_path)
+
self._display.vvv("FETCH {0} TO {1}".format(in_path, out_path), host=self._connection_info.remote_addr)
cmd = self._password_cmd()
diff --git a/lib/ansible/plugins/connections/winrm.py b/lib/ansible/plugins/connections/winrm.py
index ee28749189..2bc1ee0053 100644
--- a/lib/ansible/plugins/connections/winrm.py
+++ b/lib/ansible/plugins/connections/winrm.py
@@ -42,7 +42,7 @@ except ImportError:
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
-from ansible.plugins.connections import ConnectionBase, ensure_connect
+from ansible.plugins.connections import ConnectionBase
from ansible.plugins import shell_loader
from ansible.utils.path import makedirs_safe
@@ -152,8 +152,8 @@ class Connection(ConnectionBase):
self.protocol = self._winrm_connect()
return self
- @ensure_connect
def exec_command(self, cmd, tmp_path, executable='/bin/sh', in_data=None):
+ super(Connection, self).exec_command(cmd, tmp_path, executable=executable, in_data=in_data)
cmd = cmd.encode('utf-8')
cmd_parts = shlex.split(cmd, posix=False)
@@ -174,8 +174,9 @@ class Connection(ConnectionBase):
raise AnsibleError("failed to exec cmd %s" % cmd)
return (result.status_code, '', result.std_out.encode('utf-8'), result.std_err.encode('utf-8'))
- @ensure_connect
def put_file(self, in_path, out_path):
+ super(Connection, self).put_file(in_path, out_path)
+
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
if not os.path.exists(in_path):
raise AnsibleFileNotFound("file or module does not exist: %s" % in_path)
@@ -213,8 +214,9 @@ class Connection(ConnectionBase):
traceback.print_exc()
raise AnsibleError("failed to transfer file to %s" % out_path)
- @ensure_connect
def fetch_file(self, in_path, out_path):
+ super(Connection, self).fetch_file(in_path, out_path)
+
out_path = out_path.replace('\\', '/')
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self._connection_info.remote_addr)
buffer_size = 2**19 # 0.5MB chunks
From ee06eebea3d7e218783385424a6f575e8bb7e5b3 Mon Sep 17 00:00:00 2001
From: Davide Guerri
Date: Thu, 4 Jun 2015 19:46:09 +0100
Subject: [PATCH 203/971] Fix lookup() plugin
lookup() plugin is currently broken because _get_file_contents() now
returns a tuple: (contents, show_data).
This patch fixes that issue.
---
lib/ansible/plugins/lookup/file.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/plugins/lookup/file.py b/lib/ansible/plugins/lookup/file.py
index ea53c37e03..30247c150c 100644
--- a/lib/ansible/plugins/lookup/file.py
+++ b/lib/ansible/plugins/lookup/file.py
@@ -53,7 +53,7 @@ class LookupModule(LookupBase):
for path in (basedir_path, relative_path, playbook_path):
try:
- contents = self._loader._get_file_contents(path)
+ contents, show_data = self._loader._get_file_contents(path)
ret.append(contents.rstrip())
break
except AnsibleParserError:
From ee5e166563ca01a556a921b177a632ea5c2f1a44 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Thu, 4 Jun 2015 15:43:07 -0400
Subject: [PATCH 204/971] Fixing ansible_*_interpreter use
Fixes ansible/ansible-modules-core#1459
---
lib/ansible/executor/module_common.py | 25 +++++++++--------------
lib/ansible/plugins/action/__init__.py | 8 ++++----
lib/ansible/plugins/action/assemble.py | 8 ++++----
lib/ansible/plugins/action/async.py | 6 +++---
lib/ansible/plugins/action/copy.py | 12 +++++------
lib/ansible/plugins/action/fetch.py | 2 +-
lib/ansible/plugins/action/normal.py | 2 +-
lib/ansible/plugins/action/patch.py | 4 ++--
lib/ansible/plugins/action/script.py | 4 ++--
lib/ansible/plugins/action/synchronize.py | 2 +-
lib/ansible/plugins/action/template.py | 4 ++--
lib/ansible/plugins/action/unarchive.py | 4 ++--
12 files changed, 38 insertions(+), 43 deletions(-)
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index 535fbd45e3..85dcafb961 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -31,6 +31,7 @@ from ansible import __version__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.parsing.utils.jsonify import jsonify
+from ansible.utils.unicode import to_bytes
REPLACER = "#<>"
REPLACER_ARGS = "\"<>\""
@@ -113,7 +114,7 @@ def _find_snippet_imports(module_data, module_path, strip_comments):
# ******************************************************************************
-def modify_module(module_path, module_args, strip_comments=False):
+def modify_module(module_path, module_args, task_vars=dict(), strip_comments=False):
"""
Used to insert chunks of code into modules before transfer rather than
doing regular python imports. This allows for more efficient transfer in
@@ -158,7 +159,6 @@ def modify_module(module_path, module_args, strip_comments=False):
(module_data, module_style) = _find_snippet_imports(module_data, module_path, strip_comments)
- #module_args_json = jsonify(module_args)
module_args_json = json.dumps(module_args)
encoded_args = repr(module_args_json.encode('utf-8'))
@@ -166,14 +166,11 @@ def modify_module(module_path, module_args, strip_comments=False):
module_data = module_data.replace(REPLACER_VERSION, repr(__version__))
module_data = module_data.replace(REPLACER_COMPLEX, encoded_args)
- # FIXME: we're not passing around an inject dictionary anymore, so
- # this needs to be fixed with whatever method we use for vars
- # like this moving forward
- #if module_style == 'new':
- # facility = C.DEFAULT_SYSLOG_FACILITY
- # if 'ansible_syslog_facility' in inject:
- # facility = inject['ansible_syslog_facility']
- # module_data = module_data.replace('syslog.LOG_USER', "syslog.%s" % facility)
+ if module_style == 'new':
+ facility = C.DEFAULT_SYSLOG_FACILITY
+ if 'ansible_syslog_facility' in task_vars:
+ facility = task_vars['ansible_syslog_facility']
+ module_data = module_data.replace('syslog.LOG_USER', "syslog.%s" % facility)
lines = module_data.split(b"\n", 1)
shebang = None
@@ -183,11 +180,9 @@ def modify_module(module_path, module_args, strip_comments=False):
interpreter = args[0]
interpreter_config = 'ansible_%s_interpreter' % os.path.basename(interpreter)
- # FIXME: more inject stuff here...
- #from ansible.utils.unicode import to_bytes
- #if interpreter_config in inject:
- # interpreter = to_bytes(inject[interpreter_config], errors='strict')
- # lines[0] = shebang = b"#!{0} {1}".format(interpreter, b" ".join(args[1:]))
+ if interpreter_config in task_vars:
+ interpreter = to_bytes(task_vars[interpreter_config], errors='strict')
+ lines[0] = shebang = b"#!{0} {1}".format(interpreter, b" ".join(args[1:]))
lines.insert(1, ENCODING_STRING)
else:
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index d6861118b2..5509bb2d94 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -67,7 +67,7 @@ class ActionBase:
self._supports_check_mode = True
- def _configure_module(self, module_name, module_args):
+ def _configure_module(self, module_name, module_args, task_vars=dict()):
'''
Handles the loading and templating of the module code through the
modify_module() function.
@@ -86,7 +86,7 @@ class ActionBase:
"run 'git submodule update --init --recursive' to correct this problem." % (module_name))
# insert shared code and arguments into the module
- (module_data, module_style, module_shebang) = modify_module(module_path, module_args)
+ (module_data, module_style, module_shebang) = modify_module(module_path, module_args, task_vars=task_vars)
return (module_style, module_shebang, module_data)
@@ -314,7 +314,7 @@ class ActionBase:
filtered_lines.write(line + '\n')
return filtered_lines.getvalue()
- def _execute_module(self, module_name=None, module_args=None, tmp=None, persist_files=False, delete_remote_tmp=True):
+ def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=dict(), persist_files=False, delete_remote_tmp=True):
'''
Transfer and run a module along with its arguments.
'''
@@ -338,7 +338,7 @@ class ActionBase:
debug("in _execute_module (%s, %s)" % (module_name, module_args))
- (module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=module_args)
+ (module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
if not shebang:
raise AnsibleError("module is missing interpreter line")
diff --git a/lib/ansible/plugins/action/assemble.py b/lib/ansible/plugins/action/assemble.py
index 4e796bddb6..49f861f08e 100644
--- a/lib/ansible/plugins/action/assemble.py
+++ b/lib/ansible/plugins/action/assemble.py
@@ -87,7 +87,7 @@ class ActionModule(ActionBase):
return dict(failed=True, msg="src and dest are required")
if boolean(remote_src):
- return self._execute_module(tmp=tmp)
+ return self._execute_module(tmp=tmp, task_vars=task_vars)
elif self._task._role is not None:
src = self._loader.path_dwim_relative(self._task._role._role_path, 'files', src)
else:
@@ -109,7 +109,7 @@ class ActionModule(ActionBase):
resultant = file(path).read()
# FIXME: diff needs to be moved somewhere else
#if self.runner.diff:
- # dest_result = self._execute_module(module_name='slurp', module_args=dict(path=dest), tmp=tmp, persist_files=True)
+ # dest_result = self._execute_module(module_name='slurp', module_args=dict(path=dest), task_vars=task_vars, tmp=tmp, persist_files=True)
# if 'content' in dest_result:
# dest_contents = dest_result['content']
# if dest_result['encoding'] == 'base64':
@@ -140,7 +140,7 @@ class ActionModule(ActionBase):
# res = self.runner._execute_module(conn, tmp, 'copy', module_args_tmp, inject=inject)
# res.diff = dict(after=resultant)
# return res
- res = self._execute_module(module_name='copy', module_args=new_module_args, tmp=tmp)
+ res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
#res.diff = dict(after=resultant)
return res
else:
@@ -153,4 +153,4 @@ class ActionModule(ActionBase):
)
)
- return self._execute_module(module_name='file', module_args=new_module_args, tmp=tmp)
+ return self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
diff --git a/lib/ansible/plugins/action/async.py b/lib/ansible/plugins/action/async.py
index 7c02e09757..7fedd544d6 100644
--- a/lib/ansible/plugins/action/async.py
+++ b/lib/ansible/plugins/action/async.py
@@ -42,12 +42,12 @@ class ActionModule(ActionBase):
env_string = self._compute_environment_string()
# configure, upload, and chmod the target module
- (module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=self._task.args)
+ (module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=self._task.args, task_vars=task_vars)
self._transfer_data(remote_module_path, module_data)
self._remote_chmod(tmp, 'a+rx', remote_module_path)
# configure, upload, and chmod the async_wrapper module
- (async_module_style, shebang, async_module_data) = self._configure_module(module_name='async_wrapper', module_args=dict())
+ (async_module_style, shebang, async_module_data) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
self._transfer_data(async_module_path, async_module_data)
self._remote_chmod(tmp, 'a+rx', async_module_path)
@@ -57,7 +57,7 @@ class ActionModule(ActionBase):
async_jid = str(random.randint(0, 999999999999))
async_cmd = " ".join([str(x) for x in [async_module_path, async_jid, async_limit, remote_module_path, argsfile]])
- result = self._low_level_execute_command(cmd=async_cmd, tmp=None)
+ result = self._low_level_execute_command(cmd=async_cmd, task_vars=task_vars, tmp=None)
# clean up after
if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
diff --git a/lib/ansible/plugins/action/copy.py b/lib/ansible/plugins/action/copy.py
index 6db130ad7f..2d404029c5 100644
--- a/lib/ansible/plugins/action/copy.py
+++ b/lib/ansible/plugins/action/copy.py
@@ -191,7 +191,7 @@ class ActionModule(ActionBase):
# FIXME: runner shouldn't have the diff option there
#if self.runner.diff and not raw:
- # diff = self._get_diff_data(tmp, dest_file, source_full)
+ # diff = self._get_diff_data(tmp, dest_file, source_full, task_vars)
#else:
# diff = {}
diff = {}
@@ -236,7 +236,7 @@ class ActionModule(ActionBase):
)
)
- module_return = self._execute_module(module_name='copy', module_args=new_module_args, delete_remote_tmp=delete_remote_tmp)
+ module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
module_executed = True
else:
@@ -260,7 +260,7 @@ class ActionModule(ActionBase):
)
# Execute the file module.
- module_return = self._execute_module(module_name='file', module_args=new_module_args, delete_remote_tmp=delete_remote_tmp)
+ module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, delete_remote_tmp=delete_remote_tmp)
module_executed = True
if not module_return.get('checksum'):
@@ -304,8 +304,8 @@ class ActionModule(ActionBase):
f.close()
return content_tempfile
- def _get_diff_data(self, tmp, destination, source):
- peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, diff_peek=True), persist_files=True)
+ def _get_diff_data(self, tmp, destination, source, task_vars):
+ peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, diff_peek=True), task_vars=task_vars, persist_files=True)
if 'failed' in peek_result and peek_result['failed'] or peek_result.get('rc', 0) != 0:
return {}
@@ -318,7 +318,7 @@ class ActionModule(ActionBase):
#elif peek_result['size'] > utils.MAX_FILE_SIZE_FOR_DIFF:
# diff['dst_larger'] = utils.MAX_FILE_SIZE_FOR_DIFF
else:
- dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), tmp=tmp, persist_files=True)
+ dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), task_vars=task_vars, tmp=tmp, persist_files=True)
if 'content' in dest_result:
dest_contents = dest_result['content']
if dest_result['encoding'] == 'base64':
diff --git a/lib/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py
index 6a903ae5a2..2123c5b162 100644
--- a/lib/ansible/plugins/action/fetch.py
+++ b/lib/ansible/plugins/action/fetch.py
@@ -61,7 +61,7 @@ class ActionModule(ActionBase):
# use slurp if sudo and permissions are lacking
remote_data = None
if remote_checksum in ('1', '2') or self._connection_info.become:
- slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), tmp=tmp)
+ slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
if slurpres.get('rc') == 0:
if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
diff --git a/lib/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py
index 431d9b0eeb..445d8a7ae7 100644
--- a/lib/ansible/plugins/action/normal.py
+++ b/lib/ansible/plugins/action/normal.py
@@ -24,6 +24,6 @@ class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=dict()):
#vv("REMOTE_MODULE %s %s" % (module_name, module_args), host=conn.host)
- return self._execute_module(tmp)
+ return self._execute_module(tmp, task_vars=task_vars)
diff --git a/lib/ansible/plugins/action/patch.py b/lib/ansible/plugins/action/patch.py
index bf2af1be1e..31dbd31fa4 100644
--- a/lib/ansible/plugins/action/patch.py
+++ b/lib/ansible/plugins/action/patch.py
@@ -36,7 +36,7 @@ class ActionModule(ActionBase):
elif remote_src:
# everything is remote, so we just execute the module
# without changing any of the module arguments
- return self._execute_module()
+ return self._execute_module(task_vars=task_vars)
if self._task._role is not None:
src = self._loader.path_dwim_relative(self._task._role._role_path, 'files', src)
@@ -63,4 +63,4 @@ class ActionModule(ActionBase):
)
)
- return self._execute_module('patch', module_args=new_module_args)
+ return self._execute_module('patch', module_args=new_module_args, task_vars=task_vars)
diff --git a/lib/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py
index 3ca7dc6a34..7c24845515 100644
--- a/lib/ansible/plugins/action/script.py
+++ b/lib/ansible/plugins/action/script.py
@@ -42,7 +42,7 @@ class ActionModule(ActionBase):
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
- result = self._execute_module(module_name='stat', module_args=dict(path=creates), tmp=tmp, persist_files=True)
+ result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
@@ -52,7 +52,7 @@ class ActionModule(ActionBase):
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
- result = self._execute_module(module_name='stat', module_args=dict(path=removes), tmp=tmp, persist_files=True)
+ result = self._execute_module(module_name='stat', module_args=dict(path=removes), task_vars=task_vars, tmp=tmp, persist_files=True)
stat = result.get('stat', None)
if stat and not stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index 219a982cb1..aa0a810a2a 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -170,7 +170,7 @@ class ActionModule(ActionBase):
self._task.args['ssh_args'] = constants.ANSIBLE_SSH_ARGS
# run the module and store the result
- result = self._execute_module('synchronize')
+ result = self._execute_module('synchronize', task_vars=task_vars)
return result
diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py
index 7300848e6b..ea033807df 100644
--- a/lib/ansible/plugins/action/template.py
+++ b/lib/ansible/plugins/action/template.py
@@ -152,7 +152,7 @@ class ActionModule(ActionBase):
# res.diff = dict(before=dest_contents, after=resultant)
# return res
- result = self._execute_module(module_name='copy', module_args=new_module_args)
+ result = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars)
if result.get('changed', False):
result['diff'] = dict(before=dest_contents, after=resultant)
return result
@@ -180,5 +180,5 @@ class ActionModule(ActionBase):
#if self.runner.noop_on_check(task_vars):
# new_module_args['CHECKMODE'] = True
- return self._execute_module(module_name='file', module_args=new_module_args)
+ return self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars)
diff --git a/lib/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py
index b7601ed910..ef5320b719 100644
--- a/lib/ansible/plugins/action/unarchive.py
+++ b/lib/ansible/plugins/action/unarchive.py
@@ -47,7 +47,7 @@ class ActionModule(ActionBase):
# and the filename already exists. This allows idempotence
# of command executions.
module_args_tmp = "path=%s" % creates
- result = self._execute_module(module_name='stat', module_args=dict(path=creates))
+ result = self._execute_module(module_name='stat', module_args=dict(path=creates), task_vars=task_vars)
stat = result.get('stat', None)
if stat and stat.get('exists', False):
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
@@ -110,5 +110,5 @@ class ActionModule(ActionBase):
# module_args += " CHECKMODE=True"
# execute the unarchive module now, with the updated args
- return self._execute_module(module_args=new_module_args)
+ return self._execute_module(module_args=new_module_args, task_vars=task_vars)
From 73c956366e856502598021756b3f231723af30b0 Mon Sep 17 00:00:00 2001
From: James Cammarata
Date: Fri, 5 Jun 2015 07:15:35 -0400
Subject: [PATCH 205/971] Correctly determine failed task state when checking
results
Fixes #11172
---
lib/ansible/plugins/strategies/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/lib/ansible/plugins/strategies/__init__.py b/lib/ansible/plugins/strategies/__init__.py
index bb839f20f4..57630f4f21 100644
--- a/lib/ansible/plugins/strategies/__init__.py
+++ b/lib/ansible/plugins/strategies/__init__.py
@@ -149,7 +149,7 @@ class StrategyBase:
task_result = result[1]
host = task_result._host
task = task_result._task
- if result[0] == 'host_task_failed' or 'failed' in task_result._result:
+ if result[0] == 'host_task_failed' or task_result.is_failed():
if not task.ignore_errors:
debug("marking %s as failed" % host.name)
iterator.mark_host_failed(host)
From 9ac624d2c90be1c18d2aa27b78c373e66aa16661 Mon Sep 17 00:00:00 2001
From: James Cammarata