2012-03-03 03:08:48 +01:00
|
|
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
|
|
|
|
#
|
|
|
|
# This file is part of Ansible
|
|
|
|
#
|
|
|
|
# Ansible is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
|
|
|
# Ansible is distributed in the hope that it will be useful,
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
|
|
|
|
import sys
|
2012-03-03 04:10:51 +01:00
|
|
|
import os
|
2012-03-15 02:16:15 +01:00
|
|
|
import shlex
|
2012-03-20 00:32:38 +01:00
|
|
|
import re
|
2012-05-09 01:03:51 +02:00
|
|
|
import codecs
|
2012-03-20 00:23:14 +01:00
|
|
|
import jinja2
|
2012-03-20 03:42:31 +01:00
|
|
|
import yaml
|
2012-04-06 16:59:15 +02:00
|
|
|
import optparse
|
2012-07-15 14:46:58 +02:00
|
|
|
import operator
|
|
|
|
from ansible import errors
|
2012-07-30 02:28:11 +02:00
|
|
|
from ansible import __version__
|
2012-07-15 14:46:58 +02:00
|
|
|
import ansible.constants as C
|
2012-08-09 03:05:58 +02:00
|
|
|
import time
|
2012-08-11 17:59:14 +02:00
|
|
|
import StringIO
|
2012-08-18 16:52:24 +02:00
|
|
|
import imp
|
|
|
|
import glob
|
2012-09-17 14:02:30 +02:00
|
|
|
import subprocess
|
2012-09-24 20:47:59 +02:00
|
|
|
import stat
|
2012-09-22 08:07:49 +02:00
|
|
|
import termios
|
|
|
|
import tty
|
2012-09-27 05:50:54 +02:00
|
|
|
from multiprocessing import Manager
|
2012-07-09 09:52:00 +02:00
|
|
|
|
2012-08-09 03:09:14 +02:00
|
|
|
# Global verbosity level; incremented once per -v flag by the
# increment_debug() optparse callback below.
VERBOSITY=0
|
|
|
|
|
2012-03-03 03:08:48 +01:00
|
|
|
try:
|
|
|
|
import json
|
|
|
|
except ImportError:
|
|
|
|
import simplejson as json
|
|
|
|
|
2012-07-09 09:52:00 +02:00
|
|
|
try:
|
2012-07-09 19:27:47 +02:00
|
|
|
from hashlib import md5 as _md5
|
2012-08-07 02:07:02 +02:00
|
|
|
except ImportError:
|
2012-07-09 19:27:47 +02:00
|
|
|
from md5 import md5 as _md5
|
2012-07-09 09:52:00 +02:00
|
|
|
|
2012-08-09 16:56:40 +02:00
|
|
|
PASSLIB_AVAILABLE = False
|
|
|
|
try:
|
|
|
|
import passlib.hash
|
|
|
|
PASSLIB_AVAILABLE = True
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
|
2012-09-27 05:50:54 +02:00
|
|
|
KEYCZAR_AVAILABLE=False
|
|
|
|
try:
|
|
|
|
from keyczar.keys import AesKey
|
|
|
|
KEYCZAR_AVAILABLE=True
|
|
|
|
except ImportError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
###############################################################
|
|
|
|
# abstractions around keyczar
|
|
|
|
|
|
|
|
def key_for_hostname(hostname):
    ''' return (generating if needed) the cached fireball AES key for hostname '''

    # fireball mode is an implementation of ansible firing up zeromq via SSH
    # to use no persistent daemons or key management
    keydir = os.path.expanduser("~/.fireball.keys")
    if not os.path.exists(keydir):
        os.makedirs(keydir)
    key_path = os.path.expanduser("~/.fireball.keys/%s" % hostname)

    # use new AES keys every 2 hours, which means fireball must not allow running for longer either
    expired = (time.time() - os.path.getmtime(key_path) > 60 * 60 * 2) if os.path.exists(key_path) else True
    if expired:
        fresh_key = AesKey.Generate()
        handle = open(key_path, "w")
        handle.write(str(fresh_key))
        handle.close()
        return fresh_key
    handle = open(key_path)
    cached_key = AesKey.Read(handle.read())
    handle.close()
    return cached_key
|
|
|
|
|
|
|
|
def encrypt(key, msg):
    ''' encrypt msg with the given keyczar AES key, returning the ciphertext '''
    ciphertext = key.Encrypt(msg)
    return ciphertext
|
|
|
|
|
|
|
|
def decrypt(key, msg):
    ''' decrypt msg with the given keyczar AES key.

    Raises errors.AnsibleError when decryption fails.
    '''
    try:
        return key.Decrypt(msg)
    except Exception:
        # BUG FIX: the original caught keyczar.errors.InvalidSignatureError,
        # but only `from keyczar.keys import AesKey` is imported at module
        # level -- the bare name `keyczar` is undefined, so evaluating the
        # except clause raised NameError instead of reporting the failure.
        # Catch broadly here and surface a uniform ansible error.
        raise errors.AnsibleError("decryption failed")
|
|
|
|
|
2012-03-03 03:16:29 +01:00
|
|
|
###############################################################
|
|
|
|
# UTILITY FUNCTIONS FOR COMMAND LINE TOOLS
|
|
|
|
###############################################################
|
|
|
|
|
2012-03-03 03:08:48 +01:00
|
|
|
def err(msg):
    ''' print an error message to stderr '''

    sys.stderr.write("%s\n" % msg)
|
|
|
|
|
|
|
|
def exit(msg, rc=1):
    ''' print an error to stderr and terminate with a (default failing) exit code '''

    err(msg)
    # sys.exit(rc) is exactly `raise SystemExit(rc)`
    raise SystemExit(rc)
|
|
|
|
|
2012-07-15 16:12:49 +02:00
|
|
|
def jsonify(result, format=False):
    ''' format a result dict as JSON, pretty-printed when format=True.

    (Docstring fix: the original read "uncompressed or uncompressed".)
    Keys are sorted for stable output; a shallow copy is made so the
    caller's dict is not touched.
    '''
    result2 = result.copy()
    if format:
        return json.dumps(result2, sort_keys=True, indent=4)
    else:
        return json.dumps(result2, sort_keys=True)
|
2012-03-03 03:08:48 +01:00
|
|
|
|
2012-03-03 04:10:51 +01:00
|
|
|
def write_tree_file(tree, hostname, buf):
    ''' write something into treedir/hostname '''

    # TODO: might be nice to append playbook runs per host in a similar way
    # in which case, we'd want append mode.
    dest = os.path.join(tree, hostname)
    with open(dest, "w+") as out:
        out.write(buf)
|
|
|
|
|
|
|
|
def is_failed(result):
    ''' is a given JSON result a failed result? '''

    # failure is either a nonzero rc or an explicit truthy 'failed' flag
    bad_rc = result.get('rc', 0) != 0
    failed_flag = result.get('failed', False) in (True, 'True', 'true')
    return bad_rc or failed_flag
|
2012-03-03 03:08:48 +01:00
|
|
|
|
2012-09-24 21:06:34 +02:00
|
|
|
def check_conditional(conditional):
    ''' evaluate an only_if-style conditional expression string.

    The string is eval'd with the local helpers is_set()/is_unset()
    available, so expressions like "is_set('$x')" work.  A variable that
    was never template-substituted still starts with "$", which is what
    the helpers test for.

    SECURITY NOTE(review): eval() executes arbitrary Python -- the
    conditional string must come from trusted playbook content only;
    never feed it untrusted/remote input.
    '''
    def is_set(var):
        # substituted variables no longer carry the leading "$"
        return not var.startswith("$")
    def is_unset(var):
        # an unreplaced template variable still looks like "$name"
        return var.startswith("$")
    return eval(conditional)
|
|
|
|
|
2012-09-24 20:47:59 +02:00
|
|
|
def is_executable(path):
    '''is the given path executable?

    Returns a truthy int (the execute permission bits) when any of
    user/group/other may execute the file, falsy 0 otherwise.
    '''
    # The original called os.stat() three times for one answer --
    # redundant syscalls and a wider race window.  Stat once and mask.
    mode = os.stat(path)[stat.ST_MODE]
    return mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
|
|
|
|
|
2012-03-03 03:08:48 +01:00
|
|
|
def prepare_writeable_dir(tree):
    ''' make sure a directory exists and is writeable; exit() on failure '''

    if tree != '/':
        tree = os.path.realpath(os.path.expanduser(tree))
    if not os.path.exists(tree):
        try:
            os.makedirs(tree)
        except (IOError, OSError) as e:
            exit("Could not make dir %s: %s" % (tree, e))
    if not os.access(tree, os.W_OK):
        exit("Cannot write to path %s" % tree)
|
|
|
|
|
2012-03-03 16:53:15 +01:00
|
|
|
def path_dwim(basedir, given):
    ''' make relative paths work like folks expect '''

    # absolute paths and home-relative paths are honored as-is;
    # anything else is taken relative to basedir
    if given.startswith("/"):
        return given
    if given.startswith("~/"):
        return os.path.expanduser(given)
    return os.path.join(basedir, given)
|
2012-03-03 03:08:48 +01:00
|
|
|
|
2012-03-18 22:53:58 +01:00
|
|
|
def json_loads(data):
    ''' parse a JSON string and return a data structure '''

    parsed = json.loads(data)
    return parsed
|
|
|
|
|
2012-08-11 15:55:14 +02:00
|
|
|
def parse_json(raw_data):
    ''' parse module return data: real JSON, or key=value "Baby JSON".

    Falls back to shlex key=value parsing when the payload is not JSON;
    coerces 'changed'/'failed' to bools and 'rc' to int.  Returns a
    failed/unparsed marker dict if nothing could be extracted.
    '''

    # ignore stuff like tcgetattr spewage or other warnings
    data = filter_leading_non_json_lines(raw_data)

    try:
        return json.loads(data)
    except:
        # not JSON, but try "Baby JSON" which allows many of our modules to not
        # require JSON and makes writing modules in bash much simpler
        results = {}
        try:
            tokens = shlex.split(data)
        except:
            # parenthesized so this line is valid in both python 2 and 3
            print("failed to parse json: " + data)
            raise

        for t in tokens:
            if t.find("=") == -1:
                raise errors.AnsibleError("failed to parse: %s" % raw_data)
            (key, value) = t.split("=", 1)
            # BUG FIX: was `if key == 'changed' or 'failed':` -- 'failed'
            # is always truthy, so EVERY key with a value like "true"/"1"
            # was silently coerced to bool.  Only changed/failed should be.
            if key in ('changed', 'failed'):
                if value.lower() in ('true', '1'):
                    value = True
                elif value.lower() in ('false', '0'):
                    value = False
            if key == 'rc':
                value = int(value)
            results[key] = value

        if len(results) == 0:
            return { "failed" : True, "parsed" : False, "msg" : raw_data }
        return results
|
|
|
|
|
2012-05-10 19:25:28 +02:00
|
|
|
# matches a subscripted variable part like "foo[2]":
# group 1 = container name, group 2 = decimal index
_LISTRE = re.compile(r"(\w+)\[(\d+)\]")
|
|
|
|
|
2012-08-27 15:41:56 +02:00
|
|
|
class VarNotFoundException(Exception):
    ''' raised by variable lookup helpers when a $variable cannot be resolved '''
    pass
|
|
|
|
|
2012-09-25 15:43:15 +02:00
|
|
|
def _varLookup(name, vars, depth=0):
    ''' find the contents of a possibly complex variable in vars.

    `name` is a dotted path like "a.b.c" or "list[0]"; each dot component
    is itself template-expanded (mutually recursive with varReplace, with
    `depth` guarding against runaway recursion).  Raises
    VarNotFoundException when any component cannot be resolved.
    '''
    path = name.split('.')
    space = vars
    for part in path:
        # each path component may itself contain $variables
        part = varReplace(part, vars, depth=depth + 1)
        if part in space:
            space = space[part]
        elif "[" in part:
            # subscript access, e.g. foo[3]
            m = _LISTRE.search(part)
            if not m:
                raise VarNotFoundException()
            try:
                space = space[m.group(1)][int(m.group(2))]
            except (KeyError, IndexError):
                raise VarNotFoundException()
        else:
            raise VarNotFoundException()
    return space
|
|
|
|
|
2012-09-25 15:43:15 +02:00
|
|
|
# matches $simple and ${complex.expr[0]} variable references;
# group 2 captures the variable expression without the $/{ } wrapper
_KEYCRE = re.compile(r"\$(?P<complex>\{){0,1}((?(complex)[\w\.\[\]\$\{\}]+|\w+))(?(complex)\})")
|
2012-03-20 00:32:38 +01:00
|
|
|
|
2012-06-09 21:59:22 +02:00
|
|
|
def varLookup(varname, vars):
    ''' helper function used by with_items '''

    match = _KEYCRE.search(varname)
    if match is None:
        return None
    try:
        return _varLookup(match.group(2), vars)
    except VarNotFoundException:
        return None
|
2012-06-09 21:59:22 +02:00
|
|
|
|
2012-09-25 15:43:15 +02:00
|
|
|
def varReplace(raw, vars, do_repr=False, depth=0):
    ''' Perform variable replacement of $variables in string raw using vars dictionary.

    Unknown variables are preserved verbatim.  `depth` guards the mutual
    recursion with _varLookup (and self-recursion on replacement values);
    more than 20 levels raises AnsibleError.  With do_repr=True each
    replacement is repr()'d and, when the token was already wrapped in
    matching quotes, those surrounding quotes are consumed so the repr's
    own quotes take their place.
    '''
    # this code originally from yum

    if (depth > 20):
        raise errors.AnsibleError("template recursion depth exceeded")

    done = [] # Completed chunks to return

    while raw:
        m = _KEYCRE.search(raw)
        if not m:
            done.append(raw)
            break

        # Determine replacement value (if unknown variable then preserve
        # original)

        try:
            replacement = unicode(_varLookup(m.group(2), vars, depth))
            # the looked-up value may itself contain $variables
            replacement = varReplace(replacement, vars, depth=depth + 1)
        except VarNotFoundException:
            # leave the unresolved token in place
            replacement = m.group()

        start, end = m.span()
        if do_repr:
            replacement = repr(replacement)
            # if the token was already quoted, widen the span so the
            # original quotes are replaced by repr's quotes
            if (start > 0 and
                    ((raw[start - 1] == "'" and raw[end] == "'") or
                     (raw[start - 1] == '"' and raw[end] == '"'))):
                start -= 1
                end += 1
        done.append(raw[:start])    # Keep stuff leading up to token
        done.append(replacement)    # Append replacement value
        raw = raw[end:]             # Continue with remainder of string

    return ''.join(done)
|
|
|
|
|
2012-09-19 03:34:55 +02:00
|
|
|
# matches $FILE(path) and $PIPE(command) expressions;
# group 1 is the keyword (FILE or PIPE), group 2 the argument text
_FILEPIPECRE = re.compile(r"\$(?P<special>FILE|PIPE)\(([^\}]+)\)")
|
2012-09-17 14:02:30 +02:00
|
|
|
def varReplaceFilesAndPipes(basedir, raw):
    ''' expand $FILE(path) and $PIPE(command) expressions in raw.

    $FILE() is replaced by the file's contents (path resolved via
    path_dwim), $PIPE() by the command's stdout.  Raises
    VarNotFoundException when the file is unreadable or the command
    exits nonzero.
    '''
    chunks = []

    while raw:
        match = _FILEPIPECRE.search(raw)
        if match is None:
            chunks.append(raw)
            break

        if match.group(1) == "FILE":
            try:
                handle = open(path_dwim(basedir, match.group(2)), "r")
            except IOError:
                raise VarNotFoundException()
            replacement = handle.read()
            handle.close()
        elif match.group(1) == "PIPE":
            # NOTE(review): shell=True with templated content -- the
            # command string must come from trusted playbook data only
            proc = subprocess.Popen(match.group(2), shell=True, stdout=subprocess.PIPE)
            (stdout, stderr) = proc.communicate()
            if proc.returncode != 0:
                raise VarNotFoundException()
            replacement = stdout

        start, end = match.span()
        chunks.append(raw[:start])
        chunks.append(replacement)
        raw = raw[end:]

    return ''.join(chunks)
|
|
|
|
|
|
|
|
|
2012-09-25 10:37:47 +02:00
|
|
|
def template(basedir, text, vars, do_repr=False):
    ''' run a text buffer through one pass of the templating engine.

    (Docstring fix: the original claimed it loops "until it no longer
    changes", but no such loop exists -- varReplace itself recurses on
    replacement values.)  Also drops the unused local `prev_text`, a
    leftover from that removed loop.
    '''
    try:
        text = text.decode('utf-8')
    except UnicodeEncodeError:
        pass # already unicode (py2: decoding unicode first ascii-encodes it)
    text = varReplace(unicode(text), vars, do_repr)
    text = varReplaceFilesAndPipes(basedir, text)
    return text
|
2012-03-22 04:39:09 +01:00
|
|
|
|
2012-07-15 20:19:11 +02:00
|
|
|
def template_from_file(basedir, path, vars):
    ''' run a file through the (jinja2) templating engine, then through
    ansible's own $variable templating.

    Registers to/from json and yaml filters, reads the template as UTF-8,
    and preserves a trailing newline if the source file had one.
    '''

    environment = jinja2.Environment(loader=jinja2.FileSystemLoader(basedir), trim_blocks=False)
    environment.filters['to_json'] = json.dumps
    environment.filters['from_json'] = json.loads
    environment.filters['to_yaml'] = yaml.dump
    environment.filters['from_yaml'] = yaml.load
    data = codecs.open(path_dwim(basedir, path), encoding="utf8").read()
    t = environment.from_string(data)
    # copy so jinja2 rendering cannot mutate the caller's vars
    vars = vars.copy()
    res = t.render(vars)
    # jinja2 drops a single trailing newline; restore it to match the source
    if data.endswith('\n') and not res.endswith('\n'):
        res = res + '\n'
    # second pass: ansible's own $var / $FILE / $PIPE substitution
    return template(basedir, res, vars)
|
2012-03-20 03:42:31 +01:00
|
|
|
|
|
|
|
def parse_yaml(data):
    ''' convert a yaml string to a data structure '''

    # SECURITY NOTE(review): yaml.load can construct arbitrary Python
    # objects; only feed it trusted playbook/inventory content (modern
    # code would use yaml.safe_load)
    return yaml.load(data)
|
2012-08-07 02:07:02 +02:00
|
|
|
|
2012-03-20 03:42:31 +01:00
|
|
|
def parse_yaml_from_file(path):
    ''' convert a yaml file to a data structure.

    Raises AnsibleError when the file is missing and
    AnsibleYAMLValidationFailed with a caret-marked context excerpt when
    the YAML does not parse.
    '''

    try:
        data = file(path).read()
        return parse_yaml(data)
    except IOError:
        raise errors.AnsibleError("file not found: %s" % path)
    except yaml.YAMLError, exc:
        # build a friendly syntax-error report pointing at the problem column
        if hasattr(exc, 'problem_mark'):
            mark = exc.problem_mark
            # also show the line before the error, since YAML problems
            # often originate just above where the parser stops
            if mark.line -1 >= 0:
                before_probline = data.split("\n")[mark.line-1]
            else:
                before_probline = ''
            probline = data.split("\n")[mark.line]
            arrow = " " * mark.column + "^"
            msg = """Syntax Error while loading YAML script, %s
Note: The error may actually appear before this position: line %s, column %s

%s
%s
%s""" % (path, mark.line + 1, mark.column + 1, before_probline, probline, arrow)
        else:
            # No problem markers means we have to throw a generic
            # "stuff messed up" type message. Sry bud.
            msg = "Could not parse YAML. Check over %s again." % path
        raise errors.AnsibleYAMLValidationFailed(msg)
|
2012-03-20 00:23:14 +01:00
|
|
|
|
2012-03-31 04:52:38 +02:00
|
|
|
def parse_kv(args):
    ''' convert a string of key/value items to a dict '''

    options = {}
    if args is None:
        return options
    # attempting to split a unicode here does bad things
    for token in shlex.split(str(args), posix=True):
        if "=" in token:
            name, _, val = token.partition("=")
            options[name] = val
    return options
|
2012-03-31 03:56:10 +02:00
|
|
|
|
2012-07-09 09:52:00 +02:00
|
|
|
def md5(filename):
    ''' Return MD5 hex digest of local file, or None if file is not present.

    Reads in 64 KiB blocks so arbitrarily large files are handled without
    loading them into memory.
    '''
    if not os.path.exists(filename):
        return None
    digest = _md5()
    blocksize = 64 * 1024
    infile = open(filename, 'rb')
    try:
        block = infile.read(blocksize)
        while block:
            digest.update(block)
            block = infile.read(blocksize)
    finally:
        # FIX: the original leaked the file handle if read() raised
        infile.close()
    return digest.hexdigest()
|
|
|
|
|
2012-07-21 22:51:31 +02:00
|
|
|
def default(value, function):
    ''' syntactic sugar around lazy evaluation of defaults '''

    # only invoke the factory when no value was supplied
    if value is not None:
        return value
    return function()
|
|
|
|
|
2012-08-08 11:18:51 +02:00
|
|
|
def _gitinfo():
    ''' returns a string containing git branch, commit id and commit date,
    or '' / None when the source tree is not a usable git checkout.

    Reads .git metadata directly (HEAD and refs/heads/<branch>) instead of
    shelling out to git.
    '''
    result = None
    # walk up from this module to the repository root's .git
    repo_path = os.path.join(os.path.dirname(__file__), '..', '..', '.git')

    if os.path.exists(repo_path):
        # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
        if os.path.isfile(repo_path):
            try:
                # a submodule's .git file is a tiny YAML-ish "gitdir: path" pointer
                gitdir = yaml.load(open(repo_path)).get('gitdir')
                # There is a possibility the .git file has an absolute path.
                if os.path.isabs(gitdir):
                    repo_path = gitdir
                else:
                    repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
            except (IOError, AttributeError):
                return ''
        f = open(os.path.join(repo_path, "HEAD"))
        branch = f.readline().split('/')[-1].rstrip("\n")
        f.close()
        branch_path = os.path.join(repo_path, "refs", "heads", branch)
        if os.path.exists(branch_path):
            f = open(branch_path)
            # abbreviated commit id
            commit = f.readline()[:10]
            f.close()
            # approximate the commit date by the ref file's mtime
            date = time.localtime(os.stat(branch_path).st_mtime)
            if time.daylight == 0:
                offset = time.timezone
            else:
                offset = time.altzone
            # offset is seconds west of UTC; /-36 yields +-HHMM
            # (relies on py2 integer division)
            result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
                time.strftime("%Y/%m/%d %H:%M:%S", date), offset / -36)
        else:
            result = ''
    return result
|
|
|
|
|
|
|
|
def version(prog):
    ''' build a version banner for prog, appending git info when available '''

    banner = "{0} {1}".format(prog, __version__)
    gitinfo = _gitinfo()
    if gitinfo:
        banner += " {0}".format(gitinfo)
    return banner
|
|
|
|
|
2012-09-22 08:07:49 +02:00
|
|
|
def getch():
    ''' read in a single character '''

    stdin_fd = sys.stdin.fileno()
    saved_attrs = termios.tcgetattr(stdin_fd)
    try:
        # raw mode so the read returns after one keypress, unechoed
        tty.setraw(sys.stdin.fileno())
        char = sys.stdin.read(1)
    finally:
        # always restore the terminal, even if the read is interrupted
        termios.tcsetattr(stdin_fd, termios.TCSADRAIN, saved_attrs)
    return char
|
|
|
|
|
2012-07-07 14:45:06 +02:00
|
|
|
####################################################################
|
2012-08-07 02:07:02 +02:00
|
|
|
# option handling code for /usr/bin/ansible and ansible-playbook
|
2012-07-07 14:45:06 +02:00
|
|
|
# below this line
|
|
|
|
|
2012-04-10 19:51:58 +02:00
|
|
|
class SortedOptParser(optparse.OptionParser):
    '''Optparser which sorts the options by opt before outputting --help'''

    def format_help(self, formatter=None):
        ''' sort registered options by their option string, then delegate.

        FIX: the original passed formatter=None to the parent regardless,
        silently discarding any formatter the caller supplied.
        '''
        self.option_list.sort(key=operator.methodcaller('get_opt_string'))
        return optparse.OptionParser.format_help(self, formatter=formatter)
|
|
|
|
|
2012-08-09 03:09:14 +02:00
|
|
|
def increment_debug(option, opt, value, parser):
    ''' optparse callback: raise the global verbosity level by one per -v flag '''

    global VERBOSITY
    VERBOSITY = VERBOSITY + 1
|
|
|
|
|
2012-09-22 08:07:49 +02:00
|
|
|
def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
    async_opts=False, connect_opts=False, subset_opts=False):
    ''' create an options parser for any ansible script.

    Always-present options: -v, -f/--forks, -i, -k, --private-key, -K,
    -M, -T.  The boolean flags opt in to extra option groups:
    output_opts (-o, -t), runas_opts (-s, -U, -u), connect_opts (-c),
    async_opts (-P, -B), subset_opts (-l).  Returns the configured
    SortedOptParser.
    '''

    parser = SortedOptParser(usage, version=version("%prog"))
    # -v uses a callback so repeated flags accumulate (see increment_debug)
    parser.add_option('-v','--verbose', default=False, action="callback",
        callback=increment_debug, help="verbose mode (-vvv for more)")

    parser.add_option('-f','--forks', dest='forks', default=constants.DEFAULT_FORKS, type='int',
        help="specify number of parallel processes to use (default=%s)" % constants.DEFAULT_FORKS)
    parser.add_option('-i', '--inventory-file', dest='inventory',
        help="specify inventory host file (default=%s)" % constants.DEFAULT_HOST_LIST,
        default=constants.DEFAULT_HOST_LIST)
    parser.add_option('-k', '--ask-pass', default=False, dest='ask_pass', action='store_true',
        help='ask for SSH password')
    parser.add_option('--private-key', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
        help='use this file to authenticate the connection')
    parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
        help='ask for sudo password')
    parser.add_option('-M', '--module-path', dest='module_path',
        help="specify path(s) to module library (default=%s)" % constants.DEFAULT_MODULE_PATH,
        default=constants.DEFAULT_MODULE_PATH)

    if subset_opts:
        parser.add_option('-l', '--limit', default=constants.DEFAULT_SUBSET, dest='subset',
            help='further limit selected hosts to an additional pattern')

    parser.add_option('-T', '--timeout', default=constants.DEFAULT_TIMEOUT, type='int',
        dest='timeout',
        help="override the SSH timeout in seconds (default=%s)" % constants.DEFAULT_TIMEOUT)

    if output_opts:
        parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
            help='condense output')
        parser.add_option('-t', '--tree', dest='tree', default=None,
            help='log output to this directory')

    if runas_opts:
        parser.add_option("-s", "--sudo", default=False, action="store_true",
            dest='sudo', help="run operations with sudo (nopasswd)")
        parser.add_option('-U', '--sudo-user', dest='sudo_user', help='desired sudo user (default=root)',
            default=None)   # Can't default to root because we need to detect when this option was given
        parser.add_option('-u', '--user', default=constants.DEFAULT_REMOTE_USER,
            dest='remote_user',
            help='connect as this user (default=%s)' % constants.DEFAULT_REMOTE_USER)

    if connect_opts:
        parser.add_option('-c', '--connection', dest='connection',
            default=C.DEFAULT_TRANSPORT,
            help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)

    if async_opts:
        parser.add_option('-P', '--poll', default=constants.DEFAULT_POLL_INTERVAL, type='int',
            dest='poll_interval',
            help="set the poll interval if using -B (default=%s)" % constants.DEFAULT_POLL_INTERVAL)
        parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
            help='run asynchronously, failing after X seconds (default=N/A)')

    return parser
|
2012-04-06 16:59:15 +02:00
|
|
|
|
2012-08-09 16:56:40 +02:00
|
|
|
def do_encrypt(result, encrypt, salt_size=None, salt=None):
    ''' hash `result` with the named passlib algorithm.

    `encrypt` is a passlib.hash scheme name; salt_size or salt may be
    supplied to control salting.  Raises AnsibleError when passlib is
    missing or the algorithm is unknown.
    '''
    if not PASSLIB_AVAILABLE:
        raise errors.AnsibleError("passlib must be installed to encrypt vars_prompt values")

    try:
        crypt = getattr(passlib.hash, encrypt)
    except:
        raise errors.AnsibleError("passlib does not support '%s' algorithm" % encrypt)

    if salt_size:
        result = crypt.encrypt(result, salt_size=salt_size)
    elif salt:
        result = crypt.encrypt(result, salt=salt)
    else:
        result = crypt.encrypt(result)

    return result
|
2012-07-25 01:30:02 +02:00
|
|
|
|
2012-08-11 17:59:14 +02:00
|
|
|
def last_non_blank_line(buf):
    ''' return the last line of buf that contains any characters '''

    for line in reversed(buf.splitlines()):
        if line:
            return line
    # shouldn't occur unless there's no output
    return ""
|
2012-08-11 17:59:14 +02:00
|
|
|
|
|
|
|
def filter_leading_non_json_lines(buf):
    '''
    Strip noise that SSH can prepend to module output -- tcgetattr
    warnings, dropbear MOTD spew, etc.

    Leading lines are dropped until one looks like data: starts with
    '{' or '[', or contains '=' (baby-JSON).  Everything from that
    line on is kept, since multiline JSON is valid.
    '''
    kept = []
    seen_data = False
    for line in buf.splitlines():
        if seen_data or "=" in line or line.startswith('{') or line.startswith('['):
            seen_data = True
            kept.append(line + '\n')
    return ''.join(kept)
|
2012-08-11 15:55:14 +02:00
|
|
|
|
2012-08-18 16:52:24 +02:00
|
|
|
def import_plugins(directory):
    ''' import every python module in directory; returns {name: module}.

    Modules whose file name starts with "_" are treated as private and
    skipped.
    '''
    modules = {}
    for path in glob.glob(os.path.join(directory, '*.py')):
        name, ext = os.path.splitext(os.path.basename(path))
        # FIX: the original also tested path.startswith("_") on the FULL
        # path, which can never match -- dead code; the basename check
        # below is the real (and sufficient) private-module filter.
        if name.startswith("_"):
            continue
        modules[name] = imp.load_source(name, path)
    return modules
|