Mirror of https://github.com/ansible-collections/community.general.git (synced 2024-09-14 20:13:21 +02:00)
Resync the v1 directory with v1_last. Fixes #11162
This commit is contained in: parent 0826106441, commit ccb8bcebd3
10 changed files with 200 additions and 148 deletions
@@ -136,6 +136,9 @@ DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_
 DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
 DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
 
+# selinux
+DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf', islist=True)
+
 #TODO: get rid of ternary chain mess
 BECOME_METHODS = ['sudo','su','pbrun','pfexec','runas']
 BECOME_ERROR_STRINGS = {'sudo': 'Sorry, try again.', 'su': 'Authentication failure', 'pbrun': '', 'pfexec': '', 'runas': ''}

@@ -176,6 +179,9 @@ DEFAULT_LOAD_CALLBACK_PLUGINS = get_config(p, DEFAULTS, 'bin_ansible_callbacks'
 DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, boolean=True)
 
 
+RETRY_FILES_ENABLED = get_config(p, DEFAULTS, 'retry_files_enabled', 'ANSIBLE_RETRY_FILES_ENABLED', True, boolean=True)
+RETRY_FILES_SAVE_PATH = get_config(p, DEFAULTS, 'retry_files_save_path', 'ANSIBLE_RETRY_FILES_SAVE_PATH', '~/')
+
 # CONNECTION RELATED
 ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
 ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', "%(directory)s/ansible-ssh-%%h-%%p-%%r")

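Note: the settings added in the two hunks above all go through get_config(), which (roughly) prefers an environment variable when one is named, then the ini section, then the hard-coded default, with islist=True splitting the result on commas. A simplified, self-contained sketch of that precedence — the helper name is invented and the real function also handles boolean/integer casting:

    import os

    def get_config_sketch(parser, section, key, env_var, default, islist=False):
        # an environment variable (when one is named) wins, then the ini file,
        # then the built-in default
        value = None
        if env_var is not None:
            value = os.environ.get(env_var)
        if value is None and parser is not None:
            try:
                value = parser.get(section, key)
            except Exception:  # missing section or option -> fall back below
                value = None
        if value is None:
            value = default
        if islist and isinstance(value, str):
            value = [x.strip() for x in value.split(',')]
        return value

On that reading, DEFAULT_SELINUX_SPECIAL_FS is only controllable through an ansible.cfg [selinux] section (it names no environment variable), while the retry-file settings also answer to ANSIBLE_RETRY_FILES_ENABLED and ANSIBLE_RETRY_FILES_SAVE_PATH.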
@@ -38,6 +38,8 @@ BOOLEANS_TRUE = ['yes', 'on', '1', 'true', 1]
 BOOLEANS_FALSE = ['no', 'off', '0', 'false', 0]
 BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
 
+SELINUX_SPECIAL_FS="<<SELINUX_SPECIAL_FILESYSTEMS>>"
+
 # ansible modules can be written in any language. To simplify
 # development of Python modules, the functions available here
 # can be inserted in any module source automatically by including

@@ -181,7 +183,8 @@ def get_distribution():
     ''' return the distribution name '''
     if platform.system() == 'Linux':
         try:
-            distribution = platform.linux_distribution()[0].capitalize()
+            supported_dists = platform._supported_dists + ('arch',)
+            distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
             if not distribution and os.path.isfile('/etc/system-release'):
                 distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
                 if 'Amazon' in distribution:

@@ -334,7 +337,8 @@ class AnsibleModule(object):
 
     def __init__(self, argument_spec, bypass_checks=False, no_log=False,
         check_invalid_arguments=True, mutually_exclusive=None, required_together=None,
-        required_one_of=None, add_file_common_args=False, supports_check_mode=False):
+        required_one_of=None, add_file_common_args=False, supports_check_mode=False,
+        required_if=None):
 
         '''
         common code for quickly building an ansible module in Python

@@ -382,6 +386,7 @@ class AnsibleModule(object):
             self._check_argument_types()
             self._check_required_together(required_together)
             self._check_required_one_of(required_one_of)
+            self._check_required_if(required_if)
 
         self._set_defaults(pre=False)
         if not self.no_log:

@@ -528,10 +533,10 @@ class AnsibleModule(object):
             path = os.path.dirname(path)
         return path
 
-    def is_nfs_path(self, path):
+    def is_special_selinux_path(self, path):
         """
-        Returns a tuple containing (True, selinux_context) if the given path
-        is on a NFS mount point, otherwise the return will be (False, None).
+        Returns a tuple containing (True, selinux_context) if the given path is on a
+        NFS or other 'special' fs mount point, otherwise the return will be (False, None).
         """
         try:
             f = open('/proc/mounts', 'r')

@@ -542,9 +547,13 @@ class AnsibleModule(object):
         path_mount_point = self.find_mount_point(path)
         for line in mount_data:
             (device, mount_point, fstype, options, rest) = line.split(' ', 4)
-            if path_mount_point == mount_point and 'nfs' in fstype:
-                nfs_context = self.selinux_context(path_mount_point)
-                return (True, nfs_context)
+
+            if path_mount_point == mount_point:
+                for fs in SELINUX_SPECIAL_FS.split(','):
+                    if fs in fstype:
+                        special_context = self.selinux_context(path_mount_point)
+                        return (True, special_context)
 
         return (False, None)
 
     def set_default_selinux_context(self, path, changed):

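A stand-alone sketch of the lookup implemented by the two hunks above: find the path's mount point in /proc/mounts and treat it as "special" when its filesystem type matches one of the configured names. The function name, sample data, and the .strip() call are illustrative, not the module's actual code:

    SELINUX_SPECIAL_FS = "fuse, nfs, vboxsf"   # stand-in for the substituted constant

    def is_special_fs(path_mount_point, proc_mounts_lines):
        # mirror of the loop above: match the mount point, then scan the
        # configured filesystem names against the fstype column
        for line in proc_mounts_lines:
            (device, mount_point, fstype, options, rest) = line.split(' ', 4)
            if path_mount_point == mount_point:
                for fs in SELINUX_SPECIAL_FS.split(','):
                    if fs.strip() in fstype:
                        return True
        return False

    mounts = ["host:/export /mnt/nfs nfs4 rw,relatime 0 0\n",
              "/dev/sda1 / ext4 rw,relatime 0 0\n"]
    print(is_special_fs('/mnt/nfs', mounts))   # True
    print(is_special_fs('/', mounts))          # False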
@@ -562,9 +571,9 @@ class AnsibleModule(object):
         # Iterate over the current context instead of the
         # argument context, which may have selevel.
 
-        (is_nfs, nfs_context) = self.is_nfs_path(path)
-        if is_nfs:
-            new_context = nfs_context
+        (is_special_se, sp_context) = self.is_special_selinux_path(path)
+        if is_special_se:
+            new_context = sp_context
         else:
             for i in range(len(cur_context)):
                 if len(context) > i:

@@ -861,6 +870,7 @@ class AnsibleModule(object):
             locale.setlocale(locale.LC_ALL, 'C')
             os.environ['LANG'] = 'C'
             os.environ['LC_CTYPE'] = 'C'
+            os.environ['LC_MESSAGES'] = 'C'
         except Exception, e:
             self.fail_json(msg="An unknown error was encountered while attempting to validate the locale: %s" % e)
 
@@ -950,6 +960,20 @@ class AnsibleModule(object):
         if len(missing) > 0:
             self.fail_json(msg="missing required arguments: %s" % ",".join(missing))
 
+    def _check_required_if(self, spec):
+        ''' ensure that parameters which conditionally required are present '''
+        if spec is None:
+            return
+        for (key, val, requirements) in spec:
+            missing = []
+            if key in self.params and self.params[key] == val:
+                for check in requirements:
+                    count = self._count_terms(check)
+                    if count == 0:
+                        missing.append(check)
+            if len(missing) > 0:
+                self.fail_json(msg="%s is %s but the following are missing: %s" % (key, val, ','.join(missing)))
+
     def _check_argument_values(self):
         ''' ensure all arguments have the requested values, and there are no stray arguments '''
         for (k,v) in self.argument_spec.iteritems():

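For context, a minimal (hypothetical) module showing how the new required_if argument is meant to be passed; the option names are invented, and the tuple layout follows the (key, val, requirements) unpacking in the method above:

    from ansible.module_utils.basic import *   # standard module boilerplate

    def main():
        module = AnsibleModule(
            argument_spec=dict(
                state=dict(choices=['present', 'absent'], default='present'),
                path=dict(),
                owner=dict(),
            ),
            # when state == 'present', both 'path' and 'owner' must be given
            required_if=[
                ('state', 'present', ['path', 'owner']),
            ],
        )
        module.exit_json(changed=False)

    main()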
@@ -1009,6 +1033,7 @@ class AnsibleModule(object):
             value = self.params[k]
             is_invalid = False
 
+            try:
                 if wanted == 'str':
                     if not isinstance(value, basestring):
                         self.params[k] = str(value)

@@ -1060,6 +1085,8 @@ class AnsibleModule(object):
 
                 if is_invalid:
                     self.fail_json(msg="argument %s is of invalid type: %s, required: %s" % (k, type(value), wanted))
+            except ValueError, e:
+                self.fail_json(msg="value of argument %s is not of type %s and we were unable to automatically convert" % (k, wanted))
 
     def _set_defaults(self, pre=True):
         for (k,v) in self.argument_spec.iteritems():

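The new except branch covers the case where the automatic type conversion itself fails; roughly the situation it now reports through fail_json() instead of letting an exception escape (the parameter name and value here are made up):

    # e.g. a parameter declared with type='int' receiving a non-numeric string
    try:
        int("not-a-number")
    except ValueError:
        print("value of argument foo is not of type int and we were unable to automatically convert")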
@@ -64,14 +64,12 @@ class AnsibleCloudStack:
         api_secret = self.module.params.get('secret_key')
         api_url = self.module.params.get('api_url')
         api_http_method = self.module.params.get('api_http_method')
-        api_timeout = self.module.params.get('api_timeout')
 
         if api_key and api_secret and api_url:
             self.cs = CloudStack(
                 endpoint=api_url,
                 key=api_key,
                 secret=api_secret,
-                timeout=api_timeout,
                 method=api_http_method
             )
         else:

@@ -99,8 +99,9 @@ class Facts(object):
                     ('/etc/os-release', 'SuSE'),
                     ('/etc/gentoo-release', 'Gentoo'),
                     ('/etc/os-release', 'Debian'),
+                    ('/etc/lsb-release', 'Mandriva'),
                     ('/etc/os-release', 'NA'),
-                    ('/etc/lsb-release', 'Mandriva'))
+                   )
     SELINUX_MODE_DICT = { 1: 'enforcing', 0: 'permissive', -1: 'disabled' }
 
     # A list of dicts. If there is a platform with more than one

@@ -416,7 +417,9 @@ class Facts(object):
                     self.facts['distribution_version'] = self.facts['distribution_version'] + '.' + release.group(1)
             elif name == 'Debian':
                 data = get_file_content(path)
-                if 'Debian' in data or 'Raspbian' in data:
+                if 'Ubuntu' in data:
+                    break # Ubuntu gets correct info from python functions
+                elif 'Debian' in data or 'Raspbian' in data:
                     release = re.search("PRETTY_NAME=[^(]+ \(?([^)]+?)\)", data)
                     if release:
                         self.facts['distribution_release'] = release.groups()[0]

@@ -2160,7 +2163,7 @@ class DarwinNetwork(GenericBsdIfconfigNetwork, Network):
         current_if['media'] = 'Unknown' # Mac does not give us this
         current_if['media_select'] = words[1]
         if len(words) > 2:
-            current_if['media_type'] = words[2][1:]
+            current_if['media_type'] = words[2][1:-1]
         if len(words) > 3:
             current_if['media_options'] = self.get_options(words[3])
 
@@ -2545,6 +2548,43 @@ class LinuxVirtual(Virtual):
             self.facts['virtualization_role'] = 'NA'
             return
 
+class FreeBSDVirtual(Virtual):
+    """
+    This is a FreeBSD-specific subclass of Virtual. It defines
+    - virtualization_type
+    - virtualization_role
+    """
+    platform = 'FreeBSD'
+
+    def __init__(self):
+        Virtual.__init__(self)
+
+    def populate(self):
+        self.get_virtual_facts()
+        return self.facts
+
+    def get_virtual_facts(self):
+        self.facts['virtualization_type'] = ''
+        self.facts['virtualization_role'] = ''
+
+class OpenBSDVirtual(Virtual):
+    """
+    This is a OpenBSD-specific subclass of Virtual. It defines
+    - virtualization_type
+    - virtualization_role
+    """
+    platform = 'OpenBSD'
+
+    def __init__(self):
+        Virtual.__init__(self)
+
+    def populate(self):
+        self.get_virtual_facts()
+        return self.facts
+
+    def get_virtual_facts(self):
+        self.facts['virtualization_type'] = ''
+        self.facts['virtualization_role'] = ''
+
 class HPUXVirtual(Virtual):
     """

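These additions matter because of how a collector class gets chosen: each subclass advertises a platform string and the framework instantiates the one matching platform.system(). A simplified, self-contained illustration of that selection pattern (not the actual Facts/Virtual factory code):

    import platform

    class Virtual(object):
        platform = 'Generic'

    class FreeBSDVirtual(Virtual):
        platform = 'FreeBSD'

    class OpenBSDVirtual(Virtual):
        platform = 'OpenBSD'

    def pick_virtual_class():
        # return the subclass whose declared platform matches this system,
        # falling back to the generic base class
        for cls in Virtual.__subclasses__():
            if cls.platform == platform.system():
                return cls
        return Virtual

    print(pick_virtual_class().__name__)   # e.g. FreeBSDVirtual on FreeBSD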
@@ -65,7 +65,7 @@ Function Exit-Json($obj)
         $obj = New-Object psobject
     }
 
-    echo $obj | ConvertTo-Json -Depth 99
+    echo $obj | ConvertTo-Json -Compress -Depth 99
     Exit
 }
 
@@ -89,7 +89,7 @@ Function Fail-Json($obj, $message = $null)
 
     Set-Attr $obj "msg" $message
     Set-Attr $obj "failed" $true
-    echo $obj | ConvertTo-Json -Depth 99
+    echo $obj | ConvertTo-Json -Compress -Depth 99
     Exit 1
 }
 
@@ -50,6 +50,15 @@ try:
 except:
     HAS_SSL=False
 
+HAS_MATCH_HOSTNAME = True
+try:
+    from ssl import match_hostname, CertificateError
+except ImportError:
+    try:
+        from backports.ssl_match_hostname import match_hostname, CertificateError
+    except ImportError:
+        HAS_MATCH_HOSTNAME = False
+
 import httplib
 import os
 import re

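The fallback import above is what lets the later match_hostname() calls work on interpreters whose ssl module predates it (backports.ssl_match_hostname exposes the same API). A small usage illustration with a hand-built certificate dict, assuming a Python where ssl.match_hostname is available:

    from ssl import match_hostname, CertificateError

    cert = {'subject': ((('commonName', 'example.com'),),),
            'subjectAltName': (('DNS', 'example.com'),)}

    match_hostname(cert, 'example.com')        # silent on success
    try:
        match_hostname(cert, 'evil.invalid')   # mismatch raises CertificateError
    except CertificateError as e:
        print("certificate does not match: %s" % e)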
@@ -293,11 +302,13 @@ class SSLValidationHandler(urllib2.BaseHandler):
                     connect_result = s.recv(4096)
                     self.validate_proxy_response(connect_result)
                     ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+                    match_hostname(ssl_s.getpeercert(), self.hostname)
                 else:
                     self.module.fail_json(msg='Unsupported proxy scheme: %s. Currently ansible only supports HTTP proxies.' % proxy_parts.get('scheme'))
             else:
                 s.connect((self.hostname, self.port))
                 ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED)
+                match_hostname(ssl_s.getpeercert(), self.hostname)
             # close the ssl connection
             #ssl_s.unwrap()
             s.close()

@@ -311,6 +322,9 @@ class SSLValidationHandler(urllib2.BaseHandler):
                 'Use validate_certs=no or make sure your managed systems have a valid CA certificate installed. ' + \
                 'Paths checked for this platform: %s' % ", ".join(paths_checked)
             )
+        except CertificateError:
+            self.module.fail_json(msg="SSL Certificate does not belong to %s. Make sure the url has a certificate that belongs to it or use validate_certs=no (insecure)" % self.hostname)
+
         try:
             # cleanup the temp file created, don't worry
             # if it fails for some reason

@@ -363,14 +377,15 @@ def fetch_url(module, url, data=None, headers=None, method=None,
     # FIXME: change the following to use the generic_urlparse function
     #        to remove the indexed references for 'parsed'
     parsed = urlparse.urlparse(url)
-    if parsed[0] == 'https':
-        if not HAS_SSL and validate_certs:
+    if parsed[0] == 'https' and validate_certs:
+        if not HAS_SSL:
             if distribution == 'Redhat':
                 module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended. You can also install python-ssl from EPEL')
             else:
                 module.fail_json(msg='SSL validation is not available in your version of python. You can use validate_certs=no, however this is unsafe and not recommended')
+        if not HAS_MATCH_HOSTNAME:
+            module.fail_json(msg='Available SSL validation does not check that the certificate matches the hostname. You can install backports.ssl_match_hostname or update your managed machine to python-2.7.9 or newer. You could also use validate_certs=no, however this is unsafe and not recommended')
 
-        elif validate_certs:
         # do the cert validation
         netloc = parsed[1]
         if '@' in netloc:

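A hedged usage sketch of fetch_url() from a module after this restructure; the URL is illustrative, and validate_certs is assumed to come from the module's parameters rather than being passed explicitly (the signature above is truncated). The point of the change is that the certificate and hostname checks now only run for https URLs when validation is enabled:

    response, info = fetch_url(module, 'https://example.com/api')
    if response is None or info['status'] != 200:
        module.fail_json(msg='request failed: %s' % info.get('msg', ''))
    body = response.read()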
@@ -16,22 +16,21 @@
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
 #
 
-import fcntl
-import gettext
-import hmac
 import os
-import pipes
-import pty
-import pwd
-import random
 import re
-import select
-import shlex
 import subprocess
-import time
+import shlex
+import pipes
+import random
+import select
+import fcntl
+import hmac
+import pwd
+import gettext
+import pty
 from hashlib import sha1
 import ansible.constants as C
-from ansible.callbacks import vvv, vv
+from ansible.callbacks import vvv
 from ansible import errors
 from ansible import utils
 
@@ -257,51 +256,7 @@ class Connection(object):
             vvv("EXEC previous known host file not found for %s" % host)
         return True
 
-    def exec_command(self, *args, **kwargs):
-        """ Wrapper around _exec_command to retry in the case of an ssh
-        failure
-
-        Will retry if:
-        * an exception is caught
-        * ssh returns 255
-
-        Will not retry if
-        * remaining_tries is <2
-        * retries limit reached
-        """
-        remaining_tries = C.get_config(
-            C.p, 'ssh_connection', 'retries',
-            'ANSIBLE_SSH_RETRIES', 3, integer=True) + 1
-        cmd_summary = "%s %s..." % (args[0], str(kwargs)[:200])
-        for attempt in xrange(remaining_tries):
-            pause = 2 ** attempt - 1
-            if pause > 30:
-                pause = 30
-            time.sleep(pause)
-            try:
-                return_tuple = self._exec_command(*args, **kwargs)
-            except Exception as e:
-                msg = ("ssh_retry: attempt: %d, caught exception(%s) from cmd "
-                       "(%s).") % (attempt, e, cmd_summary)
-                vv(msg)
-                if attempt == remaining_tries - 1:
-                    raise e
-                else:
-                    continue
-            # 0 = success
-            # 1-254 = remote command return code
-            # 255 = failure from the ssh command itself
-            if return_tuple[0] != 255:
-                break
-            else:
-                msg = ('ssh_retry: attempt: %d, ssh return code is 255. cmd '
-                       '(%s).') % (attempt, cmd_summary)
-                vv(msg)
-
-        return return_tuple
-
-
-    def _exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
+    def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
         ''' run a command on the remote host '''
 
         if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:

@@ -1024,9 +1024,9 @@ def base_parser(constants=C, usage="", output_opts=False, runas_opts=False,
 
     if runas_opts:
         # priv user defaults to root later on to enable detecting when this option was given here
-        parser.add_option('-K', '--ask-sudo-pass', default=False, dest='ask_sudo_pass', action='store_true',
+        parser.add_option('-K', '--ask-sudo-pass', default=constants.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
                           help='ask for sudo password (deprecated, use become)')
-        parser.add_option('--ask-su-pass', default=False, dest='ask_su_pass', action='store_true',
+        parser.add_option('--ask-su-pass', default=constants.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
                           help='ask for su password (deprecated, use become)')
         parser.add_option("-s", "--sudo", default=constants.DEFAULT_SUDO, action="store_true", dest='sudo',
                           help="run operations with sudo (nopasswd) (deprecated, use become)")

@@ -1617,7 +1617,9 @@ def _load_vars_from_folder(folder_path, results, vault_password=None):
         names.sort()
 
         # do not parse hidden files or dirs, e.g. .svn/
-        paths = [os.path.join(folder_path, name) for name in names if not name.startswith('.')]
+        paths = [os.path.join(folder_path, name) for name in names
+                 if not name.startswith('.')
+                 and os.path.splitext(name)[1] in C.YAML_FILENAME_EXTENSIONS]
         for path in paths:
            _found, results = _load_vars_from_path(path, results, vault_password=vault_password)
         return results

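The added condition restricts vars loading to files whose extension appears in C.YAML_FILENAME_EXTENSIONS. A stand-alone illustration with a made-up stand-in for that list (the real one lives in ansible.constants):

    import os

    YAML_FILENAME_EXTENSIONS = ["", ".yml", ".yaml", ".json"]   # hypothetical stand-in

    folder_path = "/etc/ansible/group_vars"
    names = ["all.yml", "web.yaml", "notes.txt", ".svn", "production"]
    paths = [os.path.join(folder_path, name) for name in names
             if not name.startswith('.')
             and os.path.splitext(name)[1] in YAML_FILENAME_EXTENSIONS]
    print(paths)   # keeps all.yml, web.yaml and the extensionless 'production'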
@@ -23,6 +23,8 @@ import ast
 import yaml
 import traceback
 
+from collections import MutableMapping, MutableSet, MutableSequence
+
 from ansible import utils
 
 # modules that are ok that they do not have documentation strings

@@ -86,7 +88,14 @@ def get_docstring(filename, verbose=False):
                         if not doc.has_key(key):
                             doc[key] = value
                         else:
+                            if isinstance(doc[key], MutableMapping):
                                 doc[key].update(value)
+                            elif isinstance(doc[key], MutableSet):
+                                doc[key].add(value)
+                            elif isinstance(doc[key], MutableSequence):
+                                doc[key] = sorted(frozenset(doc[key] + value))
+                            else:
+                                raise Exception("Attempt to extend a documentation fragement of unknown type")
 
                 if 'EXAMPLES' in (t.id for t in child.targets):
                     plainexamples = child.value.s[1:] # Skip first empty line

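The new branches dispatch on the container type of the documentation value collected so far. A stand-alone sketch of that dispatch (the function name is invented; on modern Python the ABCs live in collections.abc):

    try:
        from collections import MutableMapping, MutableSet, MutableSequence
    except ImportError:   # Python 3.10+ moved the ABCs
        from collections.abc import MutableMapping, MutableSet, MutableSequence

    def merge_fragment(existing, value):
        if isinstance(existing, MutableMapping):
            existing.update(value)
            return existing
        elif isinstance(existing, MutableSet):
            existing.add(value)
            return existing
        elif isinstance(existing, MutableSequence):
            return sorted(frozenset(existing + value))
        raise Exception("cannot extend a documentation fragment of unknown type")

    print(merge_fragment({'a': 1}, {'b': 2}))   # {'a': 1, 'b': 2}
    print(merge_fragment(['x'], ['y', 'x']))    # ['x', 'y']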