Mirror of https://github.com/ansible-collections/community.general.git (synced 2024-09-14 20:13:21 +02:00)

Merge pull request #12119 from mgedmin/py3k

Some steps towards Python 3 support

Commit 6907166667

48 changed files with 114 additions and 120 deletions
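Almost every hunk below makes the same change: the Python 2-only syntax "except ExceptionType, e:" becomes "except ExceptionType as e:", which both Python 2.6+ and Python 3 accept. A minimal sketch of the pattern (the function name and file handling here are illustrative, not taken from the diff):

    import json

    def load_cache(path):
        # Return parsed JSON from path, or None if the file cannot be read.
        try:
            with open(path) as handle:
                return json.load(handle)
        except IOError as e:   # "except IOError, e:" is a SyntaxError on Python 3
            print("could not read %s: %s" % (path, e))
            return None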
@@ -76,7 +76,7 @@ def save_cache(data, config):
cache = open('/'.join([dpath,'inventory']), 'w')
cache.write(json.dumps(data))
cache.close()
-except IOError, e:
+except IOError as e:
pass # not really sure what to do here

@@ -88,7 +88,7 @@ def get_cache(cache_item, config):
cache = open('/'.join([dpath,'inventory']), 'r')
inv = cache.read()
cache.close()
-except IOError, e:
+except IOError as e:
pass # not really sure what to do here
return inv

@@ -172,7 +172,7 @@ def generate_inv_from_api(enterprise_entity,config):
else:
vm_metadata = metadata['metadata']['metadata']
inventory['_meta']['hostvars'][vm_nic] = vm_metadata
-except Exception, e:
+except Exception as e:
pass
inventory[vm_vapp]['children'].append(vmcollection['name'])

@@ -183,7 +183,7 @@ def generate_inv_from_api(enterprise_entity,config):
inventory[vmcollection['name']].append(vm_nic)
return inventory
-except Exception, e:
+except Exception as e:
# Return empty hosts output
return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } }

@@ -214,7 +214,7 @@ if __name__ == '__main__':
try:
login = api_get(None,config)
enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise'))
-except Exception, e:
+except Exception as e:
enterprise = None
if cache_available(config):
@@ -98,7 +98,7 @@ class CloudStackInventory(object):
options = parser.parse_args()
try:
self.cs = CloudStack(**read_config())
-except CloudStackException, e:
+except CloudStackException as e:
print >> sys.stderr, "Error: Could not connect to CloudStack API"
project_id = ''
@@ -136,7 +136,7 @@ except ImportError:
try:
import consul
-except ImportError, e:
+except ImportError as e:
print """failed=True msg='python-consul required for this module. see
http://python-consul.readthedocs.org/en/latest/#installation'"""
sys.exit(1)
@@ -145,7 +145,7 @@ except ImportError:
try:
from dopy.manager import DoError, DoManager
-except ImportError, e:
+except ImportError as e:
print "failed=True msg='`dopy` library required for this script'"
sys.exit(1)
@@ -237,7 +237,7 @@ class GceInventory(object):
'''Gets details about a specific instance '''
try:
return self.driver.ex_get_node(instance_name)
-except Exception, e:
+except Exception as e:
return None

def group_instances(self):
@@ -101,7 +101,7 @@ except:
from chube.linode_obj import Linode

sys.path = old_path
-except Exception, e:
+except Exception as e:
raise Exception("could not import chube")

load_chube_config()

@@ -184,7 +184,7 @@ class LinodeInventory(object):
try:
for node in Linode.search(status=Linode.STATUS_RUNNING):
self.add_node(node)
-except chube_api.linode_api.ApiError, e:
+except chube_api.linode_api.ApiError as e:
print "Looks like Linode's API is down:"
print
print e

@@ -194,7 +194,7 @@ class LinodeInventory(object):
"""Gets details about a specific node."""
try:
return Linode.find(api_id=linode_id)
-except chube_api.linode_api.ApiError, e:
+except chube_api.linode_api.ApiError as e:
print "Looks like Linode's API is down:"
print
print e
@@ -245,7 +245,7 @@ def _list_into_cache(regions):
if cs is None:
warnings.warn(
'Connecting to Rackspace region "%s" has caused Pyrax to '
-'return a NoneType. Is this a valid region?' % region,
+'return None. Is this a valid region?' % region,
RuntimeWarning)
continue
for server in cs.servers.list():

@@ -412,7 +412,7 @@ def setup():
pyrax.keyring_auth(keyring_username, region=region)
else:
pyrax.set_credential_file(creds_file, region=region)
-except Exception, e:
+except Exception as e:
sys.stderr.write("%s: %s\n" % (e, e.message))
sys.exit(1)
@@ -132,7 +132,7 @@ try:
for group in spacewalk_report('system-groups'):
org_groups[group['spacewalk_group_id']] = group['spacewalk_org_id']

-except (OSError), e:
+except (OSError) as e:
print >> sys.stderr, 'Problem executing the command "%s system-groups": %s' % \
(SW_REPORT, str(e))
sys.exit(2)

@@ -148,7 +148,7 @@ if options.list:
for item in spacewalk_report('inventory'):
host_vars[ item['spacewalk_profile_name'] ] = dict( ( key, ( value.split(';') if ';' in value else value) ) for key, value in item.items() )

-except (OSError), e:
+except (OSError) as e:
print >> sys.stderr, 'Problem executing the command "%s inventory": %s' % \
(SW_REPORT, str(e))
sys.exit(2)

@@ -185,7 +185,7 @@ if options.list:
if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta[ "hostvars" ]:
meta[ "hostvars" ][ system['spacewalk_server_name'] ] = host_vars[ system['spacewalk_server_name'] ]

-except (OSError), e:
+except (OSError) as e:
print >> sys.stderr, 'Problem executing the command "%s system-groups-systems": %s' % \
(SW_REPORT, str(e))
sys.exit(2)

@@ -212,7 +212,7 @@ elif options.host:
host_details = system
break

-except (OSError), e:
+except (OSError) as e:
print >> sys.stderr, 'Problem executing the command "%s inventory": %s' % \
(SW_REPORT, str(e))
sys.exit(2)
@@ -164,7 +164,7 @@ class VMwareInventory(object):
obj_info = self._get_obj_info(val, depth - 1, seen)
if obj_info != ():
d[attr] = obj_info
-except Exception, e:
+except Exception as e:
pass
return d
elif isinstance(obj, SudsObject):

@@ -207,7 +207,7 @@ class VMwareInventory(object):
host_info[k] = v
try:
host_info['ipAddress'] = host.config.network.vnic[0].spec.ip.ipAddress
-except Exception, e:
+except Exception as e:
print >> sys.stderr, e
host_info = self._flatten_dict(host_info, prefix)
if ('%s_ipAddress' % prefix) in host_info:
@@ -109,7 +109,7 @@ class ZabbixInventory(object):
try:
api = ZabbixAPI(server=self.zabbix_server)
api.login(user=self.zabbix_username, password=self.zabbix_password)
-except BaseException, e:
+except BaseException as e:
print >> sys.stderr, "Error: Could not login to Zabbix server. Check your zabbix.ini."
sys.exit(1)
@@ -120,7 +120,7 @@ class DocCLI(CLI):
# this typically means we couldn't even parse the docstring, not just that the YAML is busted,
# probably a quoting issue.
raise AnsibleError("Parsing produced an empty object.")
-except Exception, e:
+except Exception as e:
self.display.vvv(traceback.print_exc())
raise AnsibleError("module %s missing documentation (or could not parse documentation): %s\n" % (module, str(e)))
@@ -196,7 +196,7 @@ class PullCLI(CLI):
os.chdir('/')
try:
shutil.rmtree(self.options.dest)
-except Exception, e:
+except Exception as e:
self.display.error("Failed to remove %s: %s" % (self.options.dest, str(e)))

return rc
@@ -67,7 +67,7 @@ class WorkerProcess(multiprocessing.Process):
if fileno is not None:
try:
self._new_stdin = os.fdopen(os.dup(fileno))
-except OSError, e:
+except OSError as e:
# couldn't dupe stdin, most likely because it's
# not a valid file descriptor, so we just rely on
# using the one that was passed in

@@ -137,7 +137,7 @@ class WorkerProcess(multiprocessing.Process):
except:
# FIXME: most likely an abort, catch those kinds of errors specifically
break
-except Exception, e:
+except Exception as e:
debug("WORKER EXCEPTION: %s" % e)
debug("WORKER EXCEPTION: %s" % traceback.format_exc())
try:
@@ -125,14 +125,14 @@ class TaskExecutor:
result = json.dumps(res)
debug("done dumping result, returning")
return result
-except AnsibleError, e:
+except AnsibleError as e:
return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
finally:
try:
self._connection.close()
except AttributeError:
pass
-except Exception, e:
+except Exception as e:
debug("error closing connection: %s" % to_unicode(e))

def _get_loop_items(self):

@@ -187,7 +187,7 @@ class TaskExecutor:
try:
tmp_task = self._task.copy()
-except AnsibleParserError, e:
+except AnsibleParserError as e:
results.append(dict(failed=True, msg=str(e)))
continue
@@ -143,7 +143,7 @@ class Inventory(object):
return re.search(pattern_str[1:], str)
else:
return fnmatch.fnmatch(str, pattern_str)
-except Exception, e:
+except Exception as e:
raise AnsibleError('invalid host pattern: %s' % pattern_str)

def _match_list(self, items, item_attr, pattern_str):

@@ -153,7 +153,7 @@ class Inventory(object):
pattern = re.compile(fnmatch.translate(pattern_str))
else:
pattern = re.compile(pattern_str[1:])
-except Exception, e:
+except Exception as e:
raise AnsibleError('invalid host pattern: %s' % pattern_str)

for item in items:
@@ -46,7 +46,7 @@ class InventoryScript:
cmd = [ self.filename, "--list" ]
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-except OSError, e:
+except OSError as e:
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(stdout, stderr) = sp.communicate()

@@ -153,7 +153,7 @@ class InventoryScript:
cmd = [self.filename, "--host", host.name]
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-except OSError, e:
+except OSError as e:
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(out, err) = sp.communicate()
if out.strip() == '':
@@ -66,7 +66,7 @@ class Conditional:
for conditional in self.when:
if not self._check_conditional(conditional, templar, all_vars):
return False
-except Exception, e:
+except Exception as e:
raise AnsibleError("The conditional check '%s' failed. The error was: %s" % (conditional, e), obj=ds)

return True
@@ -20,8 +20,6 @@ __metaclass__ = type

import os

-from types import NoneType
-
from ansible.errors import AnsibleParserError
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence
@@ -25,7 +25,6 @@ import inspect
import os

from hashlib import sha1
-from types import NoneType

from ansible.errors import AnsibleError, AnsibleParserError
from ansible.parsing import DataLoader

@@ -184,16 +183,16 @@ class Role(Base, Become, Conditional, Taggable):

# vars and default vars are regular dictionaries
self._role_vars = self._load_role_yaml('vars')
-if not isinstance(self._role_vars, (dict, NoneType)):
-raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)
-elif self._role_vars is None:
+if self._role_vars is None:
self._role_vars = dict()
+elif not isinstance(self._role_vars, dict):
+raise AnsibleParserError("The vars/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

self._default_vars = self._load_role_yaml('defaults')
-if not isinstance(self._default_vars, (dict, NoneType)):
-raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)
-elif self._default_vars is None:
+if self._default_vars is None:
self._default_vars = dict()
+elif not isinstance(self._default_vars, dict):
+raise AnsibleParserError("The default/main.yml file for role '%s' must contain a dictionary of variables" % self._role_name)

def _load_role_yaml(self, subdir):
file_path = os.path.join(self._role_path, subdir)

@@ -370,7 +369,7 @@ class Role(Base, Become, Conditional, Taggable):
def deserialize(self, data, include_deps=True):
self._role_name = data.get('_role_name', '')
self._role_path = data.get('_role_path', '')
-self._role_vars = data.get('_role_vars', dict())
+self._role_vars = data.get('_role_vars', dict())
self._role_params = data.get('_role_params', dict())
self._default_vars = data.get('_default_vars', dict())
self._had_task_run = data.get('_had_task_run', dict())
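The role-loading hunks above drop "from types import NoneType" (that name no longer exists in Python 3's types module) and restructure the checks so that None is tested with "is None" and mappings with isinstance(..., dict). A hedged sketch of the same validation pattern, with illustrative names:

    def validate_vars(loaded, source):
        # Normalise a vars-file result: None becomes {}, non-dicts are rejected.
        if loaded is None:                    # replaces isinstance(loaded, NoneType)
            return dict()
        elif not isinstance(loaded, dict):
            raise ValueError("%s must contain a dictionary of variables" % source)
        return loaded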
@@ -166,7 +166,7 @@ class ActionBase:

tmp_mode = None
if self._play_context.remote_user != 'root' or self._play_context.become and self._play_context.become_user != 'root':
-tmp_mode = 0755
+tmp_mode = 0o755

cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode)
self._display.debug("executing _low_level_execute_command to create the tmp path")
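This hunk swaps the legacy octal literal 0755 for 0o755. The bare leading-zero form is a syntax error on Python 3, while the 0o prefix is accepted by Python 2.6+ and Python 3 and denotes the same value. A small illustration (the helper name is mine, not from the diff):

    import os
    import stat
    import tempfile

    def make_shared_tmpdir():
        # 0o755 == 493 decimal; the old spelling 0755 no longer parses on Python 3
        path = tempfile.mkdtemp()
        os.chmod(path, 0o755)
        assert stat.S_IMODE(os.stat(path).st_mode) == 0o755
        return path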
@@ -19,8 +19,6 @@ __metaclass__ = type

import os

-from types import NoneType
-
from ansible.errors import AnsibleError
from ansible.parsing import DataLoader
from ansible.plugins.action import ActionBase
lib/ansible/plugins/cache/jsonfile.py (vendored): 12 changes
@@ -45,7 +45,7 @@ class CacheModule(BaseCacheModule):
if not os.path.exists(self._cache_dir):
try:
os.makedirs(self._cache_dir)
-except (OSError,IOError), e:
+except (OSError,IOError) as e:
self._display.warning("error while trying to create cache dir %s : %s" % (self._cache_dir, str(e)))
return None

@@ -60,7 +60,7 @@ class CacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = codecs.open(cachefile, 'r', encoding='utf-8')
-except (OSError,IOError), e:
+except (OSError,IOError) as e:
self._display.warning("error while trying to read %s : %s" % (cachefile, str(e)))
pass
else:

@@ -81,7 +81,7 @@ class CacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
f = codecs.open(cachefile, 'w', encoding='utf-8')
-except (OSError,IOError), e:
+except (OSError,IOError) as e:
self._display.warning("error while trying to write to %s : %s" % (cachefile, str(e)))
pass
else:

@@ -94,7 +94,7 @@ class CacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
st = os.stat(cachefile)
-except (OSError,IOError), e:
+except (OSError,IOError) as e:
if e.errno == errno.ENOENT:
return False
else:

@@ -126,7 +126,7 @@ class CacheModule(BaseCacheModule):
try:
st = os.stat(cachefile)
return True
-except (OSError,IOError), e:
+except (OSError,IOError) as e:
if e.errno == errno.ENOENT:
return False
else:

@@ -137,7 +137,7 @@ class CacheModule(BaseCacheModule):
del self._cache[key]
try:
os.remove("%s/%s" % (self._cache_dir, key))
-except (OSError,IOError), e:
+except (OSError,IOError) as e:
pass #TODO: only pass on non existing?

def flush(self):
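The jsonfile cache hunks combine the new "except ... as e" form with an errno check on the caught exception. A compatibility sketch of that idiom (the function name is illustrative):

    import errno
    import os

    def cache_mtime(cachefile):
        # Return the cache file's mtime, or None if it does not exist.
        try:
            st = os.stat(cachefile)
        except (OSError, IOError) as e:
            if e.errno == errno.ENOENT:
                return None
            raise
        return st.st_mtime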
@@ -152,7 +152,7 @@ def version_compare(value, version, operator='eq', strict=False):
try:
method = getattr(py_operator, operator)
return method(Version(str(value)), Version(str(version)))
-except Exception, e:
+except Exception as e:
raise errors.AnsibleFilterError('Version comparison: %s' % e)

def regex_escape(string):
@@ -80,14 +80,14 @@ def logarithm(x, base=math.e):
return math.log10(x)
else:
return math.log(x, base)
-except TypeError, e:
+except TypeError as e:
raise errors.AnsibleFilterError('log() can only be used on numbers: %s' % str(e))


def power(x, y):
try:
return math.pow(x, y)
-except TypeError, e:
+except TypeError as e:
raise errors.AnsibleFilterError('pow() can only be used on numbers: %s' % str(e))

@@ -97,7 +97,7 @@ def inversepower(x, base=2):
return math.sqrt(x)
else:
return math.pow(x, 1.0/float(base))
-except TypeError, e:
+except TypeError as e:
raise errors.AnsibleFilterError('root() can only be used on numbers: %s' % str(e))

@@ -107,13 +107,13 @@ def human_readable(size, isbits=False, unit=None):
suffix = ''

ranges = (
-(1<<70L, 'Z'),
-(1<<60L, 'E'),
-(1<<50L, 'P'),
-(1<<40L, 'T'),
-(1<<30L, 'G'),
-(1<<20L, 'M'),
-(1<<10L, 'K'),
+(1<<70, 'Z'),
+(1<<60, 'E'),
+(1<<50, 'P'),
+(1<<40, 'T'),
+(1<<30, 'G'),
+(1<<20, 'M'),
+(1<<10, 'K'),
(1, base)
)
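human_readable() also drops the L suffix from its shift results: 1<<70L is Python 2-only syntax, while plain 1<<70 already yields an arbitrary-precision integer on both versions. A simplified sketch of the lookup; the real filter takes more parameters, so this version is purely illustrative:

    RANGES = (
        (1 << 40, 'T'),   # formerly 1 << 40L; same value, valid on Python 2 and 3
        (1 << 30, 'G'),
        (1 << 20, 'M'),
        (1 << 10, 'K'),
        (1, 'B'),
    )

    def human_readable(size):
        for limit, suffix in RANGES:
            if size >= limit:
                return '%.2f %s' % (float(size) / limit, suffix)
        return '0 B'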
@@ -67,7 +67,7 @@ except ImportError:
try:
import consul
HAS_CONSUL = True
-except ImportError, e:
+except ImportError as e:
HAS_CONSUL = False

@@ -104,7 +104,7 @@ class LookupModule(LookupBase):
values.append(r['Value'])
else:
values.append(results[1]['Value'])
-except Exception, e:
+except Exception as e:
raise AnsibleError(
"Error locating '%s' in kv store. Error was %s" % (term, e))

@@ -127,7 +127,7 @@ class LookupModule(LookupBase):
name, value = param.split('=')
assert name in paramvals, "% not a valid consul lookup parameter" % name
paramvals[name] = value
-except (ValueError, AssertionError), e:
+except (ValueError, AssertionError) as e:
raise AnsibleError(e)

return paramvals
@@ -41,7 +41,7 @@ class LookupModule(LookupBase):
val = credstash.getSecret(term, **kwargs)
except credstash.ItemNotFound:
raise AnsibleError('Key {0} not found'.format(term))
-except Exception, e:
+except Exception as e:
raise AnsibleError('Encountered exception while fetching {0}: {1}'.format(term, e.message))
ret.append(val)
@@ -141,7 +141,7 @@ class LookupModule(LookupBase):
try:
nsaddr = dns.resolver.query(ns)[0].address
nameservers.append(nsaddr)
-except Exception, e:
+except Exception as e:
raise AnsibleError("dns lookup NS: ", str(e))
myres.nameservers = nameservers
continue

@@ -176,7 +176,7 @@ class LookupModule(LookupBase):
domain = n.to_text()
except dns.exception.SyntaxError:
pass
-except Exception, e:
+except Exception as e:
raise AnsibleError("dns.reversename unhandled exception", str(e))

try:

@@ -196,7 +196,7 @@ class LookupModule(LookupBase):
rd['ttl'] = answers.rrset.ttl

ret.append(rd)
-except Exception, e:
+except Exception as e:
ret.append(str(e))

except dns.resolver.NXDOMAIN:

@@ -205,7 +205,7 @@ class LookupModule(LookupBase):
ret.append("")
except dns.resolver.Timeout:
ret.append('')
-except dns.exception.DNSException, e:
+except dns.exception.DNSException as e:
raise AnsibleError("dns.resolver unhandled exception", e)

return ret
@@ -47,7 +47,7 @@ class LookupModule(LookupBase):
# Retrieve a single value
try:
value = self.cp.get(section, key)
-except ConfigParser.NoOptionError, e:
+except ConfigParser.NoOptionError as e:
return dflt
return value

@@ -76,7 +76,7 @@ class LookupModule(LookupBase):
name, value = param.split('=')
assert(name in paramvals)
paramvals[name] = value
-except (ValueError, AssertionError), e:
+except (ValueError, AssertionError) as e:
raise errors.AnsibleError(e)

path = self._loader.path_dwim_relative(basedir, 'files', paramvals['file'])
@@ -32,7 +32,7 @@ class LookupModule(LookupBase):
for x in terms:
try:
intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader, fail_on_undefined=True)
-except UndefinedError, e:
+except UndefinedError as e:
raise AnsibleUndefinedVariable("One of the nested variables was undefined. The error was: %s" % e)
results.append(intermediate)
return results
@@ -186,7 +186,7 @@ class LookupModule(LookupBase):
try:
if not self.parse_simple_args(term):
self.parse_kv_args(parse_kv(term))
-except Exception, e:
+except Exception as e:
raise AnsibleError("unknown error parsing with_sequence arguments: %r. Error was: %s" % (term, e))

self.sanity_check()
@@ -55,7 +55,7 @@ class LookupModule(LookupBase):
assert(name in paramvals)
paramvals[name] = value

-except (ValueError, AssertionError), e:
+except (ValueError, AssertionError) as e:
# In case "file" or "key" are not present
raise AnsibleError(e)
@@ -70,7 +70,7 @@ class ShellModule(object):
# change the umask in a subshell to achieve the desired mode
# also for directories created with `mkdir -p`
if mode:
-tmp_umask = 0777 & ~mode
+tmp_umask = 0o777 & ~mode
cmd = '(umask %o && %s)' % (tmp_umask, cmd)

return cmd
@@ -382,7 +382,7 @@ class StrategyBase:
data = self._loader.load_from_file(included_file._filename)
if data is None:
return []
-except AnsibleError, e:
+except AnsibleError as e:
for host in included_file._hosts:
tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=str(e)))
iterator.mark_host_failed(host)

@@ -455,7 +455,7 @@ class StrategyBase:
loader=self._loader,
variable_manager=self._variable_manager
)
-except AnsibleError, e:
+except AnsibleError as e:
return False

if len(included_files) > 0:

@@ -475,7 +475,7 @@ class StrategyBase:
# and add the new blocks to the list of handler blocks
handler_block.block.extend(block.block)
#iterator._play.handlers.extend(new_blocks)
-except AnsibleError, e:
+except AnsibleError as e:
for host in included_file._hosts:
iterator.mark_host_failed(host)
self._tqm._failed_hosts[host.name] = True
@@ -144,7 +144,7 @@ class StrategyModule(StrategyBase):
try:
included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader, variable_manager=self._variable_manager)
-except AnsibleError, e:
+except AnsibleError as e:
return False

if len(included_files) > 0:

@@ -153,7 +153,7 @@ class StrategyModule(StrategyBase):
# list of noop tasks, to make sure that they continue running in lock-step
try:
new_blocks = self._load_included_file(included_file, iterator=iterator)
-except AnsibleError, e:
+except AnsibleError as e:
for host in included_file._hosts:
iterator.mark_host_failed(host)
self._display.warning(str(e))
@@ -258,7 +258,7 @@ class StrategyModule(StrategyBase):
try:
included_files = IncludedFile.process_include_results(host_results, self._tqm, iterator=iterator, loader=self._loader, variable_manager=self._variable_manager)
-except AnsibleError, e:
+except AnsibleError as e:
return False

if len(included_files) > 0:

@@ -273,7 +273,7 @@ class StrategyModule(StrategyBase):
# list of noop tasks, to make sure that they continue running in lock-step
try:
new_blocks = self._load_included_file(included_file, iterator=iterator)
-except AnsibleError, e:
+except AnsibleError as e:
for host in included_file._hosts:
iterator.mark_host_failed(host)
self._display.warning(str(e))

@@ -296,7 +296,7 @@ class StrategyModule(StrategyBase):
iterator.add_tasks(host, all_blocks[host])

self._display.debug("results queue empty")
-except (IOError, EOFError), e:
+except (IOError, EOFError) as e:
self._display.debug("got IOError/EOFError in task loop: %s" % e)
# most likely an abort, return failed
return False
@@ -37,7 +37,6 @@ from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.debug import debug

from numbers import Number
-from types import NoneType

__all__ = ['Templar']

@@ -188,7 +187,7 @@ class Templar:
resolved_val = self._available_variables[var_name]
if isinstance(resolved_val, NON_TEMPLATED_TYPES):
return resolved_val
-elif isinstance(resolved_val, NoneType):
+elif resolved_val is None:
return C.DEFAULT_NULL_REPRESENTATION

result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides)

@@ -261,7 +260,7 @@ class Templar:
ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
except (AnsibleUndefinedVariable, UndefinedError) as e:
raise AnsibleUndefinedVariable(e)
-except Exception, e:
+except Exception as e:
if self._fail_on_lookup_errors:
raise
ran = None

@@ -299,9 +298,9 @@ class Templar:
try:
t = myenv.from_string(data)
-except TemplateSyntaxError, e:
+except TemplateSyntaxError as e:
raise AnsibleError("template error while templating string: %s" % str(e))
-except Exception, e:
+except Exception as e:
if 'recursion' in str(e):
raise AnsibleError("recursive loop detected in template string: %s" % data)
else:

@@ -317,7 +316,7 @@ class Templar:
try:
res = j2_concat(rf)
-except TypeError, te:
+except TypeError as te:
if 'StrictUndefined' in str(te):
raise AnsibleUndefinedVariable(
"Unable to look up a name or access an attribute in template string. " + \

@@ -338,7 +337,7 @@ class Templar:
res += '\n' * (data_newlines - res_newlines)

return res
-except (UndefinedError, AnsibleUndefinedVariable), e:
+except (UndefinedError, AnsibleUndefinedVariable) as e:
if fail_on_undefined:
raise AnsibleUndefinedVariable(e)
else:
lib/ansible/utils/module_docs.py: 0 changes (Normal file → Executable file)
@@ -45,6 +45,6 @@ def makedirs_safe(path, mode=None):
os.makedirs(path, mode)
else:
os.makedirs(path)
-except OSError, e:
+except OSError as e:
if e.errno != EEXIST:
raise
@@ -215,7 +215,7 @@ def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
return obj
elif nonstring == 'simplerepr':
try:
-simple = binary_type(obj)
+simple = str(obj)
except UnicodeError:
try:
simple = obj.__str__()
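The to_bytes() change replaces binary_type(obj) with str(obj) in the simple-repr fallback; on Python 3, bytes(obj) does not stringify arbitrary objects the way str(obj) does (bytes(3) is b'\x00\x00\x00', not b'3'). A rough sketch of such a fallback, not Ansible's actual API:

    def simple_repr(obj, encoding='utf-8'):
        # Best-effort byte representation of an arbitrary object (illustrative only).
        simple = str(obj)
        if isinstance(simple, bytes):      # already bytes on Python 2
            return simple
        return simple.encode(encoding, 'replace')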
@@ -242,7 +242,7 @@ class VariableManager:
break
else:
raise AnsibleError("vars file %s was not found" % vars_file_item)
-except UndefinedError, e:
+except UndefinedError as e:
continue

if not C.DEFAULT_PRIVATE_ROLE_VARS:
@@ -57,7 +57,7 @@ def delete_aws_eips(get_func, attr, opts):
try:
eip_log = open(opts.eip_log, 'r').read().splitlines()
except IOError:
-print opts.eip_log, 'not found.'
+print('%s not found.' % opts.eip_log)
return

for item in get_func():

@@ -175,5 +175,5 @@ if __name__ == '__main__':
filters = {"tag:Name":opts.match_re.replace('^',''), "instance-state-name": ['running', 'pending', 'stopped' ]}
delete_aws_instances(aws.get_all_instances(filters=filters), opts)

-except KeyboardInterrupt, e:
-print "\nExiting on user command."
+except KeyboardInterrupt as e:
+print("\nExiting on user command.")
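The cleanup script's print statements become print() calls with a single pre-formatted argument, which parse the same way on Python 2 and 3. A script that wants the full function semantics on Python 2 (for example the file= keyword) can add the __future__ import, as in this sketch (the names are mine):

    from __future__ import print_function  # makes print() a real function on Python 2

    import sys

    def warn_missing(path):
        # file= and end= keyword arguments require the future import on Python 2
        print('%s not found.' % path, file=sys.stderr)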
@@ -73,5 +73,5 @@ if __name__ == '__main__':
delete_gce_resources(get_snapshots, 'name', opts)
# Delete matching disks
delete_gce_resources(gce.list_volumes, 'name', opts)
-except KeyboardInterrupt, e:
-print "\nExiting on user command."
+except KeyboardInterrupt as e:
+print("\nExiting on user command.")
test/integration/cleanup_rax.py: 4 changes (Normal file → Executable file)
@@ -54,8 +54,8 @@ def authenticate():
def prompt_and_delete(item, prompt, assumeyes):
if not assumeyes:
assumeyes = raw_input(prompt).lower() == 'y'
-assert (hasattr(item, 'delete') or hasattr(item, 'terminate'),
-"Class <%s> has no delete or terminate attribute" % item.__class__)
+assert hasattr(item, 'delete') or hasattr(item, 'terminate'), \
+"Class <%s> has no delete or terminate attribute" % item.__class__
if assumeyes:
if hasattr(item, 'delete'):
item.delete()
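This hunk fixes a real bug rather than a syntax port: assert applied to a parenthesised (condition, "message") pair asserts a non-empty tuple, which is always true, so the check could never fire (newer CPython versions emit a SyntaxWarning for exactly this). The corrected form passes the condition and the message as the two assert operands, as in this sketch:

    def require_deletable(item):
        # Buggy form (always true, because the tuple itself is the asserted value):
        #     assert (hasattr(item, 'delete') or hasattr(item, 'terminate'),
        #             "Class <%s> has no delete or terminate attribute" % item.__class__)
        # Fixed form, condition and message as separate operands:
        assert hasattr(item, 'delete') or hasattr(item, 'terminate'), \
            "Class <%s> has no delete or terminate attribute" % item.__class__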
@@ -6,6 +6,6 @@ if __name__ == '__main__':
import consul
consul = consul.Consul(host='0.0.0.0', port=8500)
consul.catalog.nodes()
-print "True"
+print("True")
except:
pass
@@ -20,7 +20,7 @@ else:
def createDaemon():
try:
pid = os.fork()
-except OSError, e:
+except OSError as e:
raise Exception, "%s [%d]" % (e.strerror, e.errno)

if (pid == 0):

@@ -28,7 +28,7 @@ def createDaemon():
try:
pid = os.fork()
-except OSError, e:
+except OSError as e:
raise Exception, "%s [%d]" % (e.strerror, e.errno)

if (pid == 0):
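Note that these hunks convert only the except clauses; the Python 2-only form "raise Exception, ..." left in the context lines is still a syntax error on Python 3. The portable spelling calls the exception class, as in this sketch (the function name is mine; os.fork is Unix-only, as in the original):

    import os

    def fork_or_fail():
        try:
            return os.fork()
        except OSError as e:
            # Python 2 only:  raise Exception, "%s [%d]" % (e.strerror, e.errno)
            # Portable form:
            raise Exception("%s [%d]" % (e.strerror, e.errno))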
@@ -38,5 +38,5 @@ if __name__ == '__main__':
gce.create_volume_snapshot(base_volume, name=prefix+'-snapshot')
gce.create_volume(
size=10, name=prefix+'-extra', location='us-central1-a')
-except KeyboardInterrupt, e:
-print "\nExiting on user command."
+except KeyboardInterrupt as e:
+print("\nExiting on user command.")
@@ -314,7 +314,7 @@ class TestModuleUtilsBasic(unittest.TestCase):

base_params = dict(
path = '/path/to/file',
-mode = 0600,
+mode = 0o600,
owner = 'root',
group = 'root',
seuser = '_default',

@@ -711,9 +711,9 @@ class TestModuleUtilsBasic(unittest.TestCase):
)

mock_stat1 = MagicMock()
-mock_stat1.st_mode = 0444
+mock_stat1.st_mode = 0o444
mock_stat2 = MagicMock()
-mock_stat2.st_mode = 0660
+mock_stat2.st_mode = 0o660

with patch('os.lstat', side_effect=[mock_stat1]):
self.assertEqual(am.set_mode_if_different('/path/to/file', None, True), True)

@@ -723,13 +723,13 @@ class TestModuleUtilsBasic(unittest.TestCase):
with patch('os.lstat') as m:
with patch('os.lchmod', return_value=None, create=True) as m_os:
m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
-self.assertEqual(am.set_mode_if_different('/path/to/file', 0660, False), True)
-m_os.assert_called_with('/path/to/file', 0660)
+self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True)
+m_os.assert_called_with('/path/to/file', 0o660)

m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
-am._symbolic_mode_to_octal = MagicMock(return_value=0660)
+am._symbolic_mode_to_octal = MagicMock(return_value=0o660)
self.assertEqual(am.set_mode_if_different('/path/to/file', 'o+w,g+w,a-r', False), True)
-m_os.assert_called_with('/path/to/file', 0660)
+m_os.assert_called_with('/path/to/file', 0o660)

m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
am._symbolic_mode_to_octal = MagicMock(side_effect=Exception)

@@ -737,7 +737,7 @@ class TestModuleUtilsBasic(unittest.TestCase):

m.side_effect = [mock_stat1, mock_stat2, mock_stat2]
am.check_mode = True
-self.assertEqual(am.set_mode_if_different('/path/to/file', 0660, False), True)
+self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True)
am.check_mode = False

# FIXME: this isn't working yet

@@ -746,11 +746,11 @@ class TestModuleUtilsBasic(unittest.TestCase):
# del m_os.lchmod
# with patch('os.path.islink', return_value=False):
# with patch('os.chmod', return_value=None) as m_chmod:
-# self.assertEqual(am.set_mode_if_different('/path/to/file/no_lchmod', 0660, False), True)
-# m_chmod.assert_called_with('/path/to/file', 0660)
+# self.assertEqual(am.set_mode_if_different('/path/to/file/no_lchmod', 0o660, False), True)
+# m_chmod.assert_called_with('/path/to/file', 0o660)
# with patch('os.path.islink', return_value=True):
# with patch('os.chmod', return_value=None) as m_chmod:
# with patch('os.stat', return_value=mock_stat2):
-# self.assertEqual(am.set_mode_if_different('/path/to/file', 0660, False), True)
-# m_chmod.assert_called_with('/path/to/file', 0660)
+# self.assertEqual(am.set_mode_if_different('/path/to/file', 0o660, False), True)
+# m_chmod.assert_called_with('/path/to/file', 0o660)
tox.ini: 2 changes
@@ -26,7 +26,7 @@ whitelist_externals = make
[testenv:py34]
commands =
python --version
-python -m compileall -fq -x 'lib/ansible/module_utils' lib test contrib
+python -m compileall -fq -x 'lib/ansible/module_utils|lib/ansible/modules' lib test
make tests
deps = -r{toxinidir}/test-requirements.txt
whitelist_externals = make
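The adjusted tox check byte-compiles the tree under Python 3.4, so any remaining Python 2-only syntax fails the build; the new exclude regex skips both lib/ansible/module_utils and lib/ansible/modules, and contrib is no longer compiled. A rough Python equivalent of that command line, offered as a sketch rather than part of the commit:

    import re
    import sys
    import compileall

    # Mirrors: python -m compileall -fq -x 'lib/ansible/module_utils|lib/ansible/modules' lib test
    EXCLUDE = re.compile(r'lib/ansible/module_utils|lib/ansible/modules')

    results = [
        compileall.compile_dir(path, quiet=1, force=True, rx=EXCLUDE)
        for path in ('lib', 'test')
    ]
    sys.exit(0 if all(results) else 1)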