mirror of https://github.com/ansible-collections/community.general.git
fixed Python 3 keys() usage (#1861)
* fixed python3 keys()
* added changelog fragment
* Update plugins/modules/cloud/spotinst/spotinst_aws_elastigroup.py
  Co-authored-by: Felix Fontein <felix@fontein.de>
* Update plugins/cache/redis.py
  Co-authored-by: Felix Fontein <felix@fontein.de>
* rolledback redis.py per PR
* Update plugins/modules/monitoring/sensu/sensu_check.py
  Co-authored-by: Felix Fontein <felix@fontein.de>
* removed unnecessary ignore lines
* adding memcached and one case in redis is indeed necessary
* Update changelogs/fragments/1861-python3-keys.yml
  Co-authored-by: Felix Fontein <felix@fontein.de>
* Update changelogs/fragments/1861-python3-keys.yml
* Update changelogs/fragments/1861-python3-keys.yml
  Co-authored-by: Felix Fontein <felix@fontein.de>

Co-authored-by: Felix Fontein <felix@fontein.de>
parent e353390e6c
commit 434f383ae9
23 changed files with 64 additions and 50 deletions
changelogs/fragments/1861-python3-keys.yml | 22 (new file)
@@ -0,0 +1,22 @@
+bugfixes:
+- redis cache plugin - wrapped usages of ``keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- memcached cache plugin - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- diy callback plugin - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- selective callback plugin - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- chef_databag lookup plugin - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- net_tools.nios.api module_utils - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- utm_utils module_utils - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- lxc_container - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- lxd_container - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- oneandone_monitoring_policy - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- oci_vcn - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- spotinst_aws_elastigroup - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- sensu_check - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- redhat_subscription - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- idrac_redfish_command - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- idrac_redfish_config - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- idrac_redfish_info - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- redfish_command - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- redfish_config - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- vdo - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
+- nsot inventory script - wrapped usages of ``dict.keys()`` in ``list()`` for Python 3 compatibility (https://github.com/ansible-collections/community.general/pull/1861).
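
Every entry above is the same underlying Python 2 to 3 change: ``dict.keys()`` (and ``values()``/``items()``) now return view objects rather than lists, so code that indexes, concatenates, serializes, or mutates while iterating has to materialize the view first. A short illustration of the difference (plain Python, not tied to any particular plugin):

    params = {'state': 'started', 'name': 'web01'}

    keys = params.keys()
    print(type(keys))                 # Python 3: <class 'dict_keys'> -- a view, not a list

    params['backend'] = 'lxc'         # views track later changes to the dict
    print('backend' in keys)          # True

    # ...but views do not support list operations such as indexing:
    #     keys[0]  ->  TypeError: 'dict_keys' object is not subscriptable
    first = list(params.keys())[0]    # wrapping in list() restores the Python 2 behaviour
    print(first)                      # state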
plugins/cache/memcached.py | 2 (vendored)
@@ -162,7 +162,7 @@ class CacheModuleKeys(MutableSet):
         self._cache.set(self.PREFIX, self._keyset)
 
     def remove_by_timerange(self, s_min, s_max):
-        for k in self._keyset.keys():
+        for k in list(self._keyset.keys()):
             t = self._keyset[k]
             if s_min < t < s_max:
                 del self._keyset[k]
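
The list() wrapper matters here because the loop deletes entries from the very dict it iterates. A minimal sketch of the failure mode (plain dict, not the actual cache plugin):

    timestamps = {'host1': 100, 'host2': 250, 'host3': 400}

    # Deleting from a dict while iterating over its live keys() view raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    # list() snapshots the keys first, so the deletions below are safe.
    for k in list(timestamps.keys()):
        if 200 < timestamps[k] < 500:
            del timestamps[k]

    print(timestamps)   # {'host1': 100}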
plugins/cache/redis.py | 6 (vendored)
@@ -217,14 +217,12 @@ class CacheModule(BaseCacheModule):
         self._db.zrem(self._keys_set, key)
 
     def flush(self):
-        for key in self.keys():
+        for key in list(self.keys()):
             self.delete(key)
 
     def copy(self):
         # TODO: there is probably a better way to do this in redis
-        ret = dict()
-        for key in self.keys():
-            ret[key] = self.get(key)
+        ret = dict([(k, self.get(k)) for k in self.keys()])
         return ret
 
     def __getstate__(self):
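
flush() removes entries from the same key set it walks, so its keys are snapshotted with list() first, while copy() only reads and can build its result in a single expression. A rough standalone equivalent of the copy() rewrite, where a plain dict stands in for the redis-backed cache and get() is a hypothetical stand-in for the plugin's self.get():

    store = {'a': 1, 'b': 2, 'c': 3}

    def get(key):
        # stand-in for the plugin's self.get(key), which reads from redis
        return store[key]

    # old Python 2 style: start empty, fill with an explicit loop
    ret = dict()
    for key in list(store.keys()):
        ret[key] = get(key)

    # the updated copy() builds the same mapping in one expression
    ret2 = dict([(k, get(k)) for k in store.keys()])
    assert ret == ret2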
@@ -1013,7 +1013,7 @@ class CallbackModule(Default):
         for attr in _stats_attributes:
             _ret[self.DIY_NS]['stats'].update({attr: _get_value(obj=stats, attr=attr)})
 
-        _ret[self.DIY_NS].update({'top_level_var_names': _ret.keys()})
+        _ret[self.DIY_NS].update({'top_level_var_names': list(_ret.keys())})
 
         return _ret
 
@@ -67,7 +67,7 @@ COLORS = {
 
 def dict_diff(prv, nxt):
     """Return a dict of keys that differ with another config object."""
-    keys = set(prv.keys() + nxt.keys())
+    keys = set(list(prv.keys()) + list(nxt.keys()))
     result = {}
     for k in keys:
         if prv.get(k) != nxt.get(k):
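
dict_diff() needs the change because two Python 3 views cannot be concatenated with +; converting each to a list first restores the Python 2 behaviour. A minimal illustration:

    prv = {'mtu': 1500, 'speed': '1g'}
    nxt = {'mtu': 9000, 'duplex': 'full'}

    # On Python 3, prv.keys() + nxt.keys() raises
    # "TypeError: unsupported operand type(s) for +: 'dict_keys' and 'dict_keys'".
    keys = set(list(prv.keys()) + list(nxt.keys()))

    result = {k: nxt.get(k) for k in keys if prv.get(k) != nxt.get(k)}
    print(sorted(keys))   # ['duplex', 'mtu', 'speed']
    print(result)         # {'mtu': 9000, 'speed': None, 'duplex': 'full'} (order may vary)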
@@ -81,7 +81,7 @@ class LookupModule(LookupBase):
             )
         if args:
             raise AnsibleError(
-                "unrecognized arguments to with_sequence: %r" % args.keys()
+                "unrecognized arguments to with_sequence: %r" % list(args.keys())
            )
 
     def run(self, terms, variables=None, **kwargs):
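
In this lookup plugin the keys only end up in an error message, but the rendered text changes between Python versions: %r on a Python 3 view prints the dict_keys(...) wrapper, whereas list() keeps the familiar list repr. For example:

    args = {'bogus_option': 1}

    print("unrecognized arguments to with_sequence: %r" % args.keys())
    # Python 3: unrecognized arguments to with_sequence: dict_keys(['bogus_option'])

    print("unrecognized arguments to with_sequence: %r" % list(args.keys()))
    # unrecognized arguments to with_sequence: ['bogus_option']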
@@ -499,12 +499,12 @@ class WapiModule(WapiBase):
             else:
                 test_obj_filter = dict([('name', old_name)])
             # get the object reference
-            ib_obj = self.get_object(ib_obj_type, test_obj_filter, return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, test_obj_filter, return_fields=list(ib_spec.keys()))
             if ib_obj:
                 obj_filter['name'] = new_name
             else:
                 test_obj_filter['name'] = new_name
-                ib_obj = self.get_object(ib_obj_type, test_obj_filter, return_fields=ib_spec.keys())
+                ib_obj = self.get_object(ib_obj_type, test_obj_filter, return_fields=list(ib_spec.keys()))
             update = True
             return ib_obj, update, new_name
         if (ib_obj_type == NIOS_HOST_RECORD):
@@ -538,7 +538,7 @@ class WapiModule(WapiBase):
             # check if test_obj_filter is empty copy passed obj_filter
             else:
                 test_obj_filter = obj_filter
-            ib_obj = self.get_object(ib_obj_type, test_obj_filter.copy(), return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, test_obj_filter.copy(), return_fields=list(ib_spec.keys()))
         elif (ib_obj_type == NIOS_A_RECORD):
             # resolves issue where multiple a_records with same name and different IP address
             test_obj_filter = obj_filter
@@ -548,7 +548,7 @@ class WapiModule(WapiBase):
             except TypeError:
                 ipaddr = obj_filter['ipv4addr']
             test_obj_filter['ipv4addr'] = ipaddr
-            ib_obj = self.get_object(ib_obj_type, test_obj_filter.copy(), return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, test_obj_filter.copy(), return_fields=list(ib_spec.keys()))
         elif (ib_obj_type == NIOS_TXT_RECORD):
             # resolves issue where multiple txt_records with same name and different text
             test_obj_filter = obj_filter
@@ -558,12 +558,12 @@ class WapiModule(WapiBase):
             except TypeError:
                 txt = obj_filter['text']
             test_obj_filter['text'] = txt
-            ib_obj = self.get_object(ib_obj_type, test_obj_filter.copy(), return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, test_obj_filter.copy(), return_fields=list(ib_spec.keys()))
         elif (ib_obj_type == NIOS_ZONE):
             # del key 'restart_if_needed' as nios_zone get_object fails with the key present
             temp = ib_spec['restart_if_needed']
             del ib_spec['restart_if_needed']
-            ib_obj = self.get_object(ib_obj_type, obj_filter.copy(), return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, obj_filter.copy(), return_fields=list(ib_spec.keys()))
             # reinstate restart_if_needed if ib_obj is none, meaning there's no existing nios_zone ref
             if not ib_obj:
                 ib_spec['restart_if_needed'] = temp
@@ -571,12 +571,12 @@ class WapiModule(WapiBase):
             # del key 'create_token' as nios_member get_object fails with the key present
             temp = ib_spec['create_token']
             del ib_spec['create_token']
-            ib_obj = self.get_object(ib_obj_type, obj_filter.copy(), return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, obj_filter.copy(), return_fields=list(ib_spec.keys()))
             if temp:
                 # reinstate 'create_token' key
                 ib_spec['create_token'] = temp
         else:
-            ib_obj = self.get_object(ib_obj_type, obj_filter.copy(), return_fields=ib_spec.keys())
+            ib_obj = self.get_object(ib_obj_type, obj_filter.copy(), return_fields=list(ib_spec.keys()))
         return ib_obj, update, new_name
 
     def on_update(self, proposed_object, ib_spec):
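
These get_object() calls forward the spec's keys as return_fields. A dict_keys view is not a plain list, which starts to matter as soon as the value reaches code that expects a real sequence; JSON serialization is one common example (illustrative only, not the actual WAPI client code):

    import json

    ib_spec = {'name': {}, 'view': {}, 'extattrs': {}}

    # json.dumps(ib_spec.keys())  ->  TypeError: Object of type dict_keys is not JSON serializable
    print(json.dumps(list(ib_spec.keys())))   # ["name", "view", "extattrs"]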
@@ -84,7 +84,7 @@ class UTM:
             raise UTMModuleConfigurationError(
                 "The keys " + to_native(
                     self.change_relevant_keys) + " to check are not in the modules keys:\n" + to_native(
-                    module.params.keys()))
+                    list(module.params.keys())))
 
     def execute(self):
         try:
@@ -1662,7 +1662,7 @@ def main():
             ),
             backing_store=dict(
                 type='str',
-                choices=LXC_BACKING_STORE.keys(),
+                choices=list(LXC_BACKING_STORE.keys()),
                 default='dir'
             ),
             template_options=dict(
@@ -1699,7 +1699,7 @@ def main():
                 type='path'
             ),
             state=dict(
-                choices=LXC_ANSIBLE_STATES.keys(),
+                choices=list(LXC_ANSIBLE_STATES.keys()),
                 default='started'
             ),
             container_command=dict(
@@ -1733,7 +1733,7 @@ def main():
                 type='path',
             ),
             archive_compression=dict(
-                choices=LXC_COMPRESSION_MAP.keys(),
+                choices=list(LXC_COMPRESSION_MAP.keys()),
                 default='gzip'
             )
         ),
@@ -665,7 +665,7 @@ def main():
                 type='dict',
             ),
             state=dict(
-                choices=LXD_ANSIBLE_STATES.keys(),
+                choices=list(LXD_ANSIBLE_STATES.keys()),
                 default='started'
             ),
             target=dict(
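
In both container modules the choices for an option come from a module-level dict, and the argument-spec sanity check (invalid-ansiblemodule-schema) flags choices that are not a plain list, which is also why the two corresponding ignore entries are dropped at the bottom of this commit. A minimal sketch with an illustrative mapping (not the real LXC_BACKING_STORE table):

    from ansible.module_utils.basic import AnsibleModule

    # illustrative subset only -- not the real LXC_BACKING_STORE table
    LXC_BACKING_STORE = {
        'dir': [],
        'lvm': ['--fssize', '--fstype'],
        'btrfs': [],
    }


    def main():
        module = AnsibleModule(
            argument_spec=dict(
                # list(...) turns the dict_keys view into the plain list the argument spec expects
                backing_store=dict(type='str', choices=list(LXC_BACKING_STORE.keys()), default='dir'),
            ),
        )
        module.exit_json(changed=False, backing_store=module.params['backing_store'])


    if __name__ == '__main__':
        main()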
@@ -695,15 +695,15 @@ def update_monitoring_policy(module, oneandone_conn):
         threshold_entities = ['cpu', 'ram', 'disk', 'internal_ping', 'transfer']
 
         _thresholds = []
-        for treshold in thresholds:
-            key = treshold.keys()[0]
+        for threshold in thresholds:
+            key = list(threshold.keys())[0]
             if key in threshold_entities:
                 _threshold = oneandone.client.Threshold(
                     entity=key,
-                    warning_value=treshold[key]['warning']['value'],
-                    warning_alert=str(treshold[key]['warning']['alert']).lower(),
-                    critical_value=treshold[key]['critical']['value'],
-                    critical_alert=str(treshold[key]['critical']['alert']).lower())
+                    warning_value=threshold[key]['warning']['value'],
+                    warning_alert=str(threshold[key]['warning']['alert']).lower(),
+                    critical_value=threshold[key]['critical']['value'],
+                    critical_alert=str(threshold[key]['critical']['alert']).lower())
                 _thresholds.append(_threshold)
 
         if name or description or email or thresholds:
@@ -864,15 +864,15 @@ def create_monitoring_policy(module, oneandone_conn):
         threshold_entities = ['cpu', 'ram', 'disk', 'internal_ping', 'transfer']
 
         _thresholds = []
-        for treshold in thresholds:
-            key = treshold.keys()[0]
+        for threshold in thresholds:
+            key = list(threshold.keys())[0]
         if key in threshold_entities:
                 _threshold = oneandone.client.Threshold(
                     entity=key,
-                    warning_value=treshold[key]['warning']['value'],
-                    warning_alert=str(treshold[key]['warning']['alert']).lower(),
-                    critical_value=treshold[key]['critical']['value'],
-                    critical_alert=str(treshold[key]['critical']['alert']).lower())
+                    warning_value=threshold[key]['warning']['value'],
+                    warning_alert=str(threshold[key]['warning']['alert']).lower(),
+                    critical_value=threshold[key]['critical']['value'],
+                    critical_alert=str(threshold[key]['critical']['alert']).lower())
                 _thresholds.append(_threshold)
 
         _ports = []
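
Two Python 2 idioms get fixed here at once: the treshold/threshold typo and indexing the keys() view, which Python 3 no longer allows. A minimal illustration of the indexing part:

    threshold = {'cpu': {'warning': {'value': 80, 'alert': False},
                         'critical': {'value': 95, 'alert': True}}}

    # Python 2: threshold.keys()[0] returned 'cpu'.
    # Python 3: "TypeError: 'dict_keys' object is not subscriptable".
    key = list(threshold.keys())[0]
    print(key, threshold[key]['critical']['value'])   # cpu 95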
@@ -128,7 +128,7 @@ def update_vcn(virtual_network_client, module):
         primitive_params_update=["vcn_id"],
         kwargs_non_primitive_update={UpdateVcnDetails: "update_vcn_details"},
         module=module,
-        update_attributes=UpdateVcnDetails().attribute_map.keys(),
+        update_attributes=list(UpdateVcnDetails().attribute_map.keys()),
     )
     return result
 
@@ -1305,10 +1305,8 @@ def expand_tags(eg_launchspec, tags):
 
     for tag in tags:
         eg_tag = spotinst.aws_elastigroup.Tag()
-        if tag.keys():
-            eg_tag.tag_key = tag.keys()[0]
-        if tag.values():
-            eg_tag.tag_value = tag.values()[0]
+        if tag:
+            eg_tag.tag_key, eg_tag.tag_value = list(tag.items())[0]
 
         eg_tags.append(eg_tag)
 
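
The rewrite collapses the two keys()[0]/values()[0] look-ups, which fail on Python 3 views, into a single items() unpacking; an empty tag dict is simply falsy, so `if tag:` keeps the old guard. A standalone sketch of the tag expansion logic, using a plain namespace object in place of the real spotinst Tag class:

    from types import SimpleNamespace

    tags = [{'Environment': 'production'}, {'Team': 'platform'}, {}]

    eg_tags = []
    for tag in tags:
        eg_tag = SimpleNamespace(tag_key=None, tag_value=None)   # stand-in for spotinst.aws_elastigroup.Tag()
        if tag:   # skip empty tag dicts; tag.keys()[0] would also raise TypeError on Python 3
            eg_tag.tag_key, eg_tag.tag_value = list(tag.items())[0]
        eg_tags.append(eg_tag)

    print([(t.tag_key, t.tag_value) for t in eg_tags])
    # [('Environment', 'production'), ('Team', 'platform'), (None, None)]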
@@ -596,7 +596,7 @@ class Rhsm(RegistrationBase):
 
         if missing_pools or serials:
             changed = True
-        return {'changed': changed, 'subscribed_pool_ids': missing_pools.keys(),
+        return {'changed': changed, 'subscribed_pool_ids': list(missing_pools.keys()),
                 'unsubscribed_serials': serials}
 
     def sync_syspurpose(self):
@@ -165,7 +165,7 @@ def main():
 
     # Check that Category is valid
     if category not in CATEGORY_COMMANDS_ALL:
-        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
+        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, list(CATEGORY_COMMANDS_ALL.keys()))))
 
     # Check that all commands are valid
     for cmd in command_list:
@@ -279,7 +279,7 @@ def main():
 
     # Check that Category is valid
     if category not in CATEGORY_COMMANDS_ALL:
-        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
+        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, list(CATEGORY_COMMANDS_ALL.keys()))))
 
     # Check that all commands are valid
     for cmd in command_list:
@@ -202,7 +202,7 @@ def main():
 
     # Check that Category is valid
     if category not in CATEGORY_COMMANDS_ALL:
-        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
+        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, list(CATEGORY_COMMANDS_ALL.keys()))))
 
     # Check that all commands are valid
     for cmd in command_list:
@@ -639,7 +639,7 @@ def main():
 
     # Check that Category is valid
     if category not in CATEGORY_COMMANDS_ALL:
-        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
+        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, list(CATEGORY_COMMANDS_ALL.keys()))))
 
     # Check that all commands are valid
     for cmd in command_list:
@@ -288,7 +288,7 @@ def main():
 
     # Check that Category is valid
     if category not in CATEGORY_COMMANDS_ALL:
-        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
+        module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, list(CATEGORY_COMMANDS_ALL.keys()))))
 
     # Check that all commands are valid
     for cmd in command_list:
@@ -327,7 +327,7 @@ def inventory_vdos(module, vdocmd):
         vdoyamls = vdostatusyaml['VDOs']
 
         if vdoyamls is not None:
-            vdolist = vdoyamls.keys()
+            vdolist = list(vdoyamls.keys())
 
     return vdolist
 
@@ -181,7 +181,7 @@ class NSoTInventory(object):
                 sys.exit('%s\n' % e)
         else:  # Use defaults if env var missing
             self._config_default()
-        self.groups = self.config.keys()
+        self.groups = list(self.config.keys())
         self.client = get_api_client()
         self._meta = {'hostvars': dict()}
 
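
vdo and the nsot inventory script both keep the keys around for later use; storing the raw view would hand later code a live object tied to the source dict, while list() takes a stable snapshot that also supports list-only operations. A small illustration:

    config = {'routers': {}, 'switches': {}}

    groups_view = config.keys()         # live view, tied to config
    groups_list = list(config.keys())   # snapshot taken now

    config['firewalls'] = {}
    print(sorted(groups_view))   # ['firewalls', 'routers', 'switches']
    print(groups_list)           # ['routers', 'switches']

    groups_list.append('load_balancers')   # list-only operations work on the snapshot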
@@ -9,9 +9,7 @@ plugins/modules/cloud/linode/linode.py validate-modules:parameter-type-not-in-do
 plugins/modules/cloud/linode/linode.py validate-modules:undocumented-parameter
 plugins/modules/cloud/linode/linode_v4.py validate-modules:parameter-list-no-elements
 plugins/modules/cloud/lxc/lxc_container.py use-argspec-type-path
-plugins/modules/cloud/lxc/lxc_container.py validate-modules:invalid-ansiblemodule-schema
 plugins/modules/cloud/lxc/lxc_container.py validate-modules:use-run-command-not-popen
-plugins/modules/cloud/lxd/lxd_container.py validate-modules:invalid-ansiblemodule-schema
 plugins/modules/cloud/misc/rhevm.py validate-modules:parameter-state-invalid-choice
 plugins/modules/cloud/oneandone/oneandone_firewall_policy.py validate-modules:parameter-list-no-elements
 plugins/modules/cloud/oneandone/oneandone_load_balancer.py validate-modules:parameter-list-no-elements
@@ -8,9 +8,7 @@ plugins/modules/cloud/linode/linode.py validate-modules:parameter-type-not-in-do
 plugins/modules/cloud/linode/linode.py validate-modules:undocumented-parameter
 plugins/modules/cloud/linode/linode_v4.py validate-modules:parameter-list-no-elements
 plugins/modules/cloud/lxc/lxc_container.py use-argspec-type-path
-plugins/modules/cloud/lxc/lxc_container.py validate-modules:invalid-ansiblemodule-schema
 plugins/modules/cloud/lxc/lxc_container.py validate-modules:use-run-command-not-popen
-plugins/modules/cloud/lxd/lxd_container.py validate-modules:invalid-ansiblemodule-schema
 plugins/modules/cloud/misc/rhevm.py validate-modules:parameter-state-invalid-choice
 plugins/modules/cloud/oneandone/oneandone_firewall_policy.py validate-modules:parameter-list-no-elements
 plugins/modules/cloud/oneandone/oneandone_load_balancer.py validate-modules:parameter-list-no-elements