Mirror of https://github.com/ansible-collections/community.general.git
Synced 2024-09-14 20:13:21 +02:00
Fix linting errors; fix some real bugs (#5111)

* Fix linting errors.
* Fix bugs.
* Another linter error ignored.
* More fixes.
* Ignore sanity errors with older versions. ci_complete
* Forgot to commit more changes.
parent 0338eb7a7c
commit a54af8909c

52 changed files with 115 additions and 94 deletions
changelogs/fragments/5111-fixes.yml (new file, 7 additions)

@@ -0,0 +1,7 @@
+bugfixes:
+  - "funcd connection plugin - fix signature of ``exec_command`` (https://github.com/ansible-collections/community.general/pull/5111)."
+  - "packet_ip_subnet - fix error reporting in case of invalid CIDR prefix lengths (https://github.com/ansible-collections/community.general/pull/5111)."
+  - "dnsimple_info - correctly report missing library as ``requests`` and not ``another_library`` (https://github.com/ansible-collections/community.general/pull/5111)."
+  - "pip_package_info - remove usage of global variable (https://github.com/ansible-collections/community.general/pull/5111)."
+  - "manageiq_alert_profiles - avoid crash when reporting unknown profile caused by trying to return an undefined variable (https://github.com/ansible-collections/community.general/pull/5111)."
+  - "apache2_mod_proxy - avoid crash when reporting inability to parse balancer_member_page HTML caused by using an undefined variable in the error message (https://github.com/ansible-collections/community.general/pull/5111)."
@@ -233,13 +233,13 @@ class CallbackModule(CallbackModule_default):
         # Remove non-essential attributes
         for attr in self.removed_attributes:
             if attr in result:
-                del(result[attr])
+                del result[attr]

         # Remove empty attributes (list, dict, str)
         for attr in result.copy():
             if isinstance(result[attr], (MutableSequence, MutableMapping, binary_type, text_type)):
                 if not result[attr]:
-                    del(result[attr])
+                    del result[attr]

     def _handle_exceptions(self, result):
         if 'exception' in result:
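Most of the changes in this commit are of the kind shown above: `del`, `return`, `if`, `not`, and `raise` are statements or operators, not functions, so the surrounding parentheses are either redundant or, in one case further down, an actual bug. A minimal standalone sketch of the `del` case (variable names are illustrative, not taken from the callback plugin):

result = {'changed': True, 'rc': 0, 'invocation': {}}

del (result['rc'])         # legal: 'del' is a statement, the parentheses only group
del result['invocation']   # idiomatic form preferred by the linters
print(result)              # {'changed': True}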
@@ -64,7 +64,7 @@ class Connection(ConnectionBase):
         self.client = fc.Client(self.host)
         return self

-    def exec_command(self, cmd, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
+    def exec_command(self, cmd, in_data=None, sudoable=True):
         """ run a command on the remote minion """

         if in_data:
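The old funcd override diverged from the documented signature of ConnectionBase.exec_command, which the new line mirrors. A minimal sketch, in plain Python rather than the real plugin API and with illustrative class names, of how such a divergence can be spotted:

import inspect

class Base:
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Documented contract."""

class OldStyle(Base):
    def exec_command(self, cmd, become_user=None, sudoable=False,
                     executable='/bin/sh', in_data=None):
        """Override with a diverging parameter list."""

class NewStyle(Base):
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Override that mirrors the base class."""

base_sig = inspect.signature(Base.exec_command)
print(inspect.signature(OldStyle.exec_command) == base_sig)  # False
print(inspect.signature(NewStyle.exec_command) == base_sig)  # True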
@@ -92,7 +92,7 @@ class iLORedfishUtils(RedfishUtils):
         data = response['data']

         ntp_list = data[setkey]
-        if(len(ntp_list) == 2):
+        if len(ntp_list) == 2:
             ntp_list.pop(0)

         ntp_list.append(mgr_attributes['mgr_attr_value'])
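The `if(...)` changes here and in several modules below are purely cosmetic: `if` takes an expression, so the extra parentheses add nothing. A tiny illustration (list contents are made up):

ntp_list = ['0.pool.ntp.org', '1.pool.ntp.org', '2.pool.ntp.org']

if (len(ntp_list) > 2):   # works, but the parentheses are redundant
    ntp_list.pop(0)
if len(ntp_list) > 2:     # preferred spelling
    ntp_list.pop(0)
print(ntp_list)           # ['1.pool.ntp.org', '2.pool.ntp.org']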
@@ -79,7 +79,7 @@ def memset_api_call(api_key, api_method, payload=None):
     if msg is None:
         msg = response.json()

-    return(has_failed, msg, response)
+    return has_failed, msg, response


 def check_zone_domain(data, domain):
@@ -93,7 +93,7 @@ def check_zone_domain(data, domain):
         if zone_domain['domain'] == domain:
             exists = True

-    return(exists)
+    return exists


 def check_zone(data, name):
@@ -110,7 +110,7 @@ def check_zone(data, name):
     if counter == 1:
         exists = True

-    return(exists, counter)
+    return exists, counter


 def get_zone_id(zone_name, current_zones):
@@ -136,4 +136,4 @@ def get_zone_id(zone_name, current_zones):
         zone_id = None
         msg = 'Zone ID could not be returned as duplicate zone names were detected'

-    return(zone_exists, msg, counter, zone_id)
+    return zone_exists, msg, counter, zone_id
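The memset helpers above only lose the parentheses around their return values; behaviour is unchanged, because `return(exists, counter)` already built a tuple from the parenthesized expression. A standalone sketch (the helper name and data are illustrative, not the real memset code):

def check_zone_sketch(zones, name):
    counter = sum(1 for zone in zones if zone == name)
    exists = counter == 1
    return exists, counter        # identical result to: return (exists, counter)

print(check_zone_sketch(['example.com', 'other.org'], 'example.com'))   # (True, 1)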
@@ -123,8 +123,7 @@ def rax_find_image(module, rax_module, image, exit=True):
     except ValueError:
         try:
             image = cs.images.find(human_id=image)
-        except(cs.exceptions.NotFound,
-               cs.exceptions.NoUniqueMatch):
+        except (cs.exceptions.NotFound, cs.exceptions.NoUniqueMatch):
             try:
                 image = cs.images.find(name=image)
             except (cs.exceptions.NotFound,
@@ -2061,7 +2061,7 @@ class RedfishUtils(object):
             if property in data:
                 nic[property] = data[property]
         result['entries'] = nic
-        return(result)
+        return result

     def get_nic_inventory(self, resource_uri):
         result = {}
@@ -16,6 +16,7 @@ try:
     from redis import Redis
     from redis import __version__ as redis_version
     HAS_REDIS_PACKAGE = True
+    REDIS_IMP_ERR = None
 except ImportError:
     REDIS_IMP_ERR = traceback.format_exc()
     HAS_REDIS_PACKAGE = False
@@ -23,6 +24,7 @@ except ImportError:
 try:
     import certifi
     HAS_CERTIFI_PACKAGE = True
+    CERTIFI_IMPORT_ERROR = None
 except ImportError:
     CERTIFI_IMPORT_ERROR = traceback.format_exc()
     HAS_CERTIFI_PACKAGE = False
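Many hunks in this commit (redis, certifi, etcd3, dnf, gitlab, keyring, pdpyras, pyrfc, xmltodict, ipaddress) apply the same pattern: also define the *_IMP_ERR / *_IMPORT_ERROR name on the successful import branch, so later code never references an undefined name. A minimal sketch of the pattern; the consumer function and its module argument are assumptions for illustration:

import traceback

try:
    import redis  # noqa: F401
    HAS_REDIS_PACKAGE = True
    REDIS_IMP_ERR = None          # defined on both paths
except ImportError:
    REDIS_IMP_ERR = traceback.format_exc()
    HAS_REDIS_PACKAGE = False


def ensure_redis(module):
    # typical consumer: fail with the captured traceback when the library is absent
    if not HAS_REDIS_PACKAGE:
        module.fail_json(msg="Missing the required 'redis' Python library",
                         exception=REDIS_IMP_ERR)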
@@ -112,7 +112,7 @@ def poll_reload_status(api_key=None, job_id=None, payload=None):
         memset_api = response.json()
         msg = None

-    return(memset_api, msg, stderr)
+    return memset_api, msg, stderr


 def reload_dns(args=None):
@@ -134,7 +134,7 @@ def reload_dns(args=None):
         retvals['failed'] = has_failed
         retvals['memset_api'] = response.json()
         retvals['msg'] = msg
-        return(retvals)
+        return retvals

     # set changed to true if the reload request was accepted.
     has_changed = True
@@ -154,7 +154,7 @@ def reload_dns(args=None):
         if val is not None:
            retvals[val] = eval(val)

-    return(retvals)
+    return retvals


 def main():
@@ -128,7 +128,7 @@ def get_facts(args=None):
         retvals['failed'] = has_failed
         retvals['msg'] = msg
         retvals['stderr'] = "API returned an error: {0}" . format(response.status_code)
-        return(retvals)
+        return retvals

     # we don't want to return the same thing twice
     msg = None
@@ -140,7 +140,7 @@ def get_facts(args=None):
         if val is not None:
             retvals[val] = eval(val)

-    return(retvals)
+    return retvals


 def main():
@@ -253,7 +253,7 @@ def get_facts(args=None):
         retvals['failed'] = has_failed
         retvals['msg'] = msg
         retvals['stderr'] = "API returned an error: {0}" . format(response.status_code)
-        return(retvals)
+        return retvals

     # we don't want to return the same thing twice
     msg = None
@@ -265,7 +265,7 @@ def get_facts(args=None):
         if val is not None:
             retvals[val] = eval(val)

-    return(retvals)
+    return retvals


 def main():
@@ -140,7 +140,7 @@ def check(args=None):
     retvals['changed'] = has_changed
     retvals['failed'] = has_failed

-    return(retvals)
+    return retvals


 def create_zone(args=None, zone_exists=None, payload=None):
@@ -186,7 +186,7 @@ def create_zone(args=None, zone_exists=None, payload=None):
         _has_failed, _msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
         memset_api = response.json()

-    return(has_failed, has_changed, memset_api, msg)
+    return has_failed, has_changed, memset_api, msg


 def delete_zone(args=None, zone_exists=None, payload=None):
@@ -234,7 +234,7 @@ def delete_zone(args=None, zone_exists=None, payload=None):
     else:
         has_failed, has_changed = False, False

-    return(has_failed, has_changed, memset_api, msg)
+    return has_failed, has_changed, memset_api, msg


 def create_or_delete(args=None):
@@ -256,7 +256,7 @@ def create_or_delete(args=None):
         retvals['failed'] = _has_failed
         retvals['msg'] = _msg

-        return(retvals)
+        return retvals

     zone_exists, _msg, counter, _zone_id = get_zone_id(zone_name=args['name'], current_zones=response.json())

@@ -272,7 +272,7 @@ def create_or_delete(args=None):
         if val is not None:
             retvals[val] = eval(val)

-    return(retvals)
+    return retvals


 def main():
@@ -111,7 +111,7 @@ def check(args=None):
     retvals['changed'] = has_changed
     retvals['failed'] = has_failed

-    return(retvals)
+    return retvals


 def create_zone_domain(args=None, zone_exists=None, zone_id=None, payload=None):
@@ -139,7 +139,7 @@ def create_zone_domain(args=None, zone_exists=None, zone_id=None, payload=None):
     if not has_failed:
         has_changed = True

-    return(has_failed, has_changed, msg)
+    return has_failed, has_changed, msg


 def delete_zone_domain(args=None, payload=None):
@@ -166,7 +166,7 @@ def delete_zone_domain(args=None, payload=None):
         # unset msg as we don't want to return unnecessary info to the user.
         msg = None

-    return(has_failed, has_changed, memset_api, msg)
+    return has_failed, has_changed, memset_api, msg


 def create_or_delete_domain(args=None):
@@ -189,7 +189,7 @@ def create_or_delete_domain(args=None):
         retvals['failed'] = has_failed
         retvals['msg'] = msg
         retvals['stderr'] = "API returned an error: {0}" . format(response.status_code)
-        return(retvals)
+        return retvals

     zone_exists, msg, counter, zone_id = get_zone_id(zone_name=args['zone'], current_zones=response.json())

@@ -204,7 +204,7 @@ def create_or_delete_domain(args=None):

         retvals['failed'] = has_failed
         retvals['msg'] = stderr
-        return(retvals)
+        return retvals

     if args['state'] == 'present':
         has_failed, has_changed, msg = create_zone_domain(args=args, zone_exists=zone_exists, zone_id=zone_id, payload=payload)
@@ -218,7 +218,7 @@ def create_or_delete_domain(args=None):
         if val is not None:
             retvals[val] = eval(val)

-    return(retvals)
+    return retvals


 def main():
@@ -222,7 +222,7 @@ def create_zone_record(args=None, zone_id=None, records=None, payload=None):
                 # nothing to do; record is already correct so we populate
                 # the return var with the existing record's details.
                 memset_api = zone_record
-                return(has_changed, has_failed, memset_api, msg)
+                return has_changed, has_failed, memset_api, msg
             else:
                 # merge dicts ensuring we change any updated values
                 payload = zone_record.copy()
@@ -232,7 +232,7 @@ def create_zone_record(args=None, zone_id=None, records=None, payload=None):
                     has_changed = True
                     # return the new record to the user in the returned var.
                     memset_api = new_record
                    return(has_changed, has_failed, memset_api, msg)
-                    return(has_changed, has_failed, memset_api, msg)
+                    return has_changed, has_failed, memset_api, msg
                has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
                if not has_failed:
                    has_changed = True
@@ -247,7 +247,7 @@ def create_zone_record(args=None, zone_id=None, records=None, payload=None):
             has_changed = True
             # populate the return var with the new record's details.
             memset_api = new_record
-            return(has_changed, has_failed, memset_api, msg)
+            return has_changed, has_failed, memset_api, msg
         has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
         if not has_failed:
             has_changed = True
@@ -255,7 +255,7 @@ def create_zone_record(args=None, zone_id=None, records=None, payload=None):
             # empty msg as we don't want to return a boatload of json to the user.
             msg = None

-    return(has_changed, has_failed, memset_api, msg)
+    return has_changed, has_failed, memset_api, msg


 def delete_zone_record(args=None, records=None, payload=None):
@@ -271,7 +271,7 @@ def delete_zone_record(args=None, records=None, payload=None):
     for zone_record in records:
         if args['check_mode']:
             has_changed = True
-            return(has_changed, has_failed, memset_api, msg)
+            return has_changed, has_failed, memset_api, msg
         payload['id'] = zone_record['id']
         api_method = 'dns.zone_record_delete'
         has_failed, msg, response = memset_api_call(api_key=args['api_key'], api_method=api_method, payload=payload)
@@ -281,7 +281,7 @@ def delete_zone_record(args=None, records=None, payload=None):
             # empty msg as we don't want to return a boatload of json to the user.
             msg = None

-    return(has_changed, has_failed, memset_api, msg)
+    return has_changed, has_failed, memset_api, msg


 def create_or_delete(args=None):
@@ -305,7 +305,7 @@ def create_or_delete(args=None):
         retvals['failed'] = _has_failed
         retvals['msg'] = msg
         retvals['stderr'] = "API returned an error: {0}" . format(response.status_code)
-        return(retvals)
+        return retvals

     zone_exists, _msg, counter, zone_id = get_zone_id(zone_name=args['zone'], current_zones=response.json())

@@ -318,7 +318,7 @@ def create_or_delete(args=None):
         retvals['failed'] = has_failed
         retvals['msg'] = stderr
         retvals['stderr'] = stderr
-        return(retvals)
+        return retvals

     # get a list of all records ( as we can't limit records by zone)
     api_method = 'dns.zone_record_list'
@@ -340,7 +340,7 @@ def create_or_delete(args=None):
         if val is not None:
             retvals[val] = eval(val)

-    return(retvals)
+    return retvals


 def main():
@@ -1253,7 +1253,6 @@ def setChanged():


 def setMsg(message):
-    global failed
     msg.append(message)


@@ -217,7 +217,7 @@ def parse_subnet_cidr(cidr):
     try:
         prefixlen = int(prefixlen)
     except ValueError:
-        raise("Wrong prefix length in CIDR expression {0}".format(cidr))
+        raise Exception("Wrong prefix length in CIDR expression {0}".format(cidr))
     return addr, prefixlen


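This is one of the real bugs from the changelog: in Python 3, raising a plain string is itself a TypeError ("exceptions must derive from BaseException"), so the intended error message never reached the user until the string was wrapped in an Exception. A small standalone sketch of the failure class (function name and values are illustrative):

def parse_prefixlen(cidr, prefixlen):
    try:
        return int(prefixlen)
    except ValueError:
        # old form: raise("Wrong prefix length ...")  ->  TypeError, original message lost
        raise Exception("Wrong prefix length in CIDR expression {0}".format(cidr))

try:
    parse_prefixlen("10.0.0.0/abc", "abc")
except Exception as exc:
    print(exc)   # Wrong prefix length in CIDR expression 10.0.0.0/abc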
@@ -184,7 +184,7 @@ def remove_datacenter(module, profitbricks):
     name = module.params.get('name')
     changed = False

-    if(uuid_match.match(name)):
+    if uuid_match.match(name):
         _remove_datacenter(module, profitbricks, name)
         changed = True
     else:
@@ -325,7 +325,7 @@ def delete_volume(module, profitbricks):
             break

     for n in instance_ids:
-        if(uuid_match.match(n)):
+        if uuid_match.match(n):
             _delete_volume(module, profitbricks, datacenter, n)
             changed = True
         else:
@@ -143,6 +143,7 @@ except ImportError:
     IPADDRESS_IMP_ERR = traceback.format_exc()
     HAS_IPADDRESS = False
 else:
+    IPADDRESS_IMP_ERR = None
     HAS_IPADDRESS = True


@@ -127,6 +127,7 @@ from ansible.module_utils.common.text.converters import to_native
 try:
     import etcd3
     HAS_ETCD = True
+    ETCD_IMP_ERR = None
 except ImportError:
     ETCD_IMP_ERR = traceback.format_exc()
     HAS_ETCD = False
@@ -310,7 +310,7 @@ def do_ini(module, filename, section=None, option=None, values=None,
                 # override option with no value to option with value if not allow_no_value
                 if len(values) > 0:
                     for index, line in enumerate(section_lines):
-                        if not changed_lines[index] and match_active_opt(option, section_lines[index]):
+                        if not changed_lines[index] and match_active_opt(option, section_lines[index]):  # pylint: disable=unnecessary-list-index-lookup
                             newline = assignment_format % (option, values.pop(0))
                             (changed, msg) = update_section_line(changed, section_lines, index, changed_lines, newline, msg)
                             if len(values) == 0:
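The ini_file change only appends a pylint pragma. Inside an enumerate() loop, `section_lines[index]` re-reads the object already bound to `line`, which is exactly what pylint's unnecessary-list-index-lookup points out; the commit keeps the lookup and silences the check instead of rewriting the loop (the matching bad-option-value ignores for older sanity runs are added near the end of this diff). Illustration with made-up data:

section_lines = ['a = 1', 'b = 2', '; comment']
for index, line in enumerate(section_lines):
    assert section_lines[index] is line   # the subscript fetches what `line` already holds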
@@ -446,8 +446,8 @@ def ensure(module, client):
             module_otptoken['all'] = True
             ipa_otptoken = client.otptoken_add(name=uniqueid, item=module_otptoken)
     else:
-        if not(validate_modifications(ansible_to_ipa, module, ipa_otptoken,
-                                      module_otptoken, unmodifiable_after_creation)):
+        if not validate_modifications(ansible_to_ipa, module, ipa_otptoken,
+                                      module_otptoken, unmodifiable_after_creation):
             module.fail_json(msg="Modifications requested in module are not valid")

         # IPA will reject 'modifications' that do not actually modify anything
@@ -87,6 +87,7 @@ from os import path
 try:
     from pdpyras import APISession
     HAS_PD_PY = True
+    PD_IMPORT_ERR = None
 except ImportError:
     HAS_PD_PY = False
     PD_IMPORT_ERR = traceback.format_exc()
@@ -94,6 +95,7 @@ except ImportError:
 try:
     from pdpyras import PDClientError
     HAS_PD_CLIENT_ERR = True
+    PD_CLIENT_ERR_IMPORT_ERR = None
 except ImportError:
     HAS_PD_CLIENT_ERR = False
     PD_CLIENT_ERR_IMPORT_ERR = traceback.format_exc()
@@ -234,10 +234,11 @@ import json
 try:
     from requests import Request, Session
 except ImportError:
-    HAS_ANOTHER_LIBRARY = False
-    ANOTHER_LIBRARY_IMPORT_ERROR = traceback.format_exc()
+    HAS_REQUESTS = False
+    REQUESTS_IMPORT_ERROR = traceback.format_exc()
 else:
-    HAS_ANOTHER_LIBRARY = True
+    HAS_REQUESTS = True
+    REQUESTS_IMPORT_ERROR = None


 def build_url(account, key, is_sandbox):
@@ -262,7 +263,7 @@ def iterate_data(module, request_object):
             request_object.url = base_url + '&page=' + str(page)
             new_results = Session().send(request_object)
             data = data + new_results.json()["data"]
-        return(data)
+        return data
     else:
         module.fail_json('API Call failed, check ID, key and sandbox values')

@@ -306,11 +307,10 @@ def main():
                         params['api_key'],
                         params['sandbox'])

-    if not HAS_ANOTHER_LIBRARY:
+    if not HAS_REQUESTS:
         module.exit_json(
-            msg=missing_required_lib('another_library'),
-            exception=ANOTHER_LIBRARY_IMPORT_ERROR)
+            msg=missing_required_lib('requests'),
+            exception=REQUESTS_IMPORT_ERROR)

     # At minimum we need account and key
     if params['account_id'] and params['api_key']:
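The dnsimple_info fix replaces copy-pasted boilerplate names (HAS_ANOTHER_LIBRARY, another_library) with the real dependency, so a user missing `requests` is now told so. A condensed sketch of the corrected path; it assumes `missing_required_lib` is imported from ansible.module_utils.basic, as Ansible modules normally do, and the check_deps helper is illustrative:

import traceback
from ansible.module_utils.basic import missing_required_lib

try:
    from requests import Request, Session  # noqa: F401
except ImportError:
    HAS_REQUESTS = False
    REQUESTS_IMPORT_ERROR = traceback.format_exc()
else:
    HAS_REQUESTS = True
    REQUESTS_IMPORT_ERROR = None


def check_deps(module):
    # the user now sees "requests" named as the missing library
    if not HAS_REQUESTS:
        module.exit_json(msg=missing_required_lib('requests'),
                         exception=REQUESTS_IMPORT_ERROR)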
@@ -98,13 +98,13 @@ from ansible.module_utils.facts.packages import CLIMgr

 class PIP(CLIMgr):

-    def __init__(self, pip):
+    def __init__(self, pip, module):

         self.CLI = pip
+        self.module = module

     def list_installed(self):
-        global module
-        rc, out, err = module.run_command([self._cli, 'list', '-l', '--format=json'])
+        rc, out, err = self.module.run_command([self._cli, 'list', '-l', '--format=json'])
         if rc != 0:
             raise Exception("Unable to list packages rc=%s : %s" % (rc, err))
         return json.loads(out)
@@ -117,7 +117,6 @@ class PIP(CLIMgr):
 def main():

     # start work
-    global module
     module = AnsibleModule(
         argument_spec=dict(
             clients=dict(type='list', elements='path', default=['pip']),
@@ -134,7 +133,7 @@ def main():
             module.warn('Skipping invalid pip client: %s' % (pip))
             continue
         try:
-            pip_mgr = PIP(pip)
+            pip_mgr = PIP(pip, module)
             if pip_mgr.is_available():
                 found += 1
                 packages[pip] = pip_mgr.get_packages()
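pip_package_info previously reached for a module-level `global module` inside the CLIMgr subclass; the fix passes the AnsibleModule object into the constructor and stores it on the instance. A condensed sketch of the refactor (PIPLike is an illustrative stand-in, not the real CLIMgr subclass):

import json

class PIPLike(object):
    def __init__(self, pip, module):
        self.CLI = pip
        self.module = module        # injected, no `global module` needed

    def list_installed(self):
        rc, out, err = self.module.run_command([self.CLI, 'list', '-l', '--format=json'])
        if rc != 0:
            raise Exception("Unable to list packages rc=%s : %s" % (rc, err))
        return json.loads(out)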
@@ -222,7 +222,7 @@ class Yarn(object):
             rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
             return out, err

-        return(None, None)
+        return None, None

     def list(self):
         cmd = ['list', '--depth=0', '--json']
@@ -85,6 +85,7 @@ try:
     import dnf.repodict
     from dnf.conf import Conf
     HAS_DNF_PACKAGES = True
+    DNF_IMP_ERR = None
 except ImportError:
     DNF_IMP_ERR = traceback.format_exc()
     HAS_DNF_PACKAGES = False
@@ -163,7 +163,7 @@ OUTDATED_FLATPAK_VERSION_ERROR_MESSAGE = "Unknown option --columns=application"

 def install_flat(module, binary, remote, names, method, no_dependencies):
     """Add new flatpaks."""
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     uri_names = []
     id_names = []
     for name in names:
@@ -190,7 +190,7 @@ def install_flat(module, binary, remote, names, method, no_dependencies):

 def uninstall_flat(module, binary, names, method):
     """Remove existing flatpaks."""
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     installed_flat_names = [
         _match_installed_flat_name(module, binary, name, method)
         for name in names
@@ -225,7 +225,7 @@ def _match_installed_flat_name(module, binary, name, method):
     # This is a difficult function, since if the user supplies a flatpakref url,
     # we have to rely on a naming convention:
     # The flatpakref file name needs to match the flatpak name
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     parsed_name = _parse_flatpak_name(name)
     # Try running flatpak list with columns feature
     command = [binary, "list", "--{0}".format(method), "--app", "--columns=application"]
@@ -249,7 +249,7 @@ def _match_installed_flat_name(module, binary, name, method):


 def _match_flat_using_outdated_flatpak_format(module, binary, parsed_name, method):
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     command = [binary, "list", "--{0}".format(method), "--app", "--columns=application"]
     output = _flatpak_command(module, False, command)
     for row in output.split('\n'):
@@ -258,7 +258,7 @@ def _match_flat_using_outdated_flatpak_format(module, binary, parsed_name, metho


 def _match_flat_using_flatpak_column_feature(module, binary, parsed_name, method):
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     command = [binary, "list", "--{0}".format(method), "--app"]
     output = _flatpak_command(module, False, command)
     for row in output.split('\n'):
@@ -277,7 +277,7 @@ def _parse_flatpak_name(name):


 def _flatpak_version(module, binary):
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     command = [binary, "--version"]
     output = _flatpak_command(module, False, command)
     version_number = output.split()[1]
@@ -285,7 +285,7 @@ def _flatpak_version(module, binary):


 def _flatpak_command(module, noop, command, ignore_failure=False):
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     result['command'] = ' '.join(command)
     if noop:
         result['rc'] = 0
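The flatpak, flatpak_remote, and parted hunks all annotate `global` declarations with pylint: disable=global-variable-not-assigned. The warning fires because `global` is only required to rebind a module-level name, while these helpers merely mutate the shared `result` dict; the commit keeps the declarations and silences the check rather than changing behaviour. A minimal illustration of the distinction:

result = {'changed': False}

def mutate():
    # no `global` needed: mutating the dict the name already points to
    result['changed'] = True

def rebind():
    global result          # needed: this assigns a new object to the name
    result = {'changed': True}

mutate()
print(result)   # {'changed': True}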
@@ -125,7 +125,7 @@ from ansible.module_utils.common.text.converters import to_bytes, to_native

 def add_remote(module, binary, name, flatpakrepo_url, method):
     """Add a new remote."""
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     command = [binary, "remote-add", "--{0}".format(method), name, flatpakrepo_url]
     _flatpak_command(module, module.check_mode, command)
     result['changed'] = True
@@ -133,7 +133,7 @@ def add_remote(module, binary, name, flatpakrepo_url, method):

 def remove_remote(module, binary, name, method):
     """Remove an existing remote."""
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     command = [binary, "remote-delete", "--{0}".format(method), "--force", name]
     _flatpak_command(module, module.check_mode, command)
     result['changed'] = True
@@ -154,7 +154,7 @@ def remote_exists(module, binary, name, method):


 def _flatpak_command(module, noop, command):
-    global result
+    global result  # pylint: disable=global-variable-not-assigned
     result['command'] = ' '.join(command)
     if noop:
         result['rc'] = 0
@@ -159,7 +159,7 @@ def upgrade(module, xbps_path):

     rc, stdout, stderr = module.run_command(cmdneedupgrade, check_rc=False)
     if rc == 0:
-        if(len(stdout.splitlines()) == 0):
+        if len(stdout.splitlines()) == 0:
             module.exit_json(changed=False, msg='Nothing to upgrade')
         elif module.check_mode:
             module.exit_json(changed=True, msg='Would have performed upgrade')
@@ -239,7 +239,7 @@ class ManageIQAlertProfiles(object):
         except Exception as e:
             msg = "Updating profile '{name}' failed: {error}"
             msg = msg.format(name=old_profile['name'], error=e)
-            self.module.fail_json(msg=msg, result=result)
+            self.module.fail_json(msg=msg)

         if changed:
             msg = "Profile {name} updated successfully".format(name=desired_profile['name'])
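The manageiq_alert_profiles crash came from the failure path itself: the except block passed `result=result` to fail_json, but no `result` exists in that scope, so reporting the original error raised NameError instead. A minimal sketch of the failure class (function names and the print-based fail_json are illustrative):

def report_failure(fail_json, error):
    msg = "Updating profile failed: {error}".format(error=error)
    # old code: fail_json(msg=msg, result=result) -> NameError: name 'result' is not defined,
    # which buried the real error; the fix simply drops the undefined argument.
    fail_json(msg=msg)

report_failure(lambda **kw: print(kw), "boom")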
@@ -161,6 +161,7 @@ import traceback
 try:
     import gitlab
     HAS_PY_GITLAB = True
+    GITLAB_IMP_ERR = None
 except ImportError:
     GITLAB_IMP_ERR = traceback.format_exc()
     HAS_PY_GITLAB = False
@@ -164,6 +164,7 @@ import traceback
 try:
     import gitlab
     HAS_PY_GITLAB = True
+    GITLAB_IMP_ERR = None
 except ImportError:
     GITLAB_IMP_ERR = traceback.format_exc()
     HAS_PY_GITLAB = False
@@ -189,7 +189,7 @@ from ansible.module_utils.common.text.converters import to_native
 from ansible_collections.community.general.plugins.module_utils.gitlab import auth_argument_spec, gitlab_authentication

 try:
-    cmp
+    cmp  # pylint: disable=used-before-assignment
 except NameError:
     def cmp(a, b):
         return (a > b) - (a < b)
@@ -212,6 +212,7 @@ except ImportError:
     XMLTODICT_LIBRARY_IMPORT_ERROR = traceback.format_exc()
 else:
     HAS_XMLTODICT_LIBRARY = True
+    XMLTODICT_LIBRARY_IMPORT_ERROR = None


 class PersistentMemory(object):
@@ -165,10 +165,10 @@ class BE(object):
         for line in out.splitlines():
             if self.is_freebsd:
                 check = line.split()
-                if(check == []):
+                if check == []:
                     continue
                 full_name = check[0].split('/')
-                if(full_name == []):
+                if full_name == []:
                     continue
                 check[0] = full_name[len(full_name) - 1]
                 if check[0] == self.name:
@@ -329,7 +329,7 @@ class Homectl(object):
         cmd = [self.module.get_bin_path('homectl', True)]
         cmd.append('create')
         cmd.append('--identity=-')  # Read the user record from standard input.
-        return(self.module.run_command(cmd, data=record))
+        return self.module.run_command(cmd, data=record)

     def _hash_password(self, password):
         method = crypt.METHOD_SHA512
@@ -84,6 +84,7 @@ try:
     import keyring

     HAS_KEYRING = True
+    KEYRING_IMP_ERR = None
 except ImportError:
     HAS_KEYRING = False
     KEYRING_IMP_ERR = traceback.format_exc()
@@ -74,6 +74,7 @@ try:
     import keyring

     HAS_KEYRING = True
+    KEYRING_IMP_ERR = None
 except ImportError:
     HAS_KEYRING = False
     KEYRING_IMP_ERR = traceback.format_exc()
@@ -363,7 +363,7 @@ def format_disk_size(size_bytes, unit):
     This function has been adapted from https://github.com/Distrotech/parted/blo
     b/279d9d869ff472c52b9ec2e180d568f0c99e30b0/libparted/unit.c
     """
-    global units_si, units_iec
+    global units_si, units_iec  # pylint: disable=global-variable-not-assigned

     unit = unit.lower()

@@ -459,7 +459,7 @@ def get_device_info(device, unit):
     Fetches information about a disk and its partitions and it returns a
     dictionary.
     """
-    global module, parted_exec
+    global module, parted_exec  # pylint: disable=global-variable-not-assigned

     # If parted complains about missing labels, it means there are no partitions.
     # In this case only, use a custom function to fetch information and emulate
@@ -486,7 +486,7 @@ def check_parted_label(device):
     to 3.1 don't return data when there is no label. For more information see:
     http://upstream.rosalinux.ru/changelogs/libparted/3.1/changelog.html
     """
-    global parted_exec
+    global parted_exec  # pylint: disable=global-variable-not-assigned

     # Check the version
     parted_major, parted_minor, dummy = parted_version()
@@ -532,7 +532,7 @@ def parted_version():
     """
     Returns the major and minor version of parted installed on the system.
     """
-    global module, parted_exec
+    global module, parted_exec  # pylint: disable=global-variable-not-assigned

     rc, out, err = module.run_command("%s --version" % parted_exec)
     if rc != 0:
@@ -551,7 +551,7 @@ def parted(script, device, align):
     """
     Runs a parted script.
     """
-    global module, parted_exec
+    global module, parted_exec  # pylint: disable=global-variable-not-assigned

     align_option = '-a %s' % align
     if align == 'undefined':
@@ -602,7 +602,7 @@ def check_size_format(size_str):


 def main():
-    global module, units_si, units_iec, parted_exec
+    global module, units_si, units_iec, parted_exec  # pylint: disable=global-variable-not-assigned

     changed = False
     output_script = ""
@@ -188,6 +188,7 @@ except ImportError:
     PYRFC_LIBRARY_IMPORT_ERROR = traceback.format_exc()
 else:
     HAS_PYRFC_LIBRARY = True
+    PYRFC_LIBRARY_IMPORT_ERROR = None
 try:
     import xmltodict
 except ImportError:
@@ -195,6 +196,7 @@ except ImportError:
     XMLTODICT_LIBRARY_IMPORT_ERROR = traceback.format_exc()
 else:
     HAS_XMLTODICT_LIBRARY = True
+    XMLTODICT_LIBRARY_IMPORT_ERROR = None


 def call_rfc_method(connection, method_name, kwargs):
@@ -262,8 +262,8 @@ class BalancerMember(object):
         else:
             try:
                 soup = BeautifulSoup(balancer_member_page[0])
-            except TypeError:
-                self.module.fail_json(msg="Cannot parse balancer_member_page HTML! " + str(soup))
+            except TypeError as exc:
+                self.module.fail_json(msg="Cannot parse balancer_member_page HTML! " + str(exc))
             else:
                 subsoup = soup.findAll('table')[1].findAll('tr')
                 keys = subsoup[0].findAll('th')
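The apache2_mod_proxy fix is the same class of bug as the manageiq one: when BeautifulSoup raises TypeError, `soup` was never assigned, so building the error message from str(soup) crashed with UnboundLocalError instead of reporting the parse failure. Binding the exception as `exc` and formatting that fixes the report. Sketch (names and the broken parser are illustrative):

def parse_page(parse, page, fail):
    try:
        soup = parse(page)
    except TypeError as exc:
        fail("Cannot parse balancer_member_page HTML! " + str(exc))
    else:
        return soup

def broken_parser(page):
    raise TypeError("expected markup, got %r" % type(page))

parse_page(broken_parser, None, fail=print)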
@@ -62,7 +62,7 @@ if HAS_TLS and ssl_ctx is not None:
     smtp_server2 = smtpd_tls.DebuggingServer(('127.0.0.1', port2), None, ssl_ctx=ssl_ctx, starttls=False)
 else:
     print('Start SMTP server on port', port1)
-    smtp_server1 = smtpd.DebuggingServer(('127.0.0.1', port1), None)
+    smtp_server1 = smtpd.DebuggingServer(('127.0.0.1', port1), None)  # pylint: disable=used-before-assignment
     if port2:
         print('WARNING: TLS is NOT supported on this system, not listening on port %s.' % port2)

@@ -22,6 +22,7 @@ plugins/modules/cloud/univention/udm_user.py validate-modules:parameter-list-no-
 plugins/modules/clustering/consul/consul.py validate-modules:doc-missing-type
 plugins/modules/clustering/consul/consul.py validate-modules:undocumented-parameter
 plugins/modules/clustering/consul/consul_session.py validate-modules:parameter-state-invalid-choice
+plugins/modules/files/ini_file.py pylint:bad-option-value
 plugins/modules/packaging/language/yarn.py use-argspec-type-path
 plugins/modules/packaging/os/redhat_subscription.py validate-modules:return-syntax-error
 plugins/modules/remote_management/manageiq/manageiq_policies.py validate-modules:parameter-state-invalid-choice
@@ -17,6 +17,7 @@ plugins/modules/cloud/univention/udm_user.py validate-modules:parameter-list-no-
 plugins/modules/clustering/consul/consul.py validate-modules:doc-missing-type
 plugins/modules/clustering/consul/consul.py validate-modules:undocumented-parameter
 plugins/modules/clustering/consul/consul_session.py validate-modules:parameter-state-invalid-choice
+plugins/modules/files/ini_file.py pylint:bad-option-value
 plugins/modules/packaging/language/yarn.py use-argspec-type-path
 plugins/modules/packaging/os/redhat_subscription.py validate-modules:return-syntax-error
 plugins/modules/remote_management/manageiq/manageiq_policies.py validate-modules:parameter-state-invalid-choice
@@ -17,6 +17,7 @@ plugins/modules/cloud/univention/udm_user.py validate-modules:parameter-list-no-
 plugins/modules/clustering/consul/consul.py validate-modules:doc-missing-type
 plugins/modules/clustering/consul/consul.py validate-modules:undocumented-parameter
 plugins/modules/clustering/consul/consul_session.py validate-modules:parameter-state-invalid-choice
+plugins/modules/files/ini_file.py pylint:bad-option-value
 plugins/modules/packaging/language/yarn.py use-argspec-type-path
 plugins/modules/packaging/os/redhat_subscription.py validate-modules:return-syntax-error
 plugins/modules/remote_management/manageiq/manageiq_policies.py validate-modules:parameter-state-invalid-choice
@@ -75,8 +75,8 @@ class TestMyModule():
        }
         mod_obj.params = args
         lxca_cmms.main()
-        assert(mock.call(argument_spec=expected_arguments_spec,
-               supports_check_mode=False) == ansible_mod_cls.call_args)
+        assert mock.call(argument_spec=expected_arguments_spec,
+                         supports_check_mode=False) == ansible_mod_cls.call_args

     @mock.patch('ansible_collections.community.general.plugins.module_utils.remote_management.lxca.common.setup_conn', autospec=True)
     @mock.patch('ansible_collections.community.general.plugins.modules.remote_management.lxca.lxca_cmms._cmms_by_uuid',
@@ -79,8 +79,8 @@ class TestMyModule():
        }
         mod_obj.params = args
         lxca_nodes.main()
-        assert(mock.call(argument_spec=expected_arguments_spec,
-               supports_check_mode=False) == ansible_mod_cls.call_args)
+        assert mock.call(argument_spec=expected_arguments_spec,
+                         supports_check_mode=False) == ansible_mod_cls.call_args

     @mock.patch('ansible_collections.community.general.plugins.module_utils.remote_management.lxca.common.setup_conn', autospec=True)
     @mock.patch('ansible_collections.community.general.plugins.modules.remote_management.lxca.lxca_nodes._nodes_by_uuid',
@@ -315,9 +315,9 @@ class TestPmem(ModuleTestCase):
         test_result = result.exception.args[0]['result']
         expected = json.loads(namespace)

-        for i, notuse in enumerate(test_result):
-            self.assertEqual(test_result[i]['dev'], expected[i]['dev'])
-            self.assertEqual(test_result[i]['size'], expected[i]['size'])
+        for i, result in enumerate(test_result):
+            self.assertEqual(result['dev'], expected[i]['dev'])
+            self.assertEqual(result['size'], expected[i]['size'])

     def test_fail_when_required_args_missing(self):
         with self.assertRaises(AnsibleFailJson):