
Misc. typo fixes (#4940) (#4942)

Signed-off-by: Abhijeet Kasurde <akasurde@redhat.com>
(cherry picked from commit bf94f08bc4)

Co-authored-by: Abhijeet Kasurde <akasurde@redhat.com>
patchback[bot] 2022-07-11 22:18:22 +02:00 committed by GitHub
parent 6058a5e5b1
commit 2245742255
40 changed files with 55 additions and 55 deletions


@@ -105,7 +105,7 @@ body:
attributes:
label: Steps to Reproduce
description: |
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also pased any playbooks, configs and commands you used.
Describe exactly how to reproduce the problem, using a minimal test-case. It would *really* help us understand your problem if you could also passed any playbooks, configs and commands you used.
**HINT:** You can paste https://gist.github.com links for larger files.
value: |


@@ -21,7 +21,7 @@ DOCUMENTATION = '''
'''
EXAMPLES = '''
- name: Count occurences
- name: Count occurrences
ansible.builtin.debug:
msg: >-
{{ [1, 'a', 2, 2, 'a', 'b', 'a'] | community.general.counter }}
@@ -30,7 +30,7 @@ EXAMPLES = '''
RETURN = '''
_value:
description: A dictionary with the elements of the sequence as keys, and their number of occurance in the sequence as values.
description: A dictionary with the elements of the sequence as keys, and their number of occurrences in the sequence as values.
type: dictionary
'''
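The RETURN description above matches the behaviour of Python's collections.Counter; a minimal sketch of the equivalent logic for the EXAMPLES input (an illustration only, not the plugin's source):

from collections import Counter

# Equivalent of: {{ [1, 'a', 2, 2, 'a', 'b', 'a'] | community.general.counter }}
sequence = [1, 'a', 2, 2, 'a', 'b', 'a']
counts = dict(Counter(sequence))
print(counts)  # {1: 1, 'a': 3, 2: 2, 'b': 1}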


@@ -38,7 +38,7 @@ DOCUMENTATION = '''
parser:
description:
- The correct parser for the input data.
- For exmaple C(ifconfig).
- For example C(ifconfig).
- See U(https://github.com/kellyjonbrazil/jc#parsers) for the latest list of parsers.
type: string
required: true
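The parser option above names one of the parsers shipped with the jc library; a hedged sketch of how jc is typically driven outside Ansible (jc.parse is the library's documented high-level entry point, the command and printout are illustrative):

import subprocess

import jc  # the library this plugin wraps

# Run a command and hand its raw output to the named parser.
raw = subprocess.run(['ifconfig'], capture_output=True, text=True).stdout
parsed = jc.parse('ifconfig', raw)  # structured data instead of raw text
print(parsed)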


@@ -213,7 +213,7 @@ class InventoryModule(BaseInventoryPlugin, Cacheable):
self.inventory.add_child(parent_group_name, group_name)
else:
self.display.vvvv('Processing profile %s without parent\n' % profile['name'])
# Create a heirarchy of profile names
# Create a hierarchy of profile names
profile_elements = profile['name'].split('-')
i = 0
while i < len(profile_elements) - 1:
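The loop above derives nested groups from the dash-separated profile name; a small standalone sketch of that idea (the profile name and helper function are invented for illustration, not the plugin's exact code):

# Sketch: build a group hierarchy from a dash-separated profile name,
# e.g. 'webserver-prod-east' -> webserver, webserver-prod, webserver-prod-east.
def profile_hierarchy(name):
    elements = name.split('-')
    return ['-'.join(elements[:i]) for i in range(1, len(elements) + 1)]

print(profile_hierarchy('webserver-prod-east'))
# ['webserver', 'webserver-prod', 'webserver-prod-east']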


@@ -522,7 +522,7 @@ class InventoryModule(BaseInventoryPlugin):
"""Helper to save data
Helper to save the data in self.data
Detect if data is allready in branch and use dict_merge() to prevent that branch is overwritten.
Detect if data is already in branch and use dict_merge() to prevent that branch is overwritten.
Args:
str(instance_name): name of instance


@@ -351,7 +351,7 @@ def wait_to_finish(target, pending, refresh, timeout, min_interval=1, delay=3):
if pending and status not in pending:
raise HwcModuleException(
"unexpect status(%s) occured" % status)
"unexpect status(%s) occurred" % status)
if not is_last_time:
wait *= 2
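The wait *= 2 above doubles the interval between status polls; a hedged, self-contained sketch of that back-off pattern (names and defaults are illustrative, not this module's code):

import time

# Poll until a usable status is reached, doubling the wait each round.
def wait_until_done(get_status, ok_states, timeout=300, wait=1):
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = get_status()
        if status in ok_states:
            return status
        time.sleep(wait)
        wait *= 2  # exponential back-off, as in the module above
    raise TimeoutError("resource did not reach a usable state in time")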


@@ -83,12 +83,12 @@ class OpenNebulaModule:
if self.module.params.get("api_username"):
username = self.module.params.get("api_username")
else:
self.fail("Either api_username or the environment vairable ONE_USERNAME must be provided")
self.fail("Either api_username or the environment variable ONE_USERNAME must be provided")
if self.module.params.get("api_password"):
password = self.module.params.get("api_password")
else:
self.fail("Either api_password or the environment vairable ONE_PASSWORD must be provided")
self.fail("Either api_password or the environment variable ONE_PASSWORD must be provided")
session = "%s:%s" % (username, password)
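The credential lookup above falls back from module parameters to environment variables; a minimal hedged sketch of that pattern outside the module framework (function name and values are illustrative):

import os

# Parameter first, then environment variable, otherwise fail loudly.
def resolve_credential(params, param_name, env_name):
    value = params.get(param_name) or os.environ.get(env_name)
    if not value:
        raise ValueError("Either %s or the environment variable %s must be provided"
                         % (param_name, env_name))
    return value

params = {"api_username": "oneadmin", "api_password": None}
print(resolve_credential(params, "api_username", "ONE_USERNAME"))  # "oneadmin"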


@@ -691,7 +691,7 @@ def check_and_create_resource(
:param model: Model used to create a resource.
:param exclude_attributes: The attributes which should not be used to distinguish the resource. e.g. display_name,
dns_label.
:param dead_states: List of states which can't transition to any of the usable states of the resource. This deafults
:param dead_states: List of states which can't transition to any of the usable states of the resource. This defaults
to ["TERMINATING", "TERMINATED", "FAULTY", "FAILED", "DELETING", "DELETED", "UNKNOWN_ENUM_VALUE"]
:param default_attribute_values: A dictionary containing default values for attributes.
:return: A dictionary containing the resource & the "changed" status. e.g. {"vcn":{x:y}, "changed":True}
@@ -1189,7 +1189,7 @@ def are_dicts_equal(
def should_dict_attr_be_excluded(map_option_name, option_key, exclude_list):
"""An entry for the Exclude list for excluding a map's key is specifed as a dict with the map option name as the
"""An entry for the Exclude list for excluding a map's key is specified as a dict with the map option name as the
key, and the value as a list of keys to be excluded within that map. For example, if the keys "k1" and "k2" of a map
option named "m1" needs to be excluded, the exclude list must have an entry {'m1': ['k1','k2']} """
for exclude_item in exclude_list:
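The {'m1': ['k1', 'k2']} convention described in the docstring is easy to demonstrate; a hedged sketch of the described rule (not the module_utils implementation itself):

# An exclude-list entry for a map option is a dict mapping the option name
# to the keys that should be ignored inside that map.
def is_excluded(map_option_name, option_key, exclude_list):
    for item in exclude_list:
        if isinstance(item, dict) and option_key in item.get(map_option_name, []):
            return True
    return False

exclude_list = [{'m1': ['k1', 'k2']}, 'display_name']
print(is_excluded('m1', 'k1', exclude_list))  # True
print(is_excluded('m1', 'k3', exclude_list))  # False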


@@ -360,7 +360,7 @@ class LXDProfileManagement(object):
)
def _merge_dicts(self, source, destination):
"""Merge Dictionarys
"""Merge Dictionaries
Get a list of filehandle numbers from logger to be handed to
DaemonContext.files_preserve


@@ -303,7 +303,7 @@ class LXDProjectManagement(object):
)
def _merge_dicts(self, source, destination):
""" Return a new dict taht merge two dict,
""" Return a new dict that merge two dict,
with values in source dict overwrite destination dict
Args:
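The "source overwrites destination" merge the docstring describes is a standard recursive dict merge; a hedged sketch of that behaviour (not the module's own helper):

# Values from `source` win; nested dicts are merged recursively;
# neither input dict is modified.
def merge_dicts(source, destination):
    result = dict(destination)
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(result.get(key), dict):
            result[key] = merge_dicts(value, result[key])
        else:
            result[key] = value
    return result

print(merge_dicts({'a': {'x': 1}}, {'a': {'y': 2}, 'b': 3}))
# {'a': {'y': 2, 'x': 1}, 'b': 3}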


@@ -1204,12 +1204,12 @@ def main():
# Ensure source VM id exists when cloning
proxmox.get_vm(vmid)
# Ensure the choosen VM name doesn't already exist when cloning
# Ensure the chosen VM name doesn't already exist when cloning
existing_vmid = proxmox.get_vmid(name, ignore_missing=True)
if existing_vmid:
module.exit_json(changed=False, vmid=existing_vmid, msg="VM with name <%s> already exists" % name)
# Ensure the choosen VM id doesn't already exist when cloning
# Ensure the chosen VM id doesn't already exist when cloning
if proxmox.get_vm(newid, ignore_missing=True):
module.exit_json(changed=False, vmid=vmid, msg="vmid %s with VM name %s already exists" % (newid, name))


@@ -1207,7 +1207,7 @@ class XenServerVM(XenServerObject):
if (self.module.params['home_server'] and
(not self.vm_params['affinity'] or self.module.params['home_server'] != self.vm_params['affinity']['name_label'])):
# Check existance only. Ignore return value.
# Check existence only. Ignore return value.
get_object_ref(self.module, self.module.params['home_server'], uuid=None, obj_type="home server", fail=True,
msg_prefix="VM check home_server: ")
@@ -1371,7 +1371,7 @@ class XenServerVM(XenServerObject):
disk_sr = disk_params.get('sr')
if disk_sr_uuid is not None or disk_sr is not None:
# Check existance only. Ignore return value.
# Check existence only. Ignore return value.
get_object_ref(self.module, disk_sr, disk_sr_uuid, obj_type="SR", fail=True,
msg_prefix="VM check disks[%s]: " % position)
elif self.default_sr_ref == 'OpaqueRef:NULL':
@@ -1448,7 +1448,7 @@ class XenServerVM(XenServerObject):
if cdrom_type == "iso":
# Check if ISO exists.
# Check existance only. Ignore return value.
# Check existence only. Ignore return value.
get_object_ref(self.module, cdrom_iso_name, uuid=None, obj_type="ISO image", fail=True,
msg_prefix="VM check cdrom.iso_name: ")
@@ -1496,7 +1496,7 @@ class XenServerVM(XenServerObject):
self.module.fail_json(msg="VM check networks[%s]: network name cannot be an empty string!" % position)
if network_name:
# Check existance only. Ignore return value.
# Check existence only. Ignore return value.
get_object_ref(self.module, network_name, uuid=None, obj_type="network", fail=True,
msg_prefix="VM check networks[%s]: " % position)


@@ -279,7 +279,7 @@ def do_ini(module, filename, section=None, option=None, values=None,
# handling multiple instances of option=value when state is 'present' with/without exclusive is a bit complex
#
# 1. edit all lines where we have a option=value pair with a matching value in values[]
# 2. edit all the remaing lines where we have a matching option
# 2. edit all the remaining lines where we have a matching option
# 3. delete remaining lines where we have a matching option
# 4. insert missing option line(s) at the end of the section
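A small illustration of what steps 1-4 above amount to for a section that already contains several lines of the same option (file content and option values are made up; this shows the intended end state, not the module's code):

# ini_file with state=present, exclusive=true, option="port",
# values=["8080", "9090"] (hypothetical inputs for illustration).
before = """[web]
port = 80
port = 8080
host = example.com
"""
after = """[web]
port = 8080
port = 9090
host = example.com
"""
# "port = 8080" already matches a requested value and is kept (step 1);
# the leftover "port = 80" line is rewritten or removed (steps 2-3);
# any requested value still missing is added at the end of the section (step 4).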


@@ -207,7 +207,7 @@ def main():
changed = True
else:
changed = False
out = "allready unpacked"
out = "already unpacked"
if remove:
os.remove(path)


@@ -45,7 +45,7 @@ EXAMPLES = r'''
ipa_user: admin
ipa_pass: supersecret
- name: Ensure the TOTP syncronization window is set to 86400 seconds
- name: Ensure the TOTP synchronization window is set to 86400 seconds
community.general.ipa_otpconfig:
ipatokentotpsyncwindow: '86400'
ipa_host: localhost
@@ -59,7 +59,7 @@ EXAMPLES = r'''
ipa_user: admin
ipa_pass: supersecret
- name: Ensure the HOTP syncronization window is set to 100 hops
- name: Ensure the HOTP synchronization window is set to 100 hops
community.general.ipa_otpconfig:
ipatokenhotpsyncwindow: '100'
ipa_host: localhost


@@ -63,7 +63,7 @@ options:
type: str
replace:
description:
- Force replace the existant vault on IPA server.
- Force replace the existent vault on IPA server.
type: bool
default: False
choices: ["True", "False"]


@@ -64,7 +64,7 @@ msg:
realm_info:
description:
- Representation of the realm public infomation.
- Representation of the realm public information.
returned: always
type: dict
contains:


@@ -1501,7 +1501,7 @@ class Nmcli(object):
if self._hairpin is None:
self.module.deprecate(
"Parameter 'hairpin' default value will change from true to false in community.general 7.0.0. "
"Set the value explicitly to supress this warning.",
"Set the value explicitly to suppress this warning.",
version='7.0.0', collection_name='community.general',
)
# Should be False in 7.0.0 but then that should be in argument_specs


@@ -242,7 +242,7 @@ class AnsibleGalaxyInstall(CmdModuleHelper):
self.module.deprecate(
"Support for Ansible 2.9 and ansible-base 2.10 is being deprecated. "
"At the same time support for them is ended, also the ack_ansible29 option will be removed. "
"Upgrading is strongly recommended, or set 'ack_min_ansiblecore211' to supress this message.",
"Upgrading is strongly recommended, or set 'ack_min_ansiblecore211' to suppress this message.",
version="8.0.0",
collection_name="community.general",
)


@@ -81,7 +81,7 @@ options:
classmap_authoritative:
description:
- Autoload classes from classmap only.
- Implicitely enable optimize_autoloader.
- Implicitly enable optimize_autoloader.
- Recommended especially for production, but can take a bit of time to run.
default: false
type: bool


@@ -74,7 +74,7 @@ ilo_redfish_info:
type: dict
contains:
ret:
description: Check variable to see if the information was succesfully retrived.
description: Check variable to see if the information was successfully retrieved.
type: bool
msg:
description: Information of all active iLO sessions.


@@ -36,7 +36,7 @@ options:
name:
description:
- The name of the branch that needs to be protected.
- Can make use a wildcard charachter for like C(production/*) or just have C(main) or C(develop) as value.
- Can make use a wildcard character for like C(production/*) or just have C(main) or C(develop) as value.
required: true
type: str
merge_access_levels:


@@ -305,7 +305,7 @@ class GitLabUser(object):
# note: as we unfortunately have some uncheckable parameters
# where it is not possible to determine if the update
# changed something or not, we must assume here that a
# changed happend and that an user object update is needed
# changed happened and that an user object update is needed
potentionally_changed = True
# Assign ssh keys


@@ -99,7 +99,7 @@ options:
- The size of namespace. This option supports the suffixes C(k) or C(K) or C(KB) for KiB,
C(m) or C(M) or C(MB) for MiB, C(g) or C(G) or C(GB) for GiB and C(t) or C(T) or C(TB) for TiB.
- This option is required if multiple namespaces are configured.
- If this option is not set, all of the avaiable space of a region is configured.
- If this option is not set, all of the available space of a region is configured.
type: str
required: false
namespace_append:
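A hedged sketch of the suffix handling the size option describes (the accepted spellings come from the list above; the parsing code itself is only an illustration, not the module's):

import re

# Turn strings such as "4g", "4G" or "4GB" into a byte count,
# following the binary suffix table described above (illustrative only).
UNITS = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3, 't': 1024 ** 4}

def parse_namespace_size(text):
    match = re.fullmatch(r'(\d+)\s*([kmgt])b?', text.strip(), re.IGNORECASE)
    if not match:
        raise ValueError('unsupported size: %r' % text)
    number, unit = match.groups()
    return int(number) * UNITS[unit.lower()]

print(parse_namespace_size('4GB'))  # 4294967296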


@@ -57,7 +57,7 @@ options:
realname:
description:
- The user's real ('human') name.
- This can also be used to add a comment to maintain compatability with C(useradd).
- This can also be used to add a comment to maintain compatibility with C(useradd).
aliases: [ 'comment' ]
type: str
realm:


@@ -790,7 +790,7 @@ class AIXTimezone(Timezone):
inspects C(/etc/environment) to determine the current timezone.
While AIX time zones can be set using two formats (POSIX and
Olson) the prefered method is Olson.
Olson) the preferred method is Olson.
See the following article for more information:
https://developer.ibm.com/articles/au-aix-posix/


@@ -14,7 +14,7 @@
- "valid_comma_separated_spaces | community.general.from_csv(skipinitialspace=True) == expected_result"
- "valid_comma_separated_spaces | community.general.from_csv != expected_result"
- name: Parse valid csv input with no headers with/without specifiying fieldnames
- name: Parse valid csv input with no headers with/without specifying fieldnames
assert:
that:
- "valid_comma_separated_no_headers | community.general.from_csv(fieldnames=['id','name','role']) == expected_result"
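Both assertions above mirror plain Python csv module behaviour; a hedged standalone sketch of the same two cases (the input strings are invented for illustration):

import csv
import io

# Case 1: values carry a leading space after the delimiter; skipinitialspace drops it.
spaced = "id, name, role\n1, foo, bar\n"
print(list(csv.DictReader(io.StringIO(spaced), skipinitialspace=True)))
# [{'id': '1', 'name': 'foo', 'role': 'bar'}]

# Case 2: no header row, so the field names are supplied explicitly.
headerless = "1,foo,bar\n"
print(list(csv.DictReader(io.StringIO(headerless), fieldnames=['id', 'name', 'role'])))
# [{'id': '1', 'name': 'foo', 'role': 'bar'}]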


@@ -105,7 +105,7 @@
- iso_result is changed
- iso_file.stat.exists == True
- name: Create iso file with Rock Ridge extention
- name: Create iso file with Rock Ridge extension
iso_create:
src_files:
- "{{ remote_tmp_dir }}/test1.cfg"
@@ -124,7 +124,7 @@
- iso_result is changed
- iso_file.stat.exists == True
- name: Create iso file with Joliet extention
- name: Create iso file with Joliet extension
iso_create:
src_files:
- "{{ remote_tmp_dir }}/test1.cfg"


@@ -66,7 +66,7 @@
- name: Run tests
# Skip tests on Fedora 31 and 32 because dbus fails to start unless the container is run in priveleged mode.
# Skip tests on Fedora 31 and 32 because dbus fails to start unless the container is run in privileged mode.
# Even then, it starts unreliably. This may be due to the move to cgroup v2 in Fedora 31 and 32.
# https://www.redhat.com/sysadmin/fedora-31-control-group-v2
when:


@@ -107,7 +107,7 @@ def test_validate_config(inventory):
}
with pytest.raises(AnsibleError) as error_message:
inventory._validate_config(config)
assert "config missing client_secret, a required paramter" in error_message
assert "config missing client_secret, a required parameter" in error_message
config = {
"client_secret": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
@@ -116,7 +116,7 @@ def test_validate_config(inventory):
}
with pytest.raises(AnsibleError) as error_message:
inventory._validate_config(config)
assert "config missing client_id, a required paramter" in error_message
assert "config missing client_id, a required parameter" in error_message
def test_populate(inventory):


@@ -39,7 +39,7 @@ def XenAPI():
"""Imports and returns fake XenAPI module."""
# Import of fake XenAPI module is wrapped by fixture so that it does not
# affect other unit tests which could potentialy also use XenAPI module.
# affect other unit tests which could potentially also use XenAPI module.
# First we use importlib.import_module() to import the module and assign
# it to a local symbol.


@@ -35,7 +35,7 @@ def XenAPI():
"""Imports and returns fake XenAPI module."""
# Import of fake XenAPI module is wrapped by fixture so that it does not
# affect other unit tests which could potentialy also use XenAPI module.
# affect other unit tests which could potentially also use XenAPI module.
# First we use importlib.import_module() to import the module and assign
# it to a local symbol.


@@ -109,7 +109,7 @@ class TestPritunlOrg(ModuleTestCase):
idempotent_exc = idempotent_result.exception.args[0]
# Ensure both calls resulted in the same returned value
# except for changed which sould be false the second time
# except for changed which should be false the second time
for k, v in iteritems(idempotent_exc):
if k == "changed":
self.assertFalse(idempotent_exc[k])
@@ -158,7 +158,7 @@ class TestPritunlOrg(ModuleTestCase):
idempotent_exc = idempotent_result.exception.args[0]
# Ensure both calls resulted in the same returned value
# except for changed which sould be false the second time
# except for changed which should be false the second time
self.assertFalse(idempotent_exc["changed"])
self.assertEqual(idempotent_exc["response"], delete_exc["response"])


@@ -3309,7 +3309,7 @@ def test_gsm_connection_unchanged(mocked_gsm_connection_unchanged, capfd):
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES, indirect=['patch_ansible_module'])
def test_create_ethernet_with_mulitple_ip4_addresses_static(mocked_generic_connection_create, capfd):
def test_create_ethernet_with_multiple_ip4_addresses_static(mocked_generic_connection_create, capfd):
"""
Test : Create ethernet connection with static IP configuration
"""
@@ -3349,7 +3349,7 @@ def test_create_ethernet_with_mulitple_ip4_addresses_static(mocked_generic_conne
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP6_ADDRESSES, indirect=['patch_ansible_module'])
def test_create_ethernet_with_mulitple_ip6_addresses_static(mocked_generic_connection_create, capfd):
def test_create_ethernet_with_multiple_ip6_addresses_static(mocked_generic_connection_create, capfd):
"""
Test : Create ethernet connection with multiple IPv6 addresses configuration
"""
@@ -3389,7 +3389,7 @@ def test_create_ethernet_with_mulitple_ip6_addresses_static(mocked_generic_conne
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP4_ADDRESSES, indirect=['patch_ansible_module'])
def test_ethernet_connection_static_with_mulitple_ip4_addresses_unchanged(mocked_ethernet_connection_static_multiple_ip4_addresses_unchanged, capfd):
def test_ethernet_connection_static_with_multiple_ip4_addresses_unchanged(mocked_ethernet_connection_static_multiple_ip4_addresses_unchanged, capfd):
"""
Test : Ethernet connection with static IP configuration unchanged
"""
@@ -3403,7 +3403,7 @@ def test_ethernet_connection_static_with_mulitple_ip4_addresses_unchanged(mocked
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_MULTIPLE_IP6_ADDRESSES, indirect=['patch_ansible_module'])
def test_ethernet_connection_static_with_mulitple_ip6_addresses_unchanged(mocked_ethernet_connection_static_multiple_ip6_addresses_unchanged, capfd):
def test_ethernet_connection_static_with_multiple_ip6_addresses_unchanged(mocked_ethernet_connection_static_multiple_ip6_addresses_unchanged, capfd):
"""
Test : Ethernet connection with multiple IPv6 addresses configuration unchanged
"""
@@ -3485,7 +3485,7 @@ def test_create_ethernet_addr_gen_mode_and_ip6_privacy_static(mocked_generic_con
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_ETHERNET_STATIC_IP6_PRIVACY_AND_ADDR_GEN_MODE, indirect=['patch_ansible_module'])
def test_ethernet_connection_static_with_mulitple_ip4_addresses_unchanged(mocked_ethernet_connection_static_ip6_privacy_and_addr_gen_mode_unchange, capfd):
def test_ethernet_connection_static_with_multiple_ip4_addresses_unchanged(mocked_ethernet_connection_static_ip6_privacy_and_addr_gen_mode_unchange, capfd):
"""
Test : Ethernet connection with static IP configuration unchanged
"""


@@ -37,7 +37,7 @@ class TestSlackModule(ModuleTestCase):
with self.assertRaises(AnsibleFailJson):
self.module.main()
def test_sucessful_message(self):
def test_successful_message(self):
"""tests sending a message. This is example 1 from the docs"""
set_module_args({
'token': 'XXXX/YYYY/ZZZZ',


@@ -19,7 +19,7 @@ TESTED_MODULE = cpanm.__name__
@pytest.fixture
def patch_cpanm(mocker):
"""
Function used for mocking some parts of redhat_subscribtion module
Function used for mocking some parts of redhat_subscription module
"""
mocker.patch('ansible_collections.community.general.plugins.module_utils.module_helper.AnsibleModule.get_bin_path',
return_value='/testbin/cpanm')


@@ -3,7 +3,7 @@
## Tests structure
- `input` directory contains interfaces configuration files
- `test_interfaces_file.py` runs each hardcoded test agains all configurations in `input` directory and compares results with golden outputs in `golden_output`
- `test_interfaces_file.py` runs each hardcoded test against all configurations in `input` directory and compares results with golden outputs in `golden_output`
## Running unit tests with docker


@@ -17,7 +17,7 @@ TESTED_MODULE = gconftool2_info.__name__
@pytest.fixture
def patch_gconftool2_info(mocker):
"""
Function used for mocking some parts of redhat_subscribtion module
Function used for mocking some parts of redhat_subscription module
"""
mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
return_value='/testbin/gconftool-2')


@@ -19,7 +19,7 @@ TESTED_MODULE = xfconf.__name__
@pytest.fixture
def patch_xfconf(mocker):
"""
Function used for mocking some parts of redhat_subscribtion module
Function used for mocking some parts of redhat_subscription module
"""
mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
return_value='/testbin/xfconf-query')


@@ -16,7 +16,7 @@ TESTED_MODULE = xfconf_info.__name__
@pytest.fixture
def patch_xfconf_info(mocker):
"""
Function used for mocking some parts of redhat_subscribtion module
Function used for mocking some parts of redhat_subscription module
"""
mocker.patch('ansible_collections.community.general.plugins.module_utils.mh.module_helper.AnsibleModule.get_bin_path',
return_value='/testbin/xfconf-query')