mirror of
https://github.com/ansible-collections/community.general.git
synced 2024-09-14 20:13:21 +02:00
cd36164239
* Porting tests to pytest * Achievement Get: No longer need mock/generator.py * Now done via pytest's parametrization * Port safe_eval to pytest * Port text tests to pytest * Port test_set_mode_if_different to pytest * Change conftest AnsibleModule fixtures to be more flexible * Move the AnsibleModules fixtures to module_utils/conftest.py for sharing * Testing the argspec code requires: * injecting both the argspec and the arguments. * Patching the arguments into sys.stdin at a different level * More porting to obsolete mock/procenv.py * Port run_command to pytest * Port known_hosts tests to pytest * Port safe_eval to pytest * Port test_distribution_version.py to pytest * Port test_log to pytest * Port test__log_invocation to pytest * Remove unneeded import of procenv in test_postgresql * Port test_pip to pytest style * As part of this, create a pytest ansiblemodule fixture in modules/conftest.py. This is slightly different than the approach taken in module_utils because here we need to override the AnsibleModule that the modules will inherit from instead of one that we're instantiating ourselves. * Fixup usage of parametrization in test_deprecate_warn * Check that the pip module failed in our test
123 lines
5.3 KiB
Python
# -*- coding: utf-8 -*-
|
|
# Copyright (c) 2015-2017 Ansible Project
|
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
|
|
# Make coding more python3-ish
|
|
from __future__ import (absolute_import, division)
|
|
__metaclass__ = type
|
|
|
|
import json
|
|
import sys
|
|
from itertools import chain
|
|
|
|
import pytest
|
|
|
|
|
|
# Invocation structure that AnsibleModule reports back when it was
# instantiated with no module arguments at all.
EMPTY_INVOCATION = {u'module_args': {}}
|
|
|
|
|
|
class TestAnsibleModuleExitJson:
    """
    Verify that the various ways of calling exit_json() and fail_json()
    emit exactly the messages they were given.
    """

    # Pairs of (kwargs passed to exit_json/fail_json, JSON document
    # expected on stdout).
    DATA = (
        ({}, {'invocation': EMPTY_INVOCATION}),
        ({'msg': 'message'}, {'msg': 'message', 'invocation': EMPTY_INVOCATION}),
        ({'msg': 'success', 'changed': True},
         {'msg': 'success', 'changed': True, 'invocation': EMPTY_INVOCATION}),
        ({'msg': 'nochange', 'changed': False},
         {'msg': 'nochange', 'changed': False, 'invocation': EMPTY_INVOCATION}),
    )

    # pylint bug: https://github.com/PyCQA/pylint/issues/511
    # pylint: disable=undefined-variable
    @pytest.mark.parametrize('args, expected, stdin', [(a, e, {}) for a, e in DATA], indirect=['stdin'])
    def test_exit_json_exits(self, am, capfd, args, expected):
        # exit_json() must terminate the module with a success (0) exit code.
        with pytest.raises(SystemExit) as exc_info:
            am.exit_json(**args)
        assert exc_info.value.code == 0

        stdout, dummy = capfd.readouterr()
        assert json.loads(stdout) == expected

    # Fail_json is only legal if it's called with a message
    # pylint bug: https://github.com/PyCQA/pylint/issues/511
    @pytest.mark.parametrize('args, expected, stdin',
                             [(a, e, {}) for a, e in DATA if 'msg' in a],  # pylint: disable=undefined-variable
                             indirect=['stdin'])
    def test_fail_json_exits(self, am, capfd, args, expected):
        # fail_json() must terminate the module with a failure (1) exit code.
        with pytest.raises(SystemExit) as exc_info:
            am.fail_json(**args)
        assert exc_info.value.code == 1

        stdout, dummy = capfd.readouterr()
        # Fail_json should add failed=True
        expected['failed'] = True
        assert json.loads(stdout) == expected

    @pytest.mark.parametrize('stdin', [{}], indirect=['stdin'])
    def test_fail_json_no_msg(self, am):
        # Calling fail_json() without a msg is an implementation error and
        # must be reported as such.
        with pytest.raises(AssertionError) as exc_info:
            am.fail_json()
        assert exc_info.value.args[0] == "implementation error -- msg to explain the error is required"
|
|
|
|
|
|
class TestAnsibleModuleExitValuesRemoved:
    """
    Test that exit_json() and fail_json() censor password-like values
    (no_log parameters and embedded URL credentials) from their output.
    """

    # Placeholder string AnsibleModule substitutes for censored no_log values.
    OMIT = 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'

    # Triples of (module arguments supplied on stdin,
    #             kwargs passed to exit_json/fail_json,
    #             JSON document expected on stdout after censoring).
    DATA = (
        (
            dict(username='person', password='$ecret k3y'),
            dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/',
                 not_secret='following the leader', msg='here'),
            dict(one=1, pwd=OMIT, url='https://username:password12345@foo.com/login/',
                 not_secret='following the leader', msg='here',
                 invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
        ),
        (
            dict(username='person', password='password12345'),
            dict(one=1, pwd='$ecret k3y', url='https://username:password12345@foo.com/login/',
                 not_secret='following the leader', msg='here'),
            dict(one=1, pwd='$ecret k3y', url='https://username:********@foo.com/login/',
                 not_secret='following the leader', msg='here',
                 invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
        ),
        (
            dict(username='person', password='$ecret k3y'),
            dict(one=1, pwd='$ecret k3y', url='https://username:$ecret k3y@foo.com/login/',
                 not_secret='following the leader', msg='here'),
            dict(one=1, pwd=OMIT, url='https://username:********@foo.com/login/',
                 not_secret='following the leader', msg='here',
                 invocation=dict(module_args=dict(password=OMIT, token=None, username='person'))),
        ),
    )

    # pylint bug: https://github.com/PyCQA/pylint/issues/511
    @pytest.mark.parametrize('am, stdin, return_val, expected',
                             (({'username': {}, 'password': {'no_log': True}, 'token': {'no_log': True}}, s, r, e)
                              for s, r, e in DATA),  # pylint: disable=undefined-variable
                             indirect=['am', 'stdin'])
    def test_exit_json_removes_values(self, am, capfd, return_val, expected):
        # exit_json() always exits; we only care about the censored output here.
        with pytest.raises(SystemExit):
            am.exit_json(**return_val)
        out, dummy = capfd.readouterr()

        assert json.loads(out) == expected

    # pylint bug: https://github.com/PyCQA/pylint/issues/511
    @pytest.mark.parametrize('am, stdin, return_val, expected',
                             (({'username': {}, 'password': {'no_log': True}, 'token': {'no_log': True}}, s, r, e)
                              for s, r, e in DATA),  # pylint: disable=undefined-variable
                             indirect=['am', 'stdin'])
    def test_fail_json_removes_values(self, am, capfd, return_val, expected):
        # fail_json() always adds failed=True to its output.
        expected['failed'] = True
        # BUGFIX: the original wrote ``am.fail_json(**return_val) == expected``,
        # a dangling comparison whose result was silently discarded; the real
        # check is the json.loads assertion below.
        with pytest.raises(SystemExit):
            am.fail_json(**return_val)
        out, dummy = capfd.readouterr()

        assert json.loads(out) == expected
|