1
0
Fork 0
mirror of https://github.com/ansible-collections/community.general.git synced 2024-09-14 20:13:21 +02:00

Make Reporter class hold all results, move line/col into results, and out of message (#24127)

* Make Reporter class hold all results, move line/col into results, and out of message

* Move line/col out of message for YAML parser errors

* We have lineno for the DOC var, use it for YAML parser errors

* Remove validate-modules files from legacy-files

* pep8 indentation fixes

* Add todo for line/col in _validate_docs_schema
This commit is contained in:
Matt Martz 2017-05-03 10:25:08 -05:00 committed by GitHub
parent 6522d703a1
commit 2fbfba0ef3
5 changed files with 385 additions and 226 deletions

View file

@ -965,8 +965,6 @@ test/integration/setup_gce.py
test/integration/targets/async/library/async_test.py test/integration/targets/async/library/async_test.py
test/integration/targets/uri/files/testserver.py test/integration/targets/uri/files/testserver.py
test/sanity/code-smell/ansible-var-precedence-check.py test/sanity/code-smell/ansible-var-precedence-check.py
test/sanity/validate-modules/module_args.py
test/sanity/validate-modules/schema.py
test/units/cli/test_galaxy.py test/units/cli/test_galaxy.py
test/units/contrib/inventory/test_vmware_inventory.py test/units/contrib/inventory/test_vmware_inventory.py
test/units/errors/test_errors.py test/units/errors/test_errors.py

View file

@ -70,7 +70,7 @@ def add_mocks(filename):
if [s for s in sources if s[:7] in ['ansible', '__main_']]: if [s for s in sources if s[:7] in ['ansible', '__main_']]:
parts = module.split('.') parts = module.split('.')
for i in range(len(parts)): for i in range(len(parts)):
dotted = '.'.join(parts[:i+1]) dotted = '.'.join(parts[:i + 1])
# Never mock out ansible or ansible.module_utils # Never mock out ansible or ansible.module_utils
# we may get here if a specific module_utils file needed mocked # we may get here if a specific module_utils file needed mocked
if dotted in ('ansible', 'ansible.module_utils',): if dotted in ('ansible', 'ansible.module_utils',):

View file

@ -81,6 +81,7 @@ def return_schema(data):
extra=PREVENT_EXTRA extra=PREVENT_EXTRA
) )
def doc_schema(module_name): def doc_schema(module_name):
if module_name.startswith('_'): if module_name.startswith('_'):
module_name = module_name[1:] module_name = module_name[1:]
@ -116,7 +117,6 @@ def metadata_schema(deprecated):
) )
# Things to add soon # Things to add soon
#################### ####################
# 1) Recursively validate `type: complex` fields # 1) Recursively validate `type: complex` fields

View file

@ -85,17 +85,25 @@ def parse_yaml(value, lineno, module, name, load_all=False):
except yaml.MarkedYAMLError as e: except yaml.MarkedYAMLError as e:
e.problem_mark.line += lineno - 1 e.problem_mark.line += lineno - 1
e.problem_mark.name = '%s.%s' % (module, name) e.problem_mark.name = '%s.%s' % (module, name)
errors.append('%s is not valid YAML. Line %d column %d' % errors.append({
(name, e.problem_mark.line + 1, 'msg': '%s is not valid YAML' % name,
e.problem_mark.column + 1)) 'line': e.problem_mark.line + 1,
'column': e.problem_mark.column + 1
})
traces.append(e) traces.append(e)
except yaml.reader.ReaderError as e: except yaml.reader.ReaderError as e:
traces.append(e) traces.append(e)
errors.append('%s is not valid YAML. Character ' # TODO: Better line/column detection
'0x%x at position %d.' % errors.append({
(name, e.character, e.position)) 'msg': ('%s is not valid YAML. Character '
'0x%x at position %d.' % (name, e.character, e.position)),
'line': lineno
})
except yaml.YAMLError as e: except yaml.YAMLError as e:
traces.append(e) traces.append(e)
errors.append('%s is not valid YAML: %s: %s' % (name, type(e), e)) errors.append({
'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e),
'line': lineno
})
return data, errors, traces return data, errors, traces

View file

@ -41,7 +41,7 @@ from ansible.utils.plugin_docs import BLACKLIST, get_docstring
from module_args import get_argument_spec from module_args import get_argument_spec
from schema import doc_schema, option_schema, metadata_schema, return_schema from schema import doc_schema, metadata_schema, return_schema
from utils import CaptureStd, parse_yaml from utils import CaptureStd, parse_yaml
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
@ -63,18 +63,18 @@ TYPE_REGEX = re.compile(r'.*(if|or)(\s+[^"\']*|\s+)(?<!_)(?<!str\()type\(.*')
BLACKLIST_IMPORTS = { BLACKLIST_IMPORTS = {
'requests': { 'requests': {
'new_only': True, 'new_only': True,
'msg': ( 'error': {
203, 'code': 203,
('requests import found, should use ' 'msg': ('requests import found, should use '
'ansible.module_utils.urls instead') 'ansible.module_utils.urls instead')
) }
}, },
'boto(?:\.|$)': { 'boto(?:\.|$)': {
'new_only': True, 'new_only': True,
'msg': ( 'error': {
204, 'code': 204,
('boto import found, new modules should use boto3') 'msg': 'boto import found, new modules should use boto3'
) }
}, },
} }
@ -88,6 +88,43 @@ class ReporterEncoder(json.JSONEncoder):
class Reporter(object): class Reporter(object):
def __init__(self):
self.files = OrderedDict()
def _ensure_default_entry(self, path):
try:
self.files[path]
except KeyError:
self.files[path] = {
'errors': [],
'warnings': [],
'traces': [],
'warning_traces': []
}
def _log(self, path, code, msg, level='error', line=0, column=0):
self._ensure_default_entry(path)
lvl_dct = self.files[path]['%ss' % level]
lvl_dct.append({
'code': code,
'msg': msg,
'line': line,
'column': column
})
def error(self, *args, **kwargs):
self._log(*args, level='error', **kwargs)
def warning(self, *args, **kwargs):
self._log(*args, level='warning', **kwargs)
def trace(self, path, tracebk):
self._ensure_default_entry(path)
self.files[path]['traces'].append(tracebk)
def warning_trace(self, path, tracebk):
self._ensure_default_entry(path)
self.files[path]['warning_traces'].append(tracebk)
@staticmethod @staticmethod
@contextmanager @contextmanager
@ -111,20 +148,14 @@ class Reporter(object):
return temp_reports return temp_reports
@staticmethod def plain(self, warnings=False, output='-'):
def plain(reports, warnings=False, output='-'):
"""Print out the test results in plain format """Print out the test results in plain format
output is ignored here for now output is ignored here for now
""" """
ret = [] ret = []
for path, report in Reporter._filter_out_ok(reports).items(): for path, report in Reporter._filter_out_ok(self.files).items():
if report['errors'] or (warnings and report['warnings']):
print('=' * 76)
print(path)
print('=' * 76)
traces = report['traces'][:] traces = report['traces'][:]
if warnings and report['warnings']: if warnings and report['warnings']:
traces.extend(report['warning_traces']) traces.extend(report['warning_traces'])
@ -133,28 +164,25 @@ class Reporter(object):
print('TRACE:') print('TRACE:')
print('\n '.join((' %s' % trace).splitlines())) print('\n '.join((' %s' % trace).splitlines()))
for error in report['errors']: for error in report['errors']:
print('ERROR:%(code)d:%(msg)s' % error) error['path'] = path
print('%(path)s:%(line)d:%(column)d: E%(code)d %(msg)s' % error)
ret.append(1) ret.append(1)
if warnings: if warnings:
for warning in report['warnings']: for warning in report['warnings']:
print('WARNING:%(code)d:%(msg)s' % warning) warning['path'] = path
# ret.append(1) # Don't incrememt exit status for warnings print('%(path)s:%(line)d:%(column)d: W%(code)d %(msg)s' % warning)
if report['errors'] or (warnings and report['warnings']):
print()
return 3 if ret else 0 return 3 if ret else 0
@staticmethod def json(self, warnings=False, output='-'):
def json(reports, warnings=False, output='-'):
"""Print out the test results in json format """Print out the test results in json format
warnings is not respected in this output warnings is not respected in this output
""" """
ret = [len(r['errors']) for _, r in reports.items()] ret = [len(r['errors']) for _, r in self.files.items()]
with Reporter._output_handle(output) as handle: with Reporter._output_handle(output) as handle:
print(json.dumps(Reporter._filter_out_ok(reports), indent=4, cls=ReporterEncoder), file=handle) print(json.dumps(Reporter._filter_out_ok(self.files), indent=4, cls=ReporterEncoder), file=handle)
return 3 if sum(ret) else 0 return 3 if sum(ret) else 0
@ -164,15 +192,8 @@ class Validator(with_metaclass(abc.ABCMeta, object)):
are scanning multiple objects for problems, you'll want to have a separate are scanning multiple objects for problems, you'll want to have a separate
Validator for each one.""" Validator for each one."""
def __init__(self): def __init__(self, reporter=None):
self.reset() self.reporter = reporter
def reset(self):
"""Reset the test results"""
self.errors = []
self.warnings = []
self.traces = []
self.warning_traces = []
@abc.abstractproperty @abc.abstractproperty
def object_name(self): def object_name(self):
@ -185,20 +206,9 @@ class Validator(with_metaclass(abc.ABCMeta, object)):
pass pass
@abc.abstractmethod @abc.abstractmethod
def validate(self, reset=True): def validate(self):
"""Run this method to generate the test results""" """Run this method to generate the test results"""
if reset: pass
self.reset()
def report(self):
return {
self.object_path: OrderedDict([
('errors', [{'code': code, 'msg': msg} for code, msg in self.errors]),
('traces', self.traces[:]),
('warnings', [{'code': code, 'msg': msg} for code, msg in self.warnings]),
('warning_traces', self.warning_traces[:])
])
}
class ModuleValidator(Validator): class ModuleValidator(Validator):
@ -215,8 +225,8 @@ class ModuleValidator(Validator):
'setup.ps1' 'setup.ps1'
)) ))
def __init__(self, path, analyze_arg_spec=False, base_branch=None, git_cache=None): def __init__(self, path, analyze_arg_spec=False, base_branch=None, git_cache=None, reporter=None):
super(ModuleValidator, self).__init__() super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path self.path = path
self.basename = os.path.basename(self.path) self.basename = os.path.basename(self.path)
@ -315,11 +325,19 @@ class ModuleValidator(Validator):
def _check_interpreter(self, powershell=False): def _check_interpreter(self, powershell=False):
if powershell: if powershell:
if not self.text.startswith('#!powershell\n'): if not self.text.startswith('#!powershell\n'):
self.errors.append((102, 'Interpreter line is not "#!powershell"')) self.reporter.error(
path=self.object_path,
code=102,
msg='Interpreter line is not "#!powershell"'
)
return return
if not self.text.startswith('#!/usr/bin/python'): if not self.text.startswith('#!/usr/bin/python'):
self.errors.append((101, 'Interpreter line is not "#!/usr/bin/python"')) self.reporter.error(
path=self.object_path,
code=101,
msg='Interpreter line is not "#!/usr/bin/python"'
)
def _check_type_instead_of_isinstance(self, powershell=False): def _check_type_instead_of_isinstance(self, powershell=False):
if powershell: if powershell:
@ -327,35 +345,45 @@ class ModuleValidator(Validator):
for line_no, line in enumerate(self.text.splitlines()): for line_no, line in enumerate(self.text.splitlines()):
typekeyword = TYPE_REGEX.match(line) typekeyword = TYPE_REGEX.match(line)
if typekeyword: if typekeyword:
self.errors.append(( # TODO: add column
403, self.reporter.error(
('Type comparison using type() found on ' path=self.object_path,
'line %d. Use isinstance() instead' % (line_no + 1)) code=403,
)) msg=('Type comparison using type() found. '
'Use isinstance() instead'),
line=line_no + 1
)
def _check_for_sys_exit(self): def _check_for_sys_exit(self):
if 'sys.exit(' in self.text: if 'sys.exit(' in self.text:
self.errors.append( # TODO: Add line/col
( self.reporter.error(
205, path=self.object_path,
'sys.exit() call found. Should be exit_json/fail_json' code=205,
) msg='sys.exit() call found. Should be exit_json/fail_json'
) )
def _check_for_gpl3_header(self): def _check_for_gpl3_header(self):
if ('GNU General Public License' not in self.text and if ('GNU General Public License' not in self.text and
'version 3' not in self.text): 'version 3' not in self.text):
self.errors.append((105, 'GPLv3 license header not found')) self.reporter.error(
path=self.object_path,
code=105,
msg='GPLv3 license header not found'
)
def _check_for_tabs(self): def _check_for_tabs(self):
for line_no, line in enumerate(self.text.splitlines()): for line_no, line in enumerate(self.text.splitlines()):
indent = INDENT_REGEX.search(line) indent = INDENT_REGEX.search(line)
if indent and '\t' in line: if indent and '\t' in line:
index = line.index('\t') index = line.index('\t')
self.errors.append(( self.reporter.error(
402, path=self.object_path,
'indentation contains tabs. line %d column %d' % (line_no + 1, index) code=402,
)) msg='indentation contains tabs',
line=line_no + 1,
column=index
)
def _find_blacklist_imports(self): def _find_blacklist_imports(self):
for child in self.ast.body: for child in self.ast.body:
@ -370,14 +398,20 @@ class ModuleValidator(Validator):
if isinstance(grandchild, ast.Import): if isinstance(grandchild, ast.Import):
names.extend(grandchild.names) names.extend(grandchild.names)
for name in names: for name in names:
# TODO: Add line/col
for blacklist_import, options in BLACKLIST_IMPORTS.items(): for blacklist_import, options in BLACKLIST_IMPORTS.items():
if re.search(blacklist_import, name.name): if re.search(blacklist_import, name.name):
msg = options['msg']
new_only = options['new_only'] new_only = options['new_only']
if self._is_new_module() and new_only: if self._is_new_module() and new_only:
self.errors.append(msg) self.reporter.error(
path=self.object_path,
**options['error']
)
elif not new_only: elif not new_only:
self.errors.append(msg) self.reporter.error(
path=self.object_path,
**options['error']
)
def _find_module_utils(self, main): def _find_module_utils(self, main):
linenos = [] linenos = []
@ -403,25 +437,38 @@ class ModuleValidator(Validator):
msg = ( msg = (
208, 208,
('module_utils imports should import specific ' ('module_utils imports should import specific '
'components, not "*". line %d' % child.lineno) 'components, not "*"')
) )
if self._is_new_module(): if self._is_new_module():
self.errors.append(msg) self.reporter.error(
path=self.object_path,
code=msg[0],
msg=msg[1],
line=child.lineno
)
else: else:
self.warnings.append(msg) self.reporter.warning(
path=self.object_path,
code=msg[0],
msg=msg[1],
line=child.lineno
)
if (isinstance(name, ast.alias) and if (isinstance(name, ast.alias) and
name.name == 'basic'): name.name == 'basic'):
found_basic = True found_basic = True
if not linenos: if not linenos:
self.errors.append((201, 'Did not find a module_utils import')) self.reporter.error(
path=self.object_path,
code=201,
msg='Did not find a module_utils import'
)
elif not found_basic: elif not found_basic:
self.warnings.append( self.reporter.warning(
( path=self.object_path,
292, code=292,
'Did not find "ansible.module_utils.basic" import' msg='Did not find "ansible.module_utils.basic" import'
)
) )
return linenos return linenos
@ -455,13 +502,19 @@ class ModuleValidator(Validator):
child.value.func.id == 'main'): child.value.func.id == 'main'):
lineno = child.lineno lineno = child.lineno
if lineno < self.length - 1: if lineno < self.length - 1:
self.errors.append(( self.reporter.error(
104, path=self.object_path,
'Call to main() not the last line' code=104,
)) msg='Call to main() not the last line',
line=lineno
)
if not lineno: if not lineno:
self.errors.append((103, 'Did not find a call to main')) self.reporter.error(
path=self.object_path,
code=103,
msg='Did not find a call to main'
)
return lineno or 0 return lineno or 0
@ -481,10 +534,12 @@ class ModuleValidator(Validator):
if target.id.lower().startswith('has_'): if target.id.lower().startswith('has_'):
found_has = True found_has = True
if found_try_except_import and not found_has: if found_try_except_import and not found_has:
self.warnings.append(( # TODO: Add line/col
291, self.reporter.warning(
'Found Try/Except block without HAS_ assginment' path=self.object_path,
)) code=291,
msg='Found Try/Except block without HAS_ assginment'
)
def _ensure_imports_below_docs(self, doc_info, first_callable): def _ensure_imports_below_docs(self, doc_info, first_callable):
min_doc_line = min( min_doc_line = min(
@ -500,13 +555,14 @@ class ModuleValidator(Validator):
if isinstance(child, (ast.Import, ast.ImportFrom)): if isinstance(child, (ast.Import, ast.ImportFrom)):
import_lines.append(child.lineno) import_lines.append(child.lineno)
if child.lineno < min_doc_line: if child.lineno < min_doc_line:
self.errors.append(( self.reporter.error(
106, path=self.object_path,
('Import found before documentation variables. ' code=106,
'All imports must appear below ' msg=('Import found before documentation variables. '
'DOCUMENTATION/EXAMPLES/RETURN/ANSIBLE_METADATA. ' 'All imports must appear below '
'line %d' % (child.lineno,)) 'DOCUMENTATION/EXAMPLES/RETURN/ANSIBLE_METADATA.'),
)) line=child.lineno
)
break break
elif isinstance(child, TRY_EXCEPT): elif isinstance(child, TRY_EXCEPT):
bodies = child.body bodies = child.body
@ -516,14 +572,15 @@ class ModuleValidator(Validator):
if isinstance(grandchild, (ast.Import, ast.ImportFrom)): if isinstance(grandchild, (ast.Import, ast.ImportFrom)):
import_lines.append(grandchild.lineno) import_lines.append(grandchild.lineno)
if grandchild.lineno < min_doc_line: if grandchild.lineno < min_doc_line:
self.errors.append(( self.reporter.error(
106, path=self.object_path,
('Import found before documentation ' code=106,
'variables. All imports must appear below ' msg=('Import found before documentation '
'DOCUMENTATION/EXAMPLES/RETURN/' 'variables. All imports must appear below '
'ANSIBLE_METADATA. line %d' % 'DOCUMENTATION/EXAMPLES/RETURN/'
(child.lineno,)) 'ANSIBLE_METADATA.'),
)) line=child.lineno
)
break break
for import_line in import_lines: for import_line in import_lines:
@ -531,26 +588,48 @@ class ModuleValidator(Validator):
msg = ( msg = (
107, 107,
('Imports should be directly below DOCUMENTATION/EXAMPLES/' ('Imports should be directly below DOCUMENTATION/EXAMPLES/'
'RETURN/ANSIBLE_METADATA. line %d' % import_line) 'RETURN/ANSIBLE_METADATA.')
) )
if self._is_new_module(): if self._is_new_module():
self.errors.append(msg) self.reporter.error(
path=self.object_path,
code=msg[0],
msg=msg[1],
line=import_line
)
else: else:
self.warnings.append(msg) self.reporter.warning(
path=self.object_path,
code=msg[0],
msg=msg[1],
line=import_line
)
def _find_ps_replacers(self): def _find_ps_replacers(self):
if 'WANT_JSON' not in self.text: if 'WANT_JSON' not in self.text:
self.errors.append((206, 'WANT_JSON not found in module')) self.reporter.error(
path=self.object_path,
code=206,
msg='WANT_JSON not found in module'
)
if REPLACER_WINDOWS not in self.text: if REPLACER_WINDOWS not in self.text:
self.errors.append((207, '"%s" not found in module' % REPLACER_WINDOWS)) self.reporter.error(
path=self.object_path,
code=207,
msg='"%s" not found in module' % REPLACER_WINDOWS
)
def _find_ps_docs_py_file(self): def _find_ps_docs_py_file(self):
if self.object_name in self.PS_DOC_BLACKLIST: if self.object_name in self.PS_DOC_BLACKLIST:
return return
py_path = self.path.replace('.ps1', '.py') py_path = self.path.replace('.ps1', '.py')
if not os.path.isfile(py_path): if not os.path.isfile(py_path):
self.errors.append((503, 'Missing python documentation file')) self.reporter.error(
path=self.object_path,
code=503,
msg='Missing python documentation file'
)
def _get_docs(self): def _get_docs(self):
docs = { docs = {
@ -611,6 +690,7 @@ class ModuleValidator(Validator):
return docs return docs
def _validate_docs_schema(self, doc, schema, name, error_code): def _validate_docs_schema(self, doc, schema, name, error_code):
# TODO: Add line/col
errors = [] errors = []
try: try:
schema(doc) schema(doc)
@ -627,87 +707,142 @@ class ModuleValidator(Validator):
else: else:
error_message = error error_message = error
self.errors.append(( self.reporter.error(
error_code, path=self.object_path,
'%s.%s: %s' % (name, '.'.join(path), error_message) code=error_code,
)) msg='%s.%s: %s' % (name, '.'.join(path), error_message)
)
def _validate_docs(self): def _validate_docs(self):
doc_info = self._get_docs() doc_info = self._get_docs()
deprecated = False deprecated = False
if not bool(doc_info['DOCUMENTATION']['value']): if not bool(doc_info['DOCUMENTATION']['value']):
self.errors.append((301, 'No DOCUMENTATION provided')) self.reporter.error(
path=self.object_path,
code=301,
msg='No DOCUMENTATION provided'
)
else: else:
doc, errors, traces = parse_yaml( doc, errors, traces = parse_yaml(
doc_info['DOCUMENTATION']['value'], doc_info['DOCUMENTATION']['value'],
doc_info['DOCUMENTATION']['lineno'], doc_info['DOCUMENTATION']['lineno'],
self.name, 'DOCUMENTATION' self.name, 'DOCUMENTATION'
) )
self.errors.extend([(302, e) for e in errors]) for error in errors:
self.traces.extend(traces) self.reporter.error(
path=self.object_path,
code=302,
**error
)
for trace in traces:
self.reporter.trace(
path=self.object_path,
tracebk=trace
)
if not errors and not traces: if not errors and not traces:
with CaptureStd(): with CaptureStd():
try: try:
get_docstring(self.path, verbose=True) get_docstring(self.path, verbose=True)
except AssertionError: except AssertionError:
fragment = doc['extends_documentation_fragment'] fragment = doc['extends_documentation_fragment']
self.errors.append(( self.reporter.error(
303, path=self.object_path,
'DOCUMENTATION fragment missing: %s' % fragment code=303,
)) msg='DOCUMENTATION fragment missing: %s' % fragment
)
except Exception: except Exception:
self.traces.append(traceback.format_exc()) self.reporter.trace(
self.errors.append(( path=self.object_path,
304, tracebk=traceback.format_exc()
'Unknown DOCUMENTATION error, see TRACE' )
)) self.reporter.error(
path=self.object_path,
code=304,
msg='Unknown DOCUMENTATION error, see TRACE'
)
if 'options' in doc and doc['options'] is None and doc.get('extends_documentation_fragment'): if 'options' in doc and doc['options'] is None and doc.get('extends_documentation_fragment'):
self.errors.append(( self.reporter.error(
305, path=self.object_path,
('DOCUMENTATION.options must be a dictionary/hash when used ' code=304,
'with DOCUMENTATION.extends_documentation_fragment') msg=('DOCUMENTATION.options must be a dictionary/hash when used '
)) 'with DOCUMENTATION.extends_documentation_fragment')
)
if self.object_name.startswith('_') and not os.path.islink(self.object_path): if self.object_name.startswith('_') and not os.path.islink(self.object_path):
deprecated = True deprecated = True
if 'deprecated' not in doc or not doc.get('deprecated'): if 'deprecated' not in doc or not doc.get('deprecated'):
self.errors.append(( self.reporter.error(
318, path=self.object_path,
'Module deprecated, but DOCUMENTATION.deprecated is missing' code=318,
)) msg='Module deprecated, but DOCUMENTATION.deprecated is missing'
)
self._validate_docs_schema(doc, doc_schema(self.object_name.split('.')[0]), 'DOCUMENTATION', 305) self._validate_docs_schema(doc, doc_schema(self.object_name.split('.')[0]), 'DOCUMENTATION', 305)
self._check_version_added(doc) self._check_version_added(doc)
self._check_for_new_args(doc) self._check_for_new_args(doc)
if not bool(doc_info['EXAMPLES']['value']): if not bool(doc_info['EXAMPLES']['value']):
self.errors.append((310, 'No EXAMPLES provided')) self.reporter.error(
path=self.object_path,
code=310,
msg='No EXAMPLES provided'
)
else: else:
_, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'], _, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
doc_info['EXAMPLES']['lineno'], doc_info['EXAMPLES']['lineno'],
self.name, 'EXAMPLES', load_all=True) self.name, 'EXAMPLES', load_all=True)
self.errors.extend([(311, error) for error in errors]) for error in errors:
self.traces.extend(traces) self.reporter.error(
path=self.object_path,
code=311,
**error
)
for trace in traces:
self.reporter.trace(
path=self.object_path,
tracebk=trace
)
if not bool(doc_info['RETURN']['value']): if not bool(doc_info['RETURN']['value']):
if self._is_new_module(): if self._is_new_module():
self.errors.append((312, 'No RETURN documentation provided')) self.reporter.error(
path=self.object_path,
code=312,
msg='No RETURN provided'
)
else: else:
self.warnings.append((312, 'No RETURN provided')) self.reporter.warning(
path=self.object_path,
code=312,
msg='No RETURN provided'
)
else: else:
data, errors, traces = parse_yaml(doc_info['RETURN']['value'], data, errors, traces = parse_yaml(doc_info['RETURN']['value'],
doc_info['RETURN']['lineno'], doc_info['RETURN']['lineno'],
self.name, 'RETURN') self.name, 'RETURN')
if data: if data:
for ret_key in data: for ret_key in data:
self._validate_docs_schema(data[ret_key], return_schema(data[ret_key]), 'RETURN.%s' % ret_key, 319) self._validate_docs_schema(data[ret_key], return_schema(data[ret_key]), 'RETURN.%s' % ret_key, 319)
self.errors.extend([(313, error) for error in errors])
self.traces.extend(traces) for error in errors:
self.reporter.error(
path=self.object_path,
code=313,
**error
)
for trace in traces:
self.reporter.trace(
path=self.object_path,
tracebk=trace
)
if not bool(doc_info['ANSIBLE_METADATA']['value']): if not bool(doc_info['ANSIBLE_METADATA']['value']):
self.errors.append((314, 'No ANSIBLE_METADATA provided')) self.reporter.error(
path=self.object_path,
code=314,
msg='No ANSIBLE_METADATA provided'
)
else: else:
metadata = None metadata = None
if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict): if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
@ -720,8 +855,17 @@ class ModuleValidator(Validator):
doc_info['ANSIBLE_METADATA']['lineno'], doc_info['ANSIBLE_METADATA']['lineno'],
self.name, 'ANSIBLE_METADATA' self.name, 'ANSIBLE_METADATA'
) )
self.errors.extend([(315, error) for error in errors]) for error in errors:
self.traces.extend(traces) self.reporter.error(
path=self.object_path,
code=315,
**error
)
for trace in traces:
self.reporter.trace(
path=self.object_path,
tracebk=trace
)
if metadata: if metadata:
self._validate_docs_schema(metadata, metadata_schema(deprecated), self._validate_docs_schema(metadata, metadata_schema(deprecated),
@ -737,10 +881,11 @@ class ModuleValidator(Validator):
version_added = StrictVersion(str(doc.get('version_added', '0.0'))) version_added = StrictVersion(str(doc.get('version_added', '0.0')))
except ValueError: except ValueError:
version_added = doc.get('version_added', '0.0') version_added = doc.get('version_added', '0.0')
self.errors.append(( self.reporter.error(
306, path=self.object_path,
'version_added is not a valid version number: %r' % version_added code=306,
)) msg='version_added is not a valid version number: %r' % version_added
)
return return
should_be = '.'.join(ansible_version.split('.')[:2]) should_be = '.'.join(ansible_version.split('.')[:2])
@ -748,10 +893,11 @@ class ModuleValidator(Validator):
if (version_added < strict_ansible_version or if (version_added < strict_ansible_version or
strict_ansible_version < version_added): strict_ansible_version < version_added):
self.errors.append(( self.reporter.error(
307, path=self.object_path,
'version_added should be %s. Currently %s' % (should_be, version_added) code=307,
)) msg='version_added should be %s. Currently %s' % (should_be, version_added)
)
def _validate_argument_spec(self): def _validate_argument_spec(self):
if not self.analyze_arg_spec: if not self.analyze_arg_spec:
@ -759,12 +905,13 @@ class ModuleValidator(Validator):
spec = get_argument_spec(self.path) spec = get_argument_spec(self.path)
for arg, data in spec.items(): for arg, data in spec.items():
if data.get('required') and data.get('default', object) != object: if data.get('required') and data.get('default', object) != object:
self.errors.append(( self.reporter.error(
317, path=self.object_path,
('"%s" is marked as required but specifies ' code=317,
'a default. Arguments with a default ' msg=('"%s" is marked as required but specifies '
'should not be marked as required' % arg) 'a default. Arguments with a default '
)) 'should not be marked as required' % arg)
)
def _check_for_new_args(self, doc): def _check_for_new_args(self, doc):
if not self.base_branch or self._is_new_module(): if not self.base_branch or self._is_new_module():
@ -776,19 +923,24 @@ class ModuleValidator(Validator):
existing_options = existing_doc.get('options', {}) existing_options = existing_doc.get('options', {})
except AssertionError: except AssertionError:
fragment = doc['extends_documentation_fragment'] fragment = doc['extends_documentation_fragment']
self.warnings.append(( self.reporter.warning(
392, path=self.object_path,
'Pre-existing DOCUMENTATION fragment missing: %s' % fragment code=392,
)) msg='Pre-existing DOCUMENTATION fragment missing: %s' % fragment
)
return return
except Exception as e: except Exception as e:
self.warning_traces.append(e) self.reporter.warning_trace(
self.warnings.append(( path=self.object_path,
391, tracebk=e
('Unknown pre-existing DOCUMENTATION ' )
'error, see TRACE. Submodule refs may ' self.reporter.warning(
'need updated') path=self.object_path,
)) code=391,
msg=('Unknown pre-existing DOCUMENTATION '
'error, see TRACE. Submodule refs may '
'need updated')
)
return return
try: try:
@ -815,12 +967,13 @@ class ModuleValidator(Validator):
) )
except ValueError: except ValueError:
version_added = details.get('version_added', '0.0') version_added = details.get('version_added', '0.0')
self.errors.append(( self.reporter.error(
308, path=self.object_path,
('version_added for new option (%s) ' code=308,
'is not a valid version number: %r' % msg=('version_added for new option (%s) '
(option, version_added)) 'is not a valid version number: %r' %
)) (option, version_added))
)
continue continue
except: except:
# If there is any other exception it should have been caught # If there is any other exception it should have been caught
@ -831,12 +984,13 @@ class ModuleValidator(Validator):
if (strict_ansible_version != mod_version_added and if (strict_ansible_version != mod_version_added and
(version_added < strict_ansible_version or (version_added < strict_ansible_version or
strict_ansible_version < version_added)): strict_ansible_version < version_added)):
self.errors.append(( self.reporter.error(
309, path=self.object_path,
('version_added for new option (%s) should ' code=309,
'be %s. Currently %s' % msg=('version_added for new option (%s) should '
(option, should_be, version_added)) 'be %s. Currently %s' %
)) (option, should_be, version_added))
)
@staticmethod @staticmethod
def is_blacklisted(path): def is_blacklisted(path):
@ -858,25 +1012,29 @@ class ModuleValidator(Validator):
def validate(self): def validate(self):
super(ModuleValidator, self).validate() super(ModuleValidator, self).validate()
# if self._powershell_module():
# self.warnings.append('Cannot check powershell modules at this '
# 'time. Skipping')
# return
if not self._python_module() and not self._powershell_module(): if not self._python_module() and not self._powershell_module():
self.errors.append(( self.reporter.error(
501, path=self.object_path,
('Official Ansible modules must have a .py ' code=501,
'extension for python modules or a .ps1 ' msg=('Official Ansible modules must have a .py '
'for powershell modules') 'extension for python modules or a .ps1 '
)) 'for powershell modules')
)
self._python_module_override = True self._python_module_override = True
if self._python_module() and self.ast is None: if self._python_module() and self.ast is None:
self.errors.append((401, 'Python SyntaxError while parsing module')) self.reporter.error(
path=self.object_path,
code=401,
msg='Python SyntaxError while parsing module'
)
try: try:
compile(self.text, self.path, 'exec') compile(self.text, self.path, 'exec')
except Exception: except Exception:
self.traces.append(traceback.format_exc()) self.reporter.trace(
path=self.object_path,
tracebk=traceback.format_exc()
)
return return
if self._python_module(): if self._python_module():
@ -908,8 +1066,8 @@ class ModuleValidator(Validator):
class PythonPackageValidator(Validator): class PythonPackageValidator(Validator):
BLACKLIST_FILES = frozenset(('__pycache__',)) BLACKLIST_FILES = frozenset(('__pycache__',))
def __init__(self, path): def __init__(self, path, reporter=None):
super(PythonPackageValidator, self).__init__() super(PythonPackageValidator, self).__init__(reporter=reporter or Reporter())
self.path = path self.path = path
self.basename = os.path.basename(path) self.basename = os.path.basename(path)
@ -930,11 +1088,10 @@ class PythonPackageValidator(Validator):
init_file = os.path.join(self.path, '__init__.py') init_file = os.path.join(self.path, '__init__.py')
if not os.path.exists(init_file): if not os.path.exists(init_file):
self.errors.append( self.reporter.error(
( path=self.object_path,
502, code=502,
'Ansible module subdirectories must contain an __init__.py' msg='Ansible module subdirectories must contain an __init__.py'
)
) )
@ -975,8 +1132,7 @@ def main():
args.modules[:] = [m.rstrip('/') for m in args.modules] args.modules[:] = [m.rstrip('/') for m in args.modules]
reports = OrderedDict() reporter = Reporter()
git_cache = GitCache(args.base_branch) git_cache = GitCache(args.base_branch)
for module in args.modules: for module in args.modules:
@ -987,9 +1143,8 @@ def main():
if ModuleValidator.is_blacklisted(path): if ModuleValidator.is_blacklisted(path):
continue continue
with ModuleValidator(path, analyze_arg_spec=args.arg_spec, with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
base_branch=args.base_branch, git_cache=git_cache) as mv: base_branch=args.base_branch, git_cache=git_cache, reporter=reporter) as mv:
mv.validate() mv.validate()
reports.update(mv.report())
for root, dirs, files in os.walk(module): for root, dirs, files in os.walk(module):
basedir = root[len(module) + 1:].split('/', 1)[0] basedir = root[len(module) + 1:].split('/', 1)[0]
@ -1001,9 +1156,8 @@ def main():
path = os.path.join(root, dirname) path = os.path.join(root, dirname)
if args.exclude and args.exclude.search(path): if args.exclude and args.exclude.search(path):
continue continue
pv = PythonPackageValidator(path) pv = PythonPackageValidator(path, reporter=reporter)
pv.validate() pv.validate()
reports.update(pv.report())
for filename in files: for filename in files:
path = os.path.join(root, filename) path = os.path.join(root, filename)
@ -1012,14 +1166,13 @@ def main():
if ModuleValidator.is_blacklisted(path): if ModuleValidator.is_blacklisted(path):
continue continue
with ModuleValidator(path, analyze_arg_spec=args.arg_spec, with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
base_branch=args.base_branch, git_cache=git_cache) as mv: base_branch=args.base_branch, git_cache=git_cache, reporter=reporter) as mv:
mv.validate() mv.validate()
reports.update(mv.report())
if args.format == 'plain': if args.format == 'plain':
sys.exit(Reporter.plain(reports, warnings=args.warnings, output=args.output)) sys.exit(reporter.plain(warnings=args.warnings, output=args.output))
else: else:
sys.exit(Reporter.json(reports, warnings=args.warnings, output=args.output)) sys.exit(reporter.json(warnings=args.warnings, output=args.output))
class GitCache(object): class GitCache(object):