Mirror of https://github.com/ansible-collections/community.general.git
Consider module_utils deps when running tests. (#21382)

* Skip pep8 analysis when --explain is used.
* Fix return type annotations.
* Match line length requirement of PEP 8 config.
* Consider module_utils deps when running tests.
parent f2729f11c3
commit d54bc09fae
6 changed files with 229 additions and 37 deletions
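In short: changed files under lib/ansible/module_utils/ are now expanded into the files that import them, directly or transitively, before changes are mapped to tests. Below is a minimal, self-contained sketch of that expansion step; it uses an invented changed path and reverse-import map instead of the real PathMapper and get_python_module_utils_imports machinery added by this commit.

    # Illustrative sketch only -- the changed path and the usage map below are made up.
    changed_paths = ['lib/ansible/module_utils/facts.py']

    # module_util name -> files known to import it (normally built by analyzing import statements)
    module_utils_usage = {
        'facts': {'lib/ansible/modules/system/setup.py'},
    }

    additional_paths = set()

    for path in changed_paths:
        name = path.rsplit('/', 1)[-1][:-3]  # strip directory and '.py' -> 'facts'
        additional_paths.update(module_utils_usage.get(name, set()))

    additional_paths -= set(changed_paths)  # don't count changed paths as additional paths

    print(sorted(set(changed_paths) | additional_paths))
    # ['lib/ansible/module_utils/facts.py', 'lib/ansible/modules/system/setup.py']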
@@ -3,6 +3,7 @@
from __future__ import absolute_import, print_function

import os
import time

from lib.target import (
    walk_module_targets,
@@ -17,6 +18,10 @@ from lib.util import (
    display,
)

from lib.import_analysis import (
    get_python_module_utils_imports,
)


def categorize_changes(paths, verbose_command=None):
    """
@@ -35,6 +40,26 @@ def categorize_changes(paths, verbose_command=None):
        'network-integration': set(),
    }

    additional_paths = set()

    for path in paths:
        dependent_paths = mapper.get_dependent_paths(path)

        if not dependent_paths:
            continue

        display.info('Expanded "%s" to %d dependent file(s):' % (path, len(dependent_paths)), verbosity=1)

        for dependent_path in dependent_paths:
            display.info(dependent_path, verbosity=1)
            additional_paths.add(dependent_path)

    additional_paths -= set(paths)  # don't count changed paths as additional paths

    if additional_paths:
        display.info('Expanded %d changed file(s) into %d additional dependent file(s).' % (len(paths), len(additional_paths)))
        paths = sorted(set(paths) | additional_paths)

    display.info('Mapping %d changed file(s) to tests.' % len(paths))

    for path in paths:
@@ -98,6 +123,35 @@ class PathMapper(object):
        self.prefixes = load_integration_prefixes()

        self.python_module_utils_imports = {}  # populated on first use to reduce overhead when not needed

    def get_dependent_paths(self, path):
        """
        :type path: str
        :rtype: list[str]
        """
        name, ext = os.path.splitext(os.path.split(path)[1])

        if path.startswith('lib/ansible/module_utils/'):
            if ext == '.py':
                return self.get_python_module_utils_usage(name)

        return []

    def get_python_module_utils_usage(self, name):
        """
        :type name: str
        :rtype: list[str]
        """
        if not self.python_module_utils_imports:
            display.info('Analyzing python module_utils imports...')
            before = time.time()
            self.python_module_utils_imports = get_python_module_utils_imports(self.compile_targets)
            after = time.time()
            display.info('Processed %d python module_utils in %d second(s).' % (len(self.python_module_utils_imports), after - before))

        return sorted(self.python_module_utils_imports.get(name, set()))

    def classify(self, path):
        """
        :type path: str
@@ -174,39 +228,7 @@ class PathMapper(object):
                }

            if ext == '.py':
                network_utils = (
                    'netcfg',
                    'netcli',
                    'network_common',
                    'network',
                )

                if name in network_utils:
                    return {
                        'network-integration': 'network/',  # target all network platforms
                        'units': 'all',
                    }

                if name in self.prefixes and self.prefixes[name] == 'network':
                    network_target = 'network/%s/' % name

                    if network_target in self.integration_targets_by_alias:
                        return {
                            'network-integration': network_target,
                            'units': 'all',
                        }

                    display.warning('Integration tests for "%s" not found.' % network_target)

                    return {
                        'units': 'all',
                    }

                return {
                    'integration': 'all',
                    'network-integration': 'all',
                    'units': 'all',
                }
            return minimal  # already expanded using get_dependent_paths

        if path.startswith('lib/ansible/plugins/connection/'):
            if name == '__init__':
@@ -341,7 +341,7 @@ def generate_command(args, path, options, exclude, require):
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :return: list[str]
    :rtype: list[str]
    """
    options['--color'] = 1
@@ -165,7 +165,7 @@ def generate_egg_info(args):
def generate_pip_install(command):
    """
    :type command: str
    :return: list[str] | None
    :rtype: list[str] | None
    """
    constraints = 'test/runner/requirements/constraints.txt'
    requirements = 'test/runner/requirements/%s.txt' % command
@@ -861,6 +861,9 @@ def command_sanity_pep8(args, targets):
    if stderr:
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr)

    if args.explain:
        return

    pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[A-Z0-9]{4}) (?P<message>.*)$'

    results = [re.search(pattern, line).groupdict() for line in stdout.splitlines()]
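For reference, here is a small self-contained sketch of what the pep8 output pattern above captures, applied to a made-up result line (the path and message are illustrative):

    import re

    pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[A-Z0-9]{4}) (?P<message>.*)$'
    sample = 'lib/ansible/module_utils/facts.py:10:1: E302 expected 2 blank lines, found 1'

    # groupdict() produces the named fields collected into the results list above
    print(re.search(pattern, sample).groupdict())
    # {'path': 'lib/ansible/module_utils/facts.py', 'line': '10', 'column': '1',
    #  'code': 'E302', 'message': 'expected 2 blank lines, found 1'}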
test/runner/lib/import_analysis.py (new file, 167 lines)
@@ -0,0 +1,167 @@
"""Analyze python import statements."""

from __future__ import absolute_import, print_function

import ast
import os
import uuid

from lib.util import (
    display,
    ApplicationError,
)


def get_python_module_utils_imports(compile_targets):
    """Return a dictionary of python file paths mapped to sets of module_utils names.
    :type compile_targets: list[TestTarget]
    :rtype: dict[str, set[str]]
    """
    module_utils_files = (os.path.splitext(filename) for filename in os.listdir('lib/ansible/module_utils'))
    module_utils = sorted(name[0] for name in module_utils_files if name[0] != '__init__' and name[1] == '.py')

    imports_by_target_path = {}

    for target in compile_targets:
        imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils)

    def recurse_import(import_name, depth=0, seen=None):
        """Recursively expand module_utils imports from module_utils files.
        :type import_name: str
        :type depth: int
        :type seen: set[str] | None
        :rtype set[str]
        """
        display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)

        if seen is None:
            seen = set([import_name])

        results = set([import_name])

        import_path = os.path.join('lib/ansible/module_utils', '%s.py' % import_name)

        for name in sorted(imports_by_target_path.get(import_path, set())):
            if name in seen:
                continue

            seen.add(name)

            matches = sorted(recurse_import(name, depth + 1, seen))

            for result in matches:
                results.add(result)

        return results

    for module_util in module_utils:
        # recurse over module_utils imports while excluding self
        module_util_imports = recurse_import(module_util)
        module_util_imports.remove(module_util)

        # add recursive imports to all path entries which import this module_util
        for target_path in imports_by_target_path:
            if module_util in imports_by_target_path[target_path]:
                for module_util_import in sorted(module_util_imports):
                    if module_util_import not in imports_by_target_path[target_path]:
                        display.info('%s inherits import %s via %s' % (target_path, module_util_import, module_util), verbosity=6)
                        imports_by_target_path[target_path].add(module_util_import)

    imports = dict([(module_util, set()) for module_util in module_utils])

    for target_path in imports_by_target_path:
        for module_util in imports_by_target_path[target_path]:
            imports[module_util].add(target_path)

    for module_util in sorted(imports):
        if not len(imports[module_util]):
            display.warning('No imports found which use the "%s" module_util.' % module_util)

    return imports


def extract_python_module_utils_imports(path, module_utils):
    """Return a list of module_utils imports found in the specified source file.
    :type path: str
    :type module_utils: set[str]
    :rtype: set[str]
    """
    with open(path, 'r') as module_fd:
        code = module_fd.read()

    try:
        tree = ast.parse(code)
    except SyntaxError as ex:
        # Setting the full path to the filename results in only the filename being given for str(ex).
        # As a work-around, set the filename to a UUID and replace it in the final string output with the actual path.
        ex.filename = str(uuid.uuid4())
        error = str(ex).replace(ex.filename, path)
        raise ApplicationError('AST parse error: %s' % error)

    finder = ModuleUtilFinder(path, module_utils)
    finder.visit(tree)
    return finder.imports


class ModuleUtilFinder(ast.NodeVisitor):
    """AST visitor to find valid module_utils imports."""
    def __init__(self, path, module_utils):
        """Return a list of module_utils imports found in the specified source file.
        :type path: str
        :type module_utils: set[str]
        """
        super(ModuleUtilFinder, self).__init__()

        self.path = path
        self.module_utils = module_utils
        self.imports = set()

    # noinspection PyPep8Naming
    # pylint: disable=locally-disabled, invalid-name
    def visit_Import(self, node):
        """
        :type node: ast.Import
        """
        self.generic_visit(node)

        for alias in node.names:
            if alias.name.startswith('ansible.module_utils.'):
                # import ansible.module_utils.MODULE[.MODULE]
                self.add_import(alias.name.split('.')[2], node.lineno)

    # noinspection PyPep8Naming
    # pylint: disable=locally-disabled, invalid-name
    def visit_ImportFrom(self, node):
        """
        :type node: ast.ImportFrom
        """
        self.generic_visit(node)

        if not node.module:
            return

        if node.module == 'ansible.module_utils':
            for alias in node.names:
                # from ansible.module_utils import MODULE[, MODULE]
                self.add_import(alias.name, node.lineno)
        elif node.module.startswith('ansible.module_utils.'):
            # from ansible.module_utils.MODULE[.MODULE]
            self.add_import(node.module.split('.')[2], node.lineno)

    def add_import(self, name, line_number):
        """
        :type name: str
        :type line_number: int
        """
        if name in self.imports:
            return  # duplicate imports are ignored

        if name not in self.module_utils:
            if self.path.startswith('test/'):
                return  # invalid imports in tests are ignored

            raise Exception('%s:%d Invalid module_util import: %s' % (self.path, line_number, name))

        display.info('%s:%d imports module_utils: %s' % (self.path, line_number, name), verbosity=5)

        self.imports.add(name)
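As a quick, self-contained illustration of the AST-based detection the new file performs, the sketch below applies two of the import forms handled by ModuleUtilFinder ("from ansible.module_utils import X" and "from ansible.module_utils.X import Y") to an in-memory code string; the imported names are made up.

    import ast

    code = (
        "from ansible.module_utils import basic\n"
        "from ansible.module_utils.facts import Facts\n"
    )

    found = set()

    for node in ast.walk(ast.parse(code)):
        if isinstance(node, ast.ImportFrom) and node.module:
            if node.module == 'ansible.module_utils':
                found.update(alias.name for alias in node.names)
            elif node.module.startswith('ansible.module_utils.'):
                found.add(node.module.split('.')[2])

    print(sorted(found))  # ['basic', 'facts']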
@@ -227,7 +227,7 @@ def deepest_path(path_a, path_b):
    """Return the deepest of two paths, or None if the paths are unrelated.
    :type path_a: str
    :type path_b: str
    :return: str | None
    :rtype: str | None
    """
    if path_a == '.':
        path_a = ''
@@ -2,7 +2,7 @@

cd test/runner/

pylint --max-line-length=120 --reports=n ./*.py ./*/*.py \
pylint --max-line-length=160 --reports=n ./*.py ./*/*.py \
    --jobs 2 \
    --rcfile /dev/null \
    --function-rgx '[a-z_][a-z0-9_]{2,40}$' \