mirror of
https://github.com/ansible-collections/community.general.git
synced 2024-09-14 20:13:21 +02:00
Fix ansible-doc and docsite generation for removed modules
* Fix ansible-doc wrt removed modules * Fix listing of modules via ansible-doc to not complain about removed modules Removed modules are marked as such in the metadata but nowhere else. Need to retrieve the metadata when a module doesn't have a doc so that we can tell if it falls under this case. * omit removed modules from json dump * Print an error that the module has been removed if attempting to run ansible-doc on that specific module * Get plugin_formatter to stop outputting removed modules
This commit is contained in:
parent
b2932a41b0
commit
0873d46617
4 changed files with 59 additions and 53 deletions
|
@@ -30,6 +30,7 @@ import re
|
|||
import sys
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from distutils.version import LooseVersion
|
||||
from functools import partial
|
||||
from pprint import PrettyPrinter
|
||||
|
@@ -263,11 +264,18 @@ def get_plugin_info(module_dir, limit_to=None, verbose=False):
|
|||
# Regular module to process
|
||||
#
|
||||
|
||||
# use ansible core library to parse out doc metadata YAML and plaintext examples
|
||||
doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose)
|
||||
|
||||
if metadata and 'removed' in metadata.get('status'):
|
||||
continue
|
||||
|
||||
category = categories
|
||||
|
||||
# Start at the second directory because we don't want the "vendor"
|
||||
mod_path_only = os.path.dirname(module_path[len(module_dir):])
|
||||
|
||||
primary_category = ''
|
||||
module_categories = []
|
||||
# build up the categories that this module belongs to
|
||||
for new_cat in mod_path_only.split('/')[1:]:
|
||||
|
@@ -283,9 +291,6 @@ def get_plugin_info(module_dir, limit_to=None, verbose=False):
|
|||
if module_categories:
|
||||
primary_category = module_categories[0]
|
||||
|
||||
# use ansible core library to parse out doc metadata YAML and plaintext examples
|
||||
doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose)
|
||||
|
||||
if 'options' in doc and doc['options'] is None:
|
||||
display.error("*** ERROR: DOCUMENTATION.options must be a dictionary/hash when used. ***")
|
||||
pos = getattr(doc, "ansible_pos", None)
|
||||
|
@@ -525,6 +530,11 @@ def process_plugins(module_map, templates, outputname, output_dir, ansible_versi
|
|||
|
||||
|
||||
def process_categories(plugin_info, categories, templates, output_dir, output_name, plugin_type):
|
||||
# For some reason, this line is changing plugin_info:
|
||||
# text = templates['list_of_CATEGORY_modules'].render(template_data)
|
||||
# To avoid that, make a deepcopy of the data.
|
||||
# We should track that down and fix it at some point in the future.
|
||||
plugin_info = deepcopy(plugin_info)
|
||||
for category in sorted(categories.keys()):
|
||||
module_map = categories[category]
|
||||
category_filename = output_name % category
|
||||
|
|
|
@@ -30,10 +30,12 @@ from ansible.cli import CLI
|
|||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.parsing.metadata import extract_metadata
|
||||
from ansible.parsing.plugin_docs import read_docstub
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, \
|
||||
vars_loader, connection_loader, strategy_loader, inventory_loader, shell_loader, fragment_loader
|
||||
from ansible.utils.plugin_docs import BLACKLIST, get_docstring, get_docstub
|
||||
from ansible.utils.plugin_docs import BLACKLIST, get_docstring
|
||||
|
||||
try:
|
||||
from __main__ import display
|
||||
|
@@ -130,7 +132,7 @@ class DocCLI(CLI):
|
|||
for path in paths:
|
||||
self.plugin_list.update(self.find_plugins(path, plugin_type))
|
||||
|
||||
self.pager(self.get_plugin_list_text(loader, doc_getter=get_docstub))
|
||||
self.pager(self.get_plugin_list_text(loader))
|
||||
return 0
|
||||
|
||||
# process all plugins of type
|
||||
|
@@ -144,7 +146,9 @@ class DocCLI(CLI):
|
|||
plugin_data[plugin_type] = dict()
|
||||
plugin_names = self.get_all_plugins_of_type(plugin_type)
|
||||
for plugin_name in plugin_names:
|
||||
plugin_data[plugin_type][plugin_name] = self.get_plugin_metadata(plugin_type, plugin_name)
|
||||
plugin_info = self.get_plugin_metadata(plugin_type, plugin_name)
|
||||
if plugin_info is not None:
|
||||
plugin_data[plugin_type][plugin_name] = plugin_info
|
||||
|
||||
self.pager(json.dumps(plugin_data, sort_keys=True, indent=4))
|
||||
|
||||
|
@@ -183,12 +187,21 @@ class DocCLI(CLI):
|
|||
raise AnsibleError("unable to load {0} plugin named {1} ".format(plugin_type, plugin_name))
|
||||
|
||||
try:
|
||||
doc, __, __, __ = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
|
||||
doc, __, __, metadata = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
|
||||
except Exception:
|
||||
display.vvv(traceback.format_exc())
|
||||
raise AnsibleError(
|
||||
"%s %s at %s has a documentation error formatting or is missing documentation." %
|
||||
(plugin_type, plugin_name, filename), wrap_text=False)
|
||||
(plugin_type, plugin_name, filename))
|
||||
|
||||
if doc is None:
|
||||
if 'removed' not in metadata.get('status', []):
|
||||
raise AnsibleError(
|
||||
"%s %s at %s has a documentation error formatting or is missing documentation." %
|
||||
(plugin_type, plugin_name, filename))
|
||||
|
||||
# Removed plugins don't have any documentation
|
||||
return None
|
||||
|
||||
return dict(
|
||||
name=plugin_name,
|
||||
|
@@ -258,6 +271,10 @@ class DocCLI(CLI):
|
|||
|
||||
return text
|
||||
else:
|
||||
if 'removed' in metadata.get('status', []):
|
||||
display.warning("%s %s has been removed\n" % (plugin_type, plugin))
|
||||
return
|
||||
|
||||
# this typically means we couldn't even parse the docstring, not just that the YAML is busted,
|
||||
# probably a quoting issue.
|
||||
raise AnsibleError("Parsing produced an empty object.")
|
||||
|
@@ -304,7 +321,7 @@ class DocCLI(CLI):
|
|||
|
||||
return plugin_list
|
||||
|
||||
def get_plugin_list_text(self, loader, doc_getter=get_docstring):
|
||||
def get_plugin_list_text(self, loader):
|
||||
columns = display.columns
|
||||
displace = max(len(x) for x in self.plugin_list)
|
||||
linelimit = columns - displace - 5
|
||||
|
@@ -325,13 +342,19 @@ class DocCLI(CLI):
|
|||
|
||||
doc = None
|
||||
try:
|
||||
doc, plainexamples, returndocs, metadata = doc_getter(filename, fragment_loader)
|
||||
doc = read_docstub(filename)
|
||||
except Exception:
|
||||
display.warning("%s has a documentation formatting error" % plugin)
|
||||
continue
|
||||
|
||||
if not doc or not isinstance(doc, dict):
|
||||
desc = 'UNDOCUMENTED'
|
||||
display.warning("%s parsing did not produce documentation." % plugin)
|
||||
with open(filename) as f:
|
||||
metadata = extract_metadata(module_data=f.read())
|
||||
if 'removed' not in metadata[0].get('status', []):
|
||||
desc = 'UNDOCUMENTED'
|
||||
display.warning("%s parsing did not produce documentation." % plugin)
|
||||
else:
|
||||
continue
|
||||
else:
|
||||
desc = self.tty_ify(doc.get('short_description', 'INVALID SHORT DESCRIPTION').strip())
|
||||
|
||||
|
|
|
@@ -82,41 +82,27 @@ def read_docstring(filename, verbose=True, ignore_errors=True):
|
|||
return data
|
||||
|
||||
|
||||
def read_docstub(filename, verbose=True, ignore_errors=True):
|
||||
def read_docstub(filename):
|
||||
"""
|
||||
Quickly find short_description using string methods instead of node parsing.
|
||||
This does not return a full set of documentation strings and is intended for
|
||||
operations like ansible-doc -l.
|
||||
"""
|
||||
|
||||
data = {
|
||||
'doc': None,
|
||||
'plainexamples': None,
|
||||
'returndocs': None,
|
||||
'metadata': None
|
||||
}
|
||||
t_module_data = open(filename, 'r')
|
||||
capturing = False
|
||||
doc_stub = []
|
||||
|
||||
try:
|
||||
t_module_data = open(filename, 'r')
|
||||
capturing = False
|
||||
doc_stub = []
|
||||
for line in t_module_data:
|
||||
# start capturing the stub until indentation returns
|
||||
if capturing and line[0] == ' ':
|
||||
doc_stub.append(line)
|
||||
elif capturing and line[0] != ' ':
|
||||
break
|
||||
if 'short_description:' in line:
|
||||
capturing = True
|
||||
doc_stub.append(line)
|
||||
|
||||
for line in t_module_data:
|
||||
# start capturing the stub until indentation returns
|
||||
if capturing and line[0] == ' ':
|
||||
doc_stub.append(line)
|
||||
elif capturing and line[0] != ' ':
|
||||
break
|
||||
if 'short_description:' in line:
|
||||
capturing = True
|
||||
doc_stub.append(line)
|
||||
|
||||
data['doc'] = AnsibleLoader(r"".join(doc_stub), file_name=filename).get_single_data()
|
||||
|
||||
except:
|
||||
if verbose:
|
||||
display.error("unable to parse %s" % filename)
|
||||
if not ignore_errors:
|
||||
raise
|
||||
data = AnsibleLoader(r"".join(doc_stub), file_name=filename).get_single_data()
|
||||
|
||||
return data
|
||||
|
|
|
@@ -120,16 +120,3 @@ def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False)
|
|||
add_fragments(data['doc'], filename, fragment_loader=fragment_loader)
|
||||
|
||||
return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
|
||||
|
||||
|
||||
def get_docstub(filename, fragment_loader, verbose=False, ignore_errors=False):
|
||||
"""
|
||||
When only short_description is needed, load a stub of the full DOCUMENTATION string to speed up operation.
|
||||
"""
|
||||
|
||||
data = read_docstub(filename, verbose=verbose, ignore_errors=ignore_errors)
|
||||
|
||||
if data.get('doc', False):
|
||||
add_fragments(data['doc'], filename, fragment_loader=fragment_loader)
|
||||
|
||||
return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
|
||||
|
|
Loading…
Reference in a new issue