def get_documents():
    """Yield DOCUMENTATION for builtin modules, skipping network/certified ones."""
    for root, dirnames, filenames in os.walk(
            os.path.dirname(ansible.modules.__file__)):  # noqa: E501
        for filename in filenames:
            if filename == '__init__.py' or not filename.endswith('py'):
                continue
            # Parse once: the original called get_docstring() twice per file
            # (once for index 0, once for index 3), re-parsing each module.
            docstring = plugin_docs.get_docstring(
                os.path.join(root, filename), fragment_loader)
            documentation = docstring[0]
            metadata = docstring[3]
            if documentation is None:
                continue
            if metadata is None:
                continue
            # The original tested `metadata.get(k) == v or metadata[k] == v`,
            # which is the same condition twice (the subscript form only adds
            # a KeyError risk). One .get() check suffices.
            supported_by = metadata.get('supported_by')
            if supported_by == 'network':
                logger.info('Skipping network module: ' + documentation['module'])
                continue
            if supported_by == 'certified':
                logger.info('Skipping certified module: ' + documentation['module'])
                continue
            logger.info('Metadata for module: ' + documentation['module'] + str(metadata))
            yield documentation
def worker(pending_queue: multiprocessing.Queue, completed_queue: multiprocessing.Queue) -> None: """Extract the documentation from a plugin, place in completed queue. :param pending_queue: A queue with plugins to process :param completed_queue: The queue in which extracted documentation will be placed """ # pylint: disable=import-outside-toplevel # load the fragment_loader _after_ the path is set from ansible.plugins.loader import fragment_loader while True: entry = pending_queue.get() if entry is None: break collection_name, checksum, plugin_path = entry try: if ansible_version.startswith("2.9"): (doc, examples, returndocs, metadata) = get_docstring( filename=str(plugin_path), fragment_loader=fragment_loader, ) else: (doc, examples, returndocs, metadata) = get_docstring( filename=str(plugin_path), fragment_loader=fragment_loader, collection_name=collection_name, ) except Exception as exc: # pylint: disable=broad-except err_message = f"{type(exc).__name__} (get_docstring): {str(exc)}" completed_queue.put( ("error", (checksum, plugin_path, err_message))) continue try: q_message = { "plugin": { "doc": doc, "examples": examples, "returndocs": returndocs, "metadata": metadata, }, "timestamp": datetime.utcnow().isoformat(), } completed_queue.put( ("plugin", (checksum, json.dumps(q_message, default=str)))) except JSONDecodeError as exc: err_message = f"{type(exc).__name__} (json_decode_doc): {str(exc)}" completed_queue.put( ("error", (checksum, plugin_path, err_message)))
def _get_plugin_doc(plugin, loader, plugin_type, search_paths):
    """Locate *plugin* via *loader* and return (doc, plainexamples, returndocs, metadata).

    Raises AnsibleError when documentation exists but cannot be parsed.
    """
    doc = plainexamples = returndocs = metadata = {}
    try:
        # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
        filename = loader.find_plugin(plugin, mod_type='.py',
                                      ignore_deprecated=True,
                                      check_aliases=True)
        if filename is None:
            display.warning("%s %s not found in:\n%s\n" %
                            (plugin_type, plugin, search_paths))
            # NOTE(review): this bare return yields None, while the normal
            # path returns a 4-tuple — callers must handle both shapes.
            return
        if not any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
            doc, plainexamples, returndocs, metadata = get_docstring(
                filename, fragment_loader,
                verbose=(context.CLIARGS['verbosity'] > 0))
            if doc:
                # doc may be None, such as when the module has been removed
                doc['filename'] = filename
    except Exception as e:
        display.vvv(traceback.format_exc())
        raise AnsibleError(
            "%s %s missing documentation (or could not parse documentation): %s\n"
            % (plugin_type, plugin, to_native(e)))
    return doc, plainexamples, returndocs, metadata
def main():
    """Collect per-module doc excerpts and playbook directives into *result*."""
    module_keys = ('module', 'short_description', 'options', 'deprecated',
                   'suboptions')
    result = {'modules': [], 'directives': {}, 'lookup_plugins': []}

    for module in get_module_list():
        if module in BLACKLIST_MODULES:
            continue
        filename = module_loader.find_plugin(module, mod_type='.py')
        if filename is None:
            continue
        if filename.endswith(".ps1"):
            continue
        if os.path.isdir(filename):
            continue
        try:
            doc = plugin_docs.get_docstring(filename, fragment_loader)[0]
            filtered_doc = {key: doc.get(key, None) for key in module_keys}
            result['modules'].append(filtered_doc)
        except Exception:
            # Was a bare `except:`, which also swallows KeyboardInterrupt and
            # SystemExit. Modules with unparseable docs are skipped on purpose.
            pass

    # Map each playbook keyword to the object types that accept it.
    for aclass in (Play, Role, Block, Task):
        aobj = aclass()
        name = type(aobj).__name__
        for attr in aobj.__dict__['_attributes']:
            if 'private' in attr and attr.private:
                continue
            direct_target = result['directives'].setdefault(attr, [])
            direct_target.append(name)
            if attr == 'action':
                # 'action' implies the 'local_action' variant as well.
                local_action = result['directives'].setdefault(
                    'local_action', [])
                local_action.append(name)
def load_plugin(loader, plugin_type, plugin):
    """Resolve *plugin* via *loader*; return a dict of docs or one with an 'error' key."""
    result = {}
    try:
        plugin_context = loader.find_plugin_with_context(
            plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
        if not plugin_context.resolved:
            result['error'] = 'Cannot find plugin'
            return result

        plugin_name = plugin_context.plugin_resolved_name
        filename = plugin_context.plugin_resolved_path
        collection_name = plugin_context.plugin_resolved_collection
        # Record resolution details even if doc extraction later fails.
        result.update({
            'plugin_name': plugin_name,
            'filename': filename,
            'collection_name': collection_name,
        })

        documentation, plainexamples, returndocs, metadata = get_docstring(
            filename, fragment_loader, verbose=False,
            collection_name=collection_name,
            is_module=(plugin_type == 'module'))
        if documentation is None:
            result['error'] = 'No valid documentation found'
            return result

        documentation['filename'] = filename
        documentation['collection'] = collection_name

        if plugin_type == 'module':
            # is there corresponding action plugin?
            if plugin in action_loader:
                documentation['has_action'] = True
            else:
                documentation['has_action'] = False

        ansible_doc = {
            'doc': documentation,
            'examples': plainexamples,
            'return': returndocs,
            'metadata': metadata,
        }
        try:
            # If this fails, the documentation cannot be serialized as JSON
            json.dumps(ansible_doc, cls=AnsibleJSONEncoder)
            # Store result. This is guaranteed to be serializable
            result['ansible-doc'] = ansible_doc
        except Exception as e:
            result['error'] = ('Cannot serialize documentation as JSON: %s'
                               % to_native(e))
    except Exception as e:
        result['error'] = (
            'Missing documentation or could not parse documentation: %s'
            % to_native(e))
    return result
def get_plugin_metadata(plugin_type, plugin_name):
    """Return a name/namespace/description/version_added summary for one plugin.

    Returns None for removed plugins (no documentation); raises AnsibleError
    when the plugin cannot be loaded or its docs fail to parse.
    """
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    loader = getattr(plugin_loader, '%s_loader' % plugin_type)
    result = loader.find_plugin_with_context(plugin_name, mod_type='.py',
                                             ignore_deprecated=True,
                                             check_aliases=True)
    if not result.resolved:
        raise AnsibleError("unable to load {0} plugin named {1} ".format(
            plugin_type, plugin_name))
    filename = result.plugin_resolved_path
    collection_name = result.plugin_resolved_collection

    try:
        # Only the doc element is needed; examples/returndocs/metadata are discarded.
        doc, __, __, __ = get_docstring(
            filename, fragment_loader,
            verbose=(context.CLIARGS['verbosity'] > 0),
            collection_name=collection_name,
            is_module=(plugin_type == 'module'))
    except Exception:
        display.vvv(traceback.format_exc())
        raise AnsibleError(
            "%s %s at %s has a documentation formatting error or is missing documentation."
            % (plugin_type, plugin_name, filename))

    if doc is None:
        # Removed plugins don't have any documentation
        return None

    return dict(name=plugin_name,
                namespace=DocCLI.namespace_from_plugin_filepath(
                    filename, plugin_name, loader.package_path),
                description=doc.get('short_description', "UNKNOWN"),
                version_added=doc.get('version_added', "UNKNOWN"))
def main():
    """Report scheduled deprecations found in plugin files (argv or stdin) and base.yml."""
    plugins = []
    for path in sys.argv[1:] or sys.stdin.read().splitlines():
        with open(path, 'rb') as f:
            try:
                # Memory-map the file so the regex can scan it without
                # reading the whole content into a Python bytes object.
                mm_file = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
            except ValueError:
                # mmap raises ValueError for zero-length files; skip them.
                continue
            if DOC_RE.search(mm_file):
                plugins.append(path)
            mm_file.close()

    for plugin in plugins:
        data = {}
        data['doc'], data['examples'], data['return'], data[
            'metadata'] = get_docstring(plugin, fragment_loader)
        for result in find_deprecations(data['doc']):
            print('%s: %s is scheduled for removal in %s' %
                  (plugin, '.'.join(str(i) for i in result[0][:-2]),
                   result[1]))

    # Also scan the base config definitions for deprecations.
    base = os.path.join(os.path.dirname(ansible.config.__file__), 'base.yml')
    with open(base) as f:
        data = yaml.safe_load(f)
    for result in find_deprecations(data):
        print('%s: %s is scheduled for removal in %s' %
              (base, '.'.join(str(i) for i in result[0][:-2]), result[1]))
def _get_plugin_doc(plugin, loader, search_paths):
    """Return (doc, plainexamples, returndocs, metadata) for *plugin*.

    Raises PluginNotFound when the plugin cannot be located, RemovedPlugin
    when metadata marks it removed, and ValueError when DOCUMENTATION is
    missing with invalid metadata.
    """
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    filename = loader.find_plugin(plugin, mod_type='.py',
                                  ignore_deprecated=True, check_aliases=True)
    if filename is None:
        raise PluginNotFound('%s was not found in %s' % (plugin, search_paths))

    doc, plainexamples, returndocs, metadata = get_docstring(
        filename, fragment_loader,
        verbose=(context.CLIARGS['verbosity'] > 0))

    # If the plugin existed but did not have a DOCUMENTATION element and was not removed, it's
    # an error
    if doc is None:
        # doc may be None when the module has been removed. Calling code may choose to
        # handle that but we can't.
        if 'status' in metadata and isinstance(metadata['status'], Container):
            if 'removed' in metadata['status']:
                raise RemovedPlugin('%s has been removed' % plugin)
            # Backwards compat: no documentation but valid metadata (or no metadata, which results in using the default metadata).
            # Probably should make this an error in 2.10
            return {}, {}, {}, metadata
        else:
            # If metadata is invalid, warn but don't error
            display.warning(u'%s has an invalid ANSIBLE_METADATA field' % plugin)
        raise ValueError('%s did not contain a DOCUMENTATION attribute' % plugin)

    doc['filename'] = filename
    return doc, plainexamples, returndocs, metadata
def format_plugin_doc(self, plugin, loader, plugin_type, search_paths):
    """Return rendered doc/snippet text for *plugin*, or None when it is skipped."""
    text = ''

    try:
        # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
        filename = loader.find_plugin(plugin, mod_type='.py',
                                      ignore_deprecated=True,
                                      check_aliases=True)
        if filename is None:
            display.warning("%s %s not found in:\n%s\n" %
                            (plugin_type, plugin, search_paths))
            return
        if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
            return

        try:
            doc, plainexamples, returndocs, metadata = get_docstring(
                filename, fragment_loader,
                verbose=(self.options.verbosity > 0))
        except Exception:
            display.vvv(traceback.format_exc())
            display.error(
                "%s %s has a documentation error formatting or is missing documentation."
                % (plugin_type, plugin),
                wrap_text=False)
            return

        if doc is not None:
            # assign from other sections
            doc['plainexamples'] = plainexamples
            doc['returndocs'] = returndocs
            doc['metadata'] = metadata

            # generate extra data
            if plugin_type == 'module':
                # is there corresponding action plugin?
                if plugin in action_loader:
                    doc['action'] = True
                else:
                    doc['action'] = False
            doc['filename'] = filename
            doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
            if 'docuri' in doc:
                doc['docuri'] = doc[plugin_type].replace('_', '-')

            if self.options.show_snippet and plugin_type == 'module':
                text += self.get_snippet_text(doc)
            else:
                text += self.get_man_text(doc)

            return text
        else:
            if 'removed' in metadata.get('status', []):
                display.warning("%s %s has been removed\n" %
                                (plugin_type, plugin))
                return

            # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
            # probably a quoting issue.
            raise AnsibleError("Parsing produced an empty object.")
    except Exception as e:
        display.vvv(traceback.format_exc())
        raise AnsibleError(
            "%s %s missing documentation (or could not parse documentation): %s\n"
            % (plugin_type, plugin, str(e)))
def get_plugin_metadata(self, plugin_type, plugin_name, loader):
    """Return a name/namespace/description/version_added summary for a plugin.

    Returns None for removed plugins; raises AnsibleError when the plugin
    cannot be loaded or its docs cannot be parsed.
    """
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    filename = loader.find_plugin(plugin_name, mod_type='.py',
                                  ignore_deprecated=True, check_aliases=True)
    if filename is None:
        raise AnsibleError("unable to load {0} plugin named {1} ".format(
            plugin_type, plugin_name))

    try:
        doc, __, __, metadata = get_docstring(
            filename, fragment_loader,
            verbose=(self.options.verbosity > 0))
    except Exception:
        display.vvv(traceback.format_exc())
        raise AnsibleError(
            "%s %s at %s has a documentation error formatting or is missing documentation."
            % (plugin_type, plugin_name, filename))

    if doc is None:
        # Missing docs are only acceptable for plugins marked removed.
        if 'removed' not in metadata.get('status', []):
            raise AnsibleError(
                "%s %s at %s has a documentation error formatting or is missing documentation."
                % (plugin_type, plugin_name, filename))

        # Removed plugins don't have any documentation
        return None

    return dict(
        name=plugin_name,
        namespace=self.namespace_from_plugin_filepath(filename, plugin_name,
                                                      loader.package_path),
        description=doc.get('short_description', "UNKNOWN"),
        version_added=doc.get('version_added', "UNKNOWN")
    )
def render_module_docs(output_folder, module, template):
    """Render the rst documentation page for *module* into *output_folder*."""
    print("Rendering {}".format(module))
    doc, examples, returndocs, metadata = plugin_docs.get_docstring(
        module,
        fragment_loader,
    )

    # RETURN docs may be absent, or already parsed into a dict rather than a
    # YAML string; normalize to a mapping either way.
    returndocs = returndocs or {}
    if isinstance(returndocs, str):
        returndocs = yaml.safe_load(returndocs)

    doc.update(
        examples=examples,
        returndocs=returndocs,
        metadata=metadata,
    )
    doc["author"] = ensure_list(doc["author"])
    doc["description"] = ensure_list(doc["description"])
    convert_descriptions(doc["options"])
    convert_descriptions(doc["returndocs"])

    # Modules carry their name under "module"; other plugin docs carry a
    # dotted "name" — presumably an FQCN; TODO confirm against callers.
    if "module" in doc:
        name = doc["module"]
        doc["plugin_type"] = "module"
    else:
        name = doc["name"].split(".")[-1]
        doc["module"] = name

    rst_path = os.path.join(output_folder, name + ".rst")
    with open(rst_path, "w") as fd:
        fd.write(template.render(doc))
def _get_plugin_doc(plugin, plugin_type, loader, search_paths):
    """Resolve *plugin* and return its (doc, plainexamples, returndocs, metadata).

    Raises PluginNotFound when the loader cannot resolve the plugin, and
    ValueError when the resolved file lacks a DOCUMENTATION attribute.
    """
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    found = loader.find_plugin_with_context(
        plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
    if not found.resolved:
        raise PluginNotFound('%s was not found in %s' % (plugin, search_paths))

    plugin_name = found.plugin_resolved_name
    filename = found.plugin_resolved_path
    collection_name = found.plugin_resolved_collection

    docs = get_docstring(
        filename,
        fragment_loader,
        verbose=(context.CLIARGS['verbosity'] > 0),
        collection_name=collection_name,
        is_module=(plugin_type == 'module'))
    doc, plainexamples, returndocs, metadata = docs

    # A plugin that resolved but has no DOCUMENTATION element is an error
    if doc is None:
        raise ValueError('%s did not contain a DOCUMENTATION attribute' % plugin)

    doc['filename'] = filename
    doc['collection'] = collection_name
    return doc, plainexamples, returndocs, metadata
def get_docstrings(file_names: List[str]) -> List[Any]:
    """Extract and return a list of docstring information from a list of files

    Parameters
    ----------
    file_names: List[str]
        A list of strings representing file names

    Returns
    -------
    List[Any]
        A list of AnsibleMapping objects, representing docstring information
        (in dict form), excluding those that are marked as deprecated.
    """
    # Lazily pull the DOCUMENTATION element for each file, then keep only
    # the truthy, non-deprecated entries.
    all_docstrings = (
        get_docstring(file_name, fragment_loader)[0]
        for file_name in file_names
    )
    return [
        docstring
        for docstring in all_docstrings
        if docstring and not docstring.get("deprecated")
    ]
def get_documents():
    """Yield the DOCUMENTATION mapping for every builtin module file."""
    modules_root = os.path.dirname(ansible.modules.__file__)  # noqa: E501
    for root, dirnames, filenames in os.walk(modules_root):
        for filename in filenames:
            # Package markers and non-python files carry no module docs.
            if filename == '__init__.py':
                continue
            if not filename.endswith('py'):
                continue
            module_path = os.path.join(root, filename)
            documentation = plugin_docs.get_docstring(
                module_path, fragment_loader)[0]
            if documentation is not None:
                yield documentation
def get_module_meta(self, module):
    """Return the parsed DOCUMENTATION for *module*, or None when unavailable."""
    path = module_loader.find_plugin(module)
    # PowerShell modules keep their docs in a sidecar file; skip them here.
    if not path or path.endswith('.ps1'):
        return None
    docstring = plugin_docs.get_docstring(path, fragment_loader)
    return docstring[0]
def get_plugin_list_text(self, loader):
    """Build the columnar plugin listing, grouping deprecated plugins last."""
    columns = display.columns
    displace = max(len(x) for x in self.plugin_list)
    linelimit = columns - displace - 5
    text = []
    deprecated = []
    for plugin in sorted(self.plugin_list):
        try:
            # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
            filename = loader.find_plugin(plugin, mod_type='.py',
                                          ignore_deprecated=True,
                                          check_aliases=True)
            if filename is None:
                continue
            if filename.endswith(".ps1"):
                continue
            if os.path.isdir(filename):
                continue

            doc = None
            try:
                doc, plainexamples, returndocs, metadata = get_docstring(
                    filename, fragment_loader)
            except Exception:
                # Was a bare `except:`, which would also trap
                # KeyboardInterrupt; a doc-parse failure only warns here.
                display.warning("%s has a documentation formatting error" %
                                plugin)

            if not doc or not isinstance(doc, dict):
                desc = 'UNDOCUMENTED'
                display.warning(
                    "%s parsing did not produce documentation." % plugin)
            else:
                desc = self.tty_ify(
                    doc.get('short_description',
                            'INVALID SHORT DESCRIPTION').strip())

            if len(desc) > linelimit:
                desc = desc[:linelimit] + '...'

            if plugin.startswith('_'):
                # Handle deprecated (leading-underscore) plugins separately.
                deprecated.append(
                    "%-*s %-*.*s" %
                    (displace, plugin[1:], linelimit, len(desc), desc))
            else:
                text.append("%-*s %-*.*s" %
                            (displace, plugin, linelimit, len(desc), desc))
        except Exception as e:
            raise AnsibleError("Failed reading docs at %s: %s" %
                               (plugin, to_native(e)))

    if len(deprecated) > 0:
        text.append("\nDEPRECATED:")
        text.extend(deprecated)

    return "\n".join(text)
def get_documents():
    """Yield the parsed DOCUMENTATION for each builtin module file."""
    base_dir = os.path.dirname(ansible.modules.__file__)
    for root, dirs, files in os.walk(base_dir):
        for f in files:
            # Skip package markers and anything that is not a python file.
            skip = f == '__init__.py' or not f.endswith('py')
            if skip:
                continue
            doc = get_docstring(os.path.join(root, f), fragment_loader)[0]
            if doc is not None:
                yield doc
def main():
    """Dump module/directive/lookup metadata into ansibleData.js for the docs site."""
    module_keys = ('module', 'short_description', 'options', 'deprecated',
                   'suboptions')
    result = {'modules': [], 'directives': {}, 'lookup_plugins': []}

    for module in get_module_list():
        print('module is: ' + module)
        if module in BLACKLIST_MODULES:
            continue
        filename = module_loader.find_plugin(module, mod_type='.py')
        if filename is None:
            continue
        if filename.endswith(".ps1"):
            continue
        if os.path.isdir(filename):
            continue
        try:
            doc = plugin_docs.get_docstring(filename, fragment_loader)[0]
            filtered_doc = {key: doc.get(key, None) for key in module_keys}
            result['modules'].append(filtered_doc)
        except Exception:
            # Was a bare `except:` (also swallows KeyboardInterrupt/SystemExit).
            # Modules whose docs fail to parse are skipped deliberately.
            pass

    # Map each playbook keyword to the object types that accept it.
    for aclass in (Play, Role, Block, Task):
        aobj = aclass()
        name = type(aobj).__name__
        for attr in aobj.__dict__['_attributes']:
            if 'private' in attr and attr.private:
                continue
            direct_target = result['directives'].setdefault(attr, [])
            direct_target.append(name)
            if attr == 'action':
                local_action = result['directives'].setdefault(
                    'local_action', [])
                local_action.append(name)

    result['directives']['with_'] = ['Task']

    for lookup in lookup_loader.all():
        name = os.path.splitext(os.path.basename(lookup._original_path))[0]
        result['lookup_plugins'].append(name)

    fn = os.path.join(__path__, 'ansibleData.js')
    with codecs.open(fn, 'wb', encoding='utf-8') as f:
        json.dump(result, f, ensure_ascii=False, indent=2)

    # Raw string: 'C\(' in a normal string is an invalid escape sequence
    # (DeprecationWarning, SyntaxError in future Python versions).
    p = re.compile(r'C\(([^(]*)\)')
    with open(fn, 'r') as f:
        content = f.read()
    with open(fn, 'w') as f:
        f.write('var ansibleData = ' + p.sub(r'<code>{\1}</code>', content))
def _load_config_defs(self, name, path): ''' Reads plugin docs to find configuration setting definitions, to push to config manager for later use ''' # plugins w/o class name don't support config if self.class_name: type_name = get_plugin_class(self.class_name) # if type name != 'module_doc_fragment': if type_name in C.CONFIGURABLE_PLUGINS: dstring = get_docstring(path, fragment_loader, verbose=False, ignore_errors=True)[0] if dstring and 'options' in dstring and isinstance(dstring['options'], dict): C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['options']) display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def helpdefault(self, module_name):
    """Show the short description and option summaries for *module_name*."""
    if not module_name:
        return
    in_path = module_loader.find_plugin(module_name)
    if not in_path:
        display.error('%s is not a valid command, use ? to list all valid commands.' % module_name)
        return
    oc = plugin_docs.get_docstring(in_path, fragment_loader)[0]
    if not oc:
        display.error('No documentation found for %s.' % module_name)
        return
    display.display(oc['short_description'])
    display.display('Parameters:')
    for opt in oc['options'].keys():
        # First line of each option's description, with the name highlighted.
        first_line = oc['options'][opt]['description'][0]
        display.display(' ' + stringc(opt, self.NORMAL_PROMPT) + ' ' + first_line)
def helpdefault(self, module_name):
    """Print short description and options for *module_name* if it is a known module."""
    if module_name in self.modules:
        in_path = module_loader.find_plugin(module_name)
        if in_path:
            # NOTE(review): get_docstring is called without fragment_loader
            # here, so doc fragments are not merged in — confirm intended.
            oc, a, _, _ = plugin_docs.get_docstring(in_path)
            if oc:
                display.display(oc['short_description'])
                display.display('Parameters:')
                for opt in oc['options'].keys():
                    display.display(' ' + stringc(opt, C.COLOR_HIGHLIGHT) + ' ' + oc['options'][opt]['description'][0])
            else:
                display.error('No documentation found for %s.' % module_name)
        else:
            display.error('%s is not a valid command, use ? to list all valid commands.' % module_name)
def render_module_docs(output_folder, module, template):
    """Render the rst documentation page for *module* into *output_folder*."""
    print("Rendering {}".format(module))
    doc, examples, returndocs, metadata = plugin_docs.get_docstring(
        module,
        fragment_loader,
    )
    doc["options"] = sort_options(doc["options"])

    # Guard the RETURN docs: they may be absent entirely, or already parsed
    # into a dict (newer ansible-core). Passing None/dict straight into
    # yaml.safe_load() would fail; normalize to a mapping first.
    returndocs = returndocs or {}
    if isinstance(returndocs, str):
        returndocs = yaml.safe_load(returndocs)

    doc.update(
        examples=examples,
        returndocs=returndocs,
        metadata=metadata,
    )
    module_rst_path = os.path.join(output_folder, doc["module"] + ".rst")
    with open(module_rst_path, "w") as fd:
        fd.write(template.render(doc))
def get_plugin_list_text(self, loader):
    """Build the columnar plugin listing, grouping deprecated plugins last."""
    columns = display.columns
    displace = max(len(x) for x in self.plugin_list)
    linelimit = columns - displace - 5
    text = []
    deprecated = []
    for plugin in sorted(self.plugin_list):
        try:
            # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
            filename = loader.find_plugin(plugin, mod_type='.py',
                                          ignore_deprecated=True,
                                          check_aliases=True)
            if filename is None:
                continue
            if filename.endswith(".ps1"):
                continue
            if os.path.isdir(filename):
                continue

            doc = None
            try:
                doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(filename)
            except Exception:
                # Was a bare `except:`, which would also trap
                # KeyboardInterrupt; parse failures should only warn here.
                display.warning("%s has a documentation formatting error" % plugin)

            if not doc or not isinstance(doc, dict):
                desc = 'UNDOCUMENTED'
                display.warning("%s parsing did not produce documentation." % plugin)
            else:
                desc = self.tty_ify(doc.get('short_description', 'INVALID SHORT DESCRIPTION').strip())

            if len(desc) > linelimit:
                desc = desc[:linelimit] + '...'

            if plugin.startswith('_'):
                # Handle deprecated (leading-underscore) plugins separately.
                deprecated.append("%-*s %-*.*s" % (displace, plugin[1:], linelimit, len(desc), desc))
            else:
                text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
        except Exception as e:
            raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)))

    if len(deprecated) > 0:
        text.append("\nDEPRECATED:")
        text.extend(deprecated)

    return "\n".join(text)
def main():
    """Print module/directive/lookup metadata as a JSON document on stdout."""
    module_keys = ('module', 'short_description', 'options', 'deprecated')
    result = {'modules': [], 'directives': {}, 'lookup_plugins': []}

    for module in get_module_list():
        if module in BLACKLIST_MODULES:
            continue
        filename = module_loader.find_plugin(module, mod_type='.py')
        if filename is None:
            continue
        if filename.endswith(".ps1"):
            continue
        if os.path.isdir(filename):
            continue
        # Older ansible releases took only the filename argument.
        get_docstring_args = ((filename, fragment_loader)
                              if USE_FRAGMENT_LOADER else (filename, ))
        try:
            doc = plugin_docs.get_docstring(*get_docstring_args)[0]
            filtered_doc = {key: doc.get(key, None) for key in module_keys}
            result['modules'].append(filtered_doc)
        except Exception:
            # The bound exception (`as e`) was never used; modules whose docs
            # fail to parse are skipped deliberately.
            pass

    # Map each playbook keyword to the object types that accept it.
    for aclass in (Play, Role, Block, Task):
        aobj = aclass()
        name = type(aobj).__name__
        for attr in aobj.__dict__['_attributes']:
            if 'private' in attr and attr.private:
                continue
            direct_target = result['directives'].setdefault(attr, [])
            direct_target.append(name)
            if attr == 'action':
                local_action = result['directives'].setdefault(
                    'local_action', [])
                local_action.append(name)

    result['directives']['with_'] = ['Task']

    for lookup in lookup_loader.all(path_only=True):
        name = os.path.splitext(os.path.basename(lookup))[0]
        result['lookup_plugins'].append(name)

    print(json.dumps(result))
def main():
    """Return module/directive/lookup metadata as a JSON string."""
    module_keys = ('module', 'short_description', 'options', 'deprecated')
    result = {'modules': [], 'directives': {}, 'lookup_plugins': []}

    for module in get_module_list():
        if module in BLACKLIST_MODULES:
            continue
        filename = module_loader.find_plugin(module, mod_type='.py')
        if filename is None:
            continue
        if filename.endswith(".ps1"):
            continue
        if os.path.isdir(filename):
            continue
        # Older ansible releases took only the filename argument.
        get_docstring_args = ((filename, fragment_loader)
                              if USE_FRAGMENT_LOADER else (filename,))
        try:
            doc = plugin_docs.get_docstring(*get_docstring_args)[0]
            filtered_doc = {key: doc.get(key, None) for key in module_keys}
            result['modules'].append(filtered_doc)
        except Exception:
            # The bound exception (`as e`) was never used; modules whose docs
            # fail to parse are skipped deliberately.
            pass

    # Map each playbook keyword to the object types that accept it.
    for aclass in (Play, Role, Block, Task):
        aobj = aclass()
        name = type(aobj).__name__
        for attr in aobj.__dict__['_attributes']:
            if 'private' in attr and attr.private:
                continue
            direct_target = result['directives'].setdefault(attr, [])
            direct_target.append(name)
            if attr == 'action':
                local_action = result['directives'].setdefault(
                    'local_action', [])
                local_action.append(name)

    result['directives']['with_'] = ['Task']

    for lookup in lookup_loader.all(path_only=True):
        name = os.path.splitext(os.path.basename(lookup))[0]
        result['lookup_plugins'].append(name)

    return json.dumps(result)
def helpdefault(self, module_name):
    """Print short description and options for *module_name* if it is a known module."""
    if module_name in self.modules:
        in_path = module_loader.find_plugin(module_name)
        if in_path:
            # NOTE(review): get_docstring is called without fragment_loader
            # here, so doc fragments are not merged in — confirm intended.
            oc, a, _, _ = plugin_docs.get_docstring(in_path)
            if oc:
                display.display(oc['short_description'])
                display.display('Parameters:')
                for opt in oc['options'].keys():
                    display.display(' ' + stringc(opt, C.COLOR_HIGHLIGHT) + ' ' + oc['options'][opt]['description'][0])
            else:
                display.error('No documentation found for %s.' % module_name)
        else:
            display.error(
                '%s is not a valid command, use ? to list all valid commands.'
                % module_name)
def get_plugin_metadata(plugin_type, plugin_name):
    """Return a name/namespace/description/version_added summary for one plugin.

    Returns None for removed plugins; raises AnsibleError when the plugin
    cannot be loaded or its docs fail to parse.
    """
    # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
    loader = getattr(plugin_loader, '%s_loader' % plugin_type)
    filename = loader.find_plugin(plugin_name, mod_type='.py',
                                  ignore_deprecated=True, check_aliases=True)
    if filename is None:
        raise AnsibleError("unable to load {0} plugin named {1} ".format(
            plugin_type, plugin_name))

    # Derive the collection from a fully-qualified plugin name; default to builtin.
    collection_name = 'ansible.builtin'
    if plugin_name.startswith('ansible_collections.'):
        collection_name = '.'.join(plugin_name.split('.')[1:3])

    try:
        doc, __, __, metadata = get_docstring(
            filename, fragment_loader,
            verbose=(context.CLIARGS['verbosity'] > 0),
            collection_name=collection_name,
            is_module=(plugin_type == 'module'))
    except Exception:
        display.vvv(traceback.format_exc())
        raise AnsibleError(
            "%s %s at %s has a documentation error formatting or is missing documentation."
            % (plugin_type, plugin_name, filename))

    if doc is None:
        # Missing docs are only acceptable for plugins marked removed.
        if 'removed' not in metadata.get('status', []):
            raise AnsibleError(
                "%s %s at %s has a documentation error formatting or is missing documentation."
                % (plugin_type, plugin_name, filename))

        # Removed plugins don't have any documentation
        return None

    return dict(name=plugin_name,
                namespace=DocCLI.namespace_from_plugin_filepath(
                    filename, plugin_name, loader.package_path),
                description=doc.get('short_description', "UNKNOWN"),
                version_added=doc.get('version_added', "UNKNOWN"))
def worker(pending_queue: multiprocessing.Queue, completed_queue: multiprocessing.Queue) -> None: """extract a doc from a plugin, place in completed q""" # pylint: disable=ungrouped-imports # pylint: disable=import-outside-toplevel # load the fragment_loader _after_ the path is set from ansible.plugins.loader import fragment_loader # type: ignore while True: entry = pending_queue.get() if entry is None: break collection_name, chksum, plugin_path = entry try: (doc, examples, returndocs, metadata) = get_docstring( filename=plugin_path, fragment_loader=fragment_loader, collection_name=collection_name, ) except Exception as exc: # pylint: disable=broad-except err_message = f"{type(exc).__name__} (get_docstring): {str(exc)}" completed_queue.put(("error", (chksum, plugin_path, err_message))) continue try: q_message = { "plugin": { "doc": doc, "examples": examples, "returndocs": returndocs, "metadata": metadata, }, "timestamp": datetime.utcnow().isoformat(), } completed_queue.put( ("plugin", (chksum, json.dumps(q_message, default=str)))) except JSONDecodeError as exc: err_message = f"{type(exc).__name__} (json_decode_doc): {str(exc)}" completed_queue.put(("error", (chksum, plugin_path, err_message)))
def get_module_docstring(file_path: str) -> Any:
    """Extract and return docstring information from a module file

    Parameters
    ----------
    file_names: file_path[str]
        string representing module file

    Returns
    -------
    Any
        An AnsibleMapping object, representing docstring information
        (in dict form), excluding those that are marked as deprecated.
    """
    doc = get_docstring(file_path, fragment_loader)[0]
    # Deprecated or missing docs fall through to an implicit None.
    if not doc or doc.get("deprecated"):
        return None
    return doc
def _load_config_defs(self, name, path): ''' Reads plugin docs to find configuration setting definitions, to push to config manager for later use ''' # plugins w/o class name don't support config if self.class_name: type_name = get_plugin_class(self.class_name) # FIXME: expand to other plugins, but never doc fragments # if type name != 'module_doc_fragment': if type_name in ('callback', 'connection', 'inventory', 'lookup', 'shell'): dstring = get_docstring(path, fragment_loader, verbose=False, ignore_errors=True)[0] if dstring and 'options' in dstring and isinstance( dstring['options'], dict): C.config.initialize_plugin_configuration_definitions( type_name, name, dstring['options']) display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def run(self):
    """Entry point for the doc CLI: list plugins or render docs for each requested one."""
    super(DocCLI, self).run()

    plugin_type = self.options.type

    # choose plugin type
    if plugin_type == 'cache':
        loader = cache_loader
    elif plugin_type == 'callback':
        loader = callback_loader
    elif plugin_type == 'connection':
        loader = connection_loader
    elif plugin_type == 'lookup':
        loader = lookup_loader
    elif plugin_type == 'strategy':
        loader = strategy_loader
    elif plugin_type == 'vars':
        loader = vars_loader
    elif plugin_type == 'inventory':
        loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory',
                              'inventory_plugins', 'inventory_plugins')
    else:
        loader = module_loader

    # add to plugin path from command line
    if self.options.module_path is not None:
        for i in self.options.module_path.split(os.pathsep):
            loader.add_directory(i)

    # save only top level paths for errors
    search_paths = DocCLI.print_paths(loader)
    loader._paths = None  # reset so we can use subdirs below

    # list plugins for type
    if self.options.list_dir:
        paths = loader._get_paths()
        for path in paths:
            self.find_plugins(path, plugin_type)
        self.pager(self.get_plugin_list_text(loader))
        return 0

    # process all plugins of type
    if self.options.all_plugins:
        paths = loader._get_paths()
        for path in paths:
            self.find_plugins(path, plugin_type)
        self.args = sorted(set(self.plugin_list))

    if len(self.args) == 0:
        raise AnsibleOptionsError("Incorrect options passed")

    # process command line list
    text = ''
    for plugin in self.args:
        try:
            # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
            filename = loader.find_plugin(plugin, mod_type='.py',
                                          ignore_deprecated=True,
                                          check_aliases=True)
            if filename is None:
                display.warning("%s %s not found in:\n%s\n" %
                                (plugin_type, plugin, search_paths))
                continue

            if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
                continue

            try:
                doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(
                    filename, verbose=(self.options.verbosity > 0))
            except Exception:
                # Was a bare `except:`, which would also swallow
                # KeyboardInterrupt while paging through many plugins.
                display.vvv(traceback.format_exc())
                display.error(
                    "%s %s has a documentation error formatting or is missing documentation."
                    % (plugin_type, plugin))
                continue

            if doc is not None:
                # assign from other sections
                doc['plainexamples'] = plainexamples
                doc['returndocs'] = returndocs
                doc['metadata'] = metadata

                # generate extra data
                if plugin_type == 'module':
                    # is there corresponding action plugin?
                    if plugin in action_loader:
                        doc['action'] = True
                    else:
                        doc['action'] = False
                doc['filename'] = filename
                doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
                if 'docuri' in doc:
                    doc['docuri'] = doc[plugin_type].replace('_', '-')

                if self.options.show_snippet and plugin_type == 'module':
                    text += self.get_snippet_text(doc)
                else:
                    text += self.get_man_text(doc)
            else:
                # this typically means we couldn't even parse the docstring, not just that the YAML is busted,
                # probably a quoting issue.
                raise AnsibleError("Parsing produced an empty object.")
        except Exception as e:
            display.vvv(traceback.format_exc())
            raise AnsibleError(
                "%s %s missing documentation (or could not parse documentation): %s\n"
                % (plugin_type, plugin, str(e)))

    if text:
        self.pager(text)

    return 0
def process_module(module, options, env, template, outputname, module_map, aliases):
    """Render the documentation page for a single module.

    :arg module: module name (a leading underscore marks a deprecated
        module or an alias symlink)
    :arg options: parsed CLI options (uses ``verbose`` and ``ansible_version``)
    :arg env: jinja2 environment (kept for interface stability)
    :arg template: jinja2 template used to render the page
    :arg outputname: output filename pattern handed to write_data()
    :arg module_map: maps module names to source file paths; dict values
        mark category entries and are skipped
    :arg aliases: maps module names to their known alias sets
    :returns: ``"SKIPPED"`` for category entries, ``None`` for non-python
        files and alias symlinks, otherwise the module's short description
    :raises SystemExit: when documentation/metadata is missing
    :raises AnsibleError: on option-description problems or render failure
    """
    fname = module_map[module]
    if isinstance(fname, dict):
        return "SKIPPED"

    basename = os.path.basename(fname)
    deprecated = False

    # ignore files with extensions other than .py (e.g. .ps1 doc stubs)
    if not basename.endswith(".py"):
        return
    elif module.startswith("_"):
        if os.path.islink(fname):
            return  # ignore, it's an alias
        deprecated = True

    print("rendering: %s" % module)

    # use ansible core library to parse out doc metadata YAML and plaintext examples
    doc, examples, returndocs, metadata = plugin_docs.get_docstring(
        fname, verbose=options.verbose)

    # crash if module is missing documentation and not explicitly hidden from docs index
    if doc is None:
        sys.exit("*** ERROR: MODULE MISSING DOCUMENTATION: %s, %s ***\n" % (fname, module))
    if metadata is None:
        sys.exit("*** ERROR: MODULE MISSING METADATA: %s, %s ***\n" % (fname, module))

    if deprecated and 'deprecated' not in doc:
        sys.exit(
            "*** ERROR: DEPRECATED MODULE MISSING 'deprecated' DOCUMENTATION: %s, %s ***\n"
            % (fname, module))

    if module in aliases:
        doc['aliases'] = aliases[module]

    all_keys = []

    # IDIOM: "x not in d" instead of "not x in d"
    if 'version_added' not in doc:
        sys.exit("*** ERROR: missing version_added in: %s ***\n" % module)

    added = 0
    if doc['version_added'] == 'historical':
        del doc['version_added']
    else:
        added = doc['version_added']

    # don't show version added information if it's too old to be called out
    if too_old(added):
        del doc['version_added']

    if 'options' in doc and doc['options']:
        # iterate keys directly: the loop never used the values binding
        for k in doc['options']:
            # don't show version added information if it's too old to be called out
            if 'version_added' in doc['options'][k] and too_old(
                    doc['options'][k]['version_added']):
                del doc['options'][k]['version_added']
            if 'description' not in doc['options'][k]:
                raise AnsibleError(
                    "Missing required description for option %s in %s " % (k, module))

            required_value = doc['options'][k].get('required', False)
            if not isinstance(required_value, bool):
                raise AnsibleError(
                    "Invalid required value '%s' for option '%s' in '%s' (must be truthy)"
                    % (required_value, k, module))
            if not isinstance(doc['options'][k]['description'], list):
                doc['options'][k]['description'] = [doc['options'][k]['description']]

            all_keys.append(k)

    all_keys = sorted(all_keys)

    doc['option_keys'] = all_keys
    doc['filename'] = fname
    doc['docuri'] = doc['module'].replace('_', '-')
    doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
    doc['ansible_version'] = options.ansible_version
    doc['plainexamples'] = examples  # plain text
    doc['metadata'] = metadata

    if returndocs:
        try:
            doc['returndocs'] = yaml.safe_load(returndocs)
        except Exception:
            # was a bare except; narrowed so Ctrl-C is not swallowed
            # before the re-raise. Show the offending YAML first.
            print("could not load yaml: %s" % returndocs)
            raise
    else:
        doc['returndocs'] = None

    # here is where we build the table of contents...
    try:
        text = template.render(doc)
    except Exception as e:
        raise AnsibleError("Failed to render doc for %s: %s" % (fname, str(e)))
    write_data(text, options, outputname, module)
    return doc['short_description']
def get_plugin_info(module_dir, limit_to=None, verbose=False):
    '''
    Returns information about plugins and the categories that they belong to

    :arg module_dir: file system path to the top of the plugin directory
    :kwarg limit_to: If given, this is a list of plugin names to
        generate information for.  All other plugins will be ignored.
    :kwarg verbose: passed through to plugin_docs.get_docstring
    :returns: Tuple of two dicts containing module_info, categories, and
        aliases and a set listing deprecated modules:

        :module_info: mapping of module names to information about them.  The fields of the dict are:

            :path: filesystem path to the module
            :deprecated: boolean.  True means the module is deprecated otherwise not.
            :aliases: set of aliases to this module name
            :metadata: The modules metadata (as recorded in the module)
            :doc: The documentation structure for the module
            :examples: The module's examples
            :returndocs: The module's returndocs
            :categories: maps category names to a dict.  The dict contains at
                least one key, '_modules' which contains a list of module names in
                that category.  Any other keys in the dict are subcategories with
                the same structure.

    '''

    categories = dict()
    module_info = defaultdict(dict)

    # * windows powershell modules have documentation stubs in python docstring
    #   format (they are not executed) so skip the ps1 format files
    # * One glob level for every module level that we're going to traverse
    files = (
        glob.glob("%s/*.py" % module_dir) +
        glob.glob("%s/*/*.py" % module_dir) +
        glob.glob("%s/*/*/*.py" % module_dir) +
        glob.glob("%s/*/*/*/*.py" % module_dir)
    )

    for module_path in files:
        # Do not list __init__.py files
        if module_path.endswith('__init__.py'):
            continue

        # Do not list blacklisted modules
        module = os.path.splitext(os.path.basename(module_path))[0]
        if module in plugin_docs.BLACKLIST['MODULE'] or module == 'base':
            continue

        # If requested, limit module documentation building only to passed-in
        # modules.
        if limit_to is not None and module.lower() not in limit_to:
            continue

        deprecated = False
        if module.startswith("_"):
            if os.path.islink(module_path):
                # Handle aliases: record the alias on the real module's entry
                source = os.path.splitext(os.path.basename(os.path.realpath(module_path)))[0]
                module = module.replace("_", "", 1)

                aliases = module_info[source].get('aliases', set())
                aliases.add(module)
                # In case we just created this via get()'s fallback
                module_info[source]['aliases'] = aliases
                continue
            else:
                # Handle deprecations
                module = module.replace("_", "", 1)
                deprecated = True

        #
        # Regular module to process
        #

        category = categories

        # Start at the second directory because we don't want the "vendor"
        mod_path_only = os.path.dirname(module_path[len(module_dir):])

        # BUGFIX: reset primary_category every iteration; previously it was
        # only assigned inside "if module_categories:", so a module without
        # categories either raised NameError (first iteration) or silently
        # inherited the previous module's primary category.
        primary_category = ''
        module_categories = []
        # build up the categories that this module belongs to
        for new_cat in mod_path_only.split('/')[1:]:
            if new_cat not in category:
                category[new_cat] = dict()
                category[new_cat]['_modules'] = []
            module_categories.append(new_cat)
            category = category[new_cat]

        category['_modules'].append(module)

        # the category we will use in links (so list_of_all_plugins can point to plugins/action_plugins/*'
        if module_categories:
            primary_category = module_categories[0]

        # use ansible core library to parse out doc metadata YAML and plaintext examples
        doc, examples, returndocs, metadata = plugin_docs.get_docstring(
            module_path, fragment_loader, verbose=verbose)

        # save all the information
        module_info[module] = {'path': module_path,
                               'source': os.path.relpath(module_path, module_dir),
                               'deprecated': deprecated,
                               # BUGFIX: preserve aliases already recorded by
                               # symlinked alias entries above instead of
                               # clobbering them with a fresh empty set.
                               'aliases': module_info[module].get('aliases', set()),
                               'metadata': metadata,
                               'doc': doc,
                               'examples': examples,
                               'returndocs': returndocs,
                               'categories': module_categories,
                               'primary_category': primary_category,
                               }

    # keep module tests out of becoming module docs
    if 'test' in categories:
        del categories['test']

    return module_info, categories
def process(collection, path):  # pylint: disable-msg=too-many-locals
    """
    Process the files in each subdirectory

    :param collection: The collection name
    :type collection: str
    :param path: The path to the collection
    :type path: str
    :returns: mapping of subdir name to {module fqcn: short description}
    """
    template = jinja_environment()

    # rebuild the docs directory from scratch
    docs_path = Path(path, "docs")
    if docs_path.is_dir():
        logging.info("Purging content from directory %s", docs_path)
        shutil.rmtree(docs_path)
    logging.info("Making docs directory %s", docs_path)
    Path(docs_path).mkdir(parents=True, exist_ok=True)

    content = {}

    for subdir in SUBDIRS:
        # "modules" is the plugin-dir name; the plugin *type* is singular
        if subdir == "modules":
            plugin_type = "module"
        else:
            plugin_type = subdir
        dirpath = Path(path, "plugins", subdir)
        if dirpath.is_dir():
            content[subdir] = {}
            logging.info("Process content in %s", dirpath)
            for filename in os.listdir(dirpath):
                if filename.endswith(".py") and filename not in IGNORE_FILES:
                    fullpath = Path(dirpath, filename)
                    logging.info("Processing %s", fullpath)
                    if subdir == "filter":
                        # filter plugins are handled by a dedicated parser
                        content[subdir].update(handle_filters(collection, fullpath))
                    else:
                        doc, examples, returndocs, metadata = plugin_docs.get_docstring(
                            fullpath, fragment_loader
                        )
                        if doc:
                            doc["plugin_type"] = plugin_type

                            if returndocs:
                                # Seems a recent change in devel makes this return a dict not a yaml string.
                                if isinstance(returndocs, dict):
                                    doc["returndocs"] = returndocs
                                else:
                                    doc["returndocs"] = yaml.safe_load(returndocs)
                                convert_descriptions(doc["returndocs"])

                            # BUGFIX: store the metadata mapping itself; the
                            # previous "(metadata,)" trailing comma wrapped it
                            # in a 1-tuple, unlike every other consumer which
                            # stores the mapping directly.
                            doc["metadata"] = metadata

                            if isinstance(examples, string_types):
                                doc["plainexamples"] = examples
                            else:
                                doc["examples"] = examples

                            # fully-qualified name; newer plugins use "name"
                            # instead of the plugin-type key
                            doc["module"] = "{collection}.{plugin_name}".format(
                                collection=collection,
                                plugin_name=doc.get(plugin_type, doc.get('name'))
                            )
                            doc["author"] = ensure_list(doc["author"])
                            doc["description"] = ensure_list(doc["description"])
                            try:
                                convert_descriptions(doc["options"])
                            except KeyError:
                                pass  # This module takes no options

                            module_rst_path = Path(
                                path, "docs",
                                doc["module"] + "_{0}".format(plugin_type) + ".rst")
                            with open(module_rst_path, "w") as fd:
                                fd.write(template.render(doc))
                            content[subdir][doc["module"]] = doc["short_description"]
    return content
def module_args(self, module_name):
    """Return the option names documented by ``module_name``.

    :param module_name: name of the Ansible module to inspect
    :returns: list of option names from the module's DOCUMENTATION block
    """
    # pylint: disable=import-outside-toplevel
    # BUGFIX: get_docstring() needs a fragment loader so that
    # extends_documentation_fragment entries are resolved; the call was
    # previously made without one (compare the sibling implementation
    # that passes fragment_loader). Imported locally so the ansible
    # plugin path is already set up, as done elsewhere in this codebase.
    from ansible.plugins.loader import fragment_loader

    in_path = module_loader.find_plugin(module_name)
    doc, _, _, _ = plugin_docs.get_docstring(in_path, fragment_loader)
    return list(doc['options'].keys())
def module_args(self, module_name):
    """List the option names declared in ``module_name``'s documentation."""
    plugin_path = module_loader.find_plugin(module_name)
    # get_docstring returns (doc, examples, returndocs, metadata);
    # only the doc structure is needed here
    documentation = plugin_docs.get_docstring(plugin_path, fragment_loader)[0]
    return list(documentation['options'])
def _check_for_new_args(self, doc):
    """Compare this module's options against the base branch's version and
    report options whose ``version_added`` is missing, invalid, or does not
    match the current ansible version.

    :param doc: the parsed DOCUMENTATION structure for the module
    """
    # nothing to diff against for brand-new modules or when no base branch set
    if not self.base_branch or self._is_new_module():
        return

    with CaptureStd():
        try:
            # extract the documentation of the pre-existing (base) module
            existing_doc, _, _, _ = get_docstring(self.base_module, verbose=True)
            existing_options = existing_doc.get('options', {})
        except AssertionError:
            # get_docstring asserts when a documentation fragment is missing
            fragment = doc['extends_documentation_fragment']
            self.reporter.warning(
                path=self.object_path,
                code=392,
                msg='Pre-existing DOCUMENTATION fragment missing: %s' % fragment
            )
            return
        except Exception as e:
            self.reporter.warning_trace(
                path=self.object_path,
                tracebk=e
            )
            self.reporter.warning(
                path=self.object_path,
                code=391,
                msg=('Unknown pre-existing DOCUMENTATION '
                     'error, see TRACE. Submodule refs may '
                     'need updated')
            )
            return

    try:
        mod_version_added = StrictVersion(
            str(existing_doc.get('version_added', '0.0'))
        )
    except ValueError:
        # unparsable base version: treat as "always existed"
        mod_version_added = StrictVersion('0.0')

    options = doc.get('options', {})

    # only compare against the major.minor of the running ansible version
    should_be = '.'.join(ansible_version.split('.')[:2])
    strict_ansible_version = StrictVersion(should_be)

    for option, details in options.items():
        # an option is "pre-existing" if it or any of its aliases was present
        names = [option] + details.get('aliases', [])
        if any(name in existing_options for name in names):
            continue

        try:
            version_added = StrictVersion(
                str(details.get('version_added', '0.0'))
            )
        except ValueError:
            version_added = details.get('version_added', '0.0')
            self.reporter.error(
                path=self.object_path,
                code=308,
                msg=('version_added for new option (%s) '
                     'is not a valid version number: %r' % (option, version_added))
            )
            continue
        except:
            # If there is any other exception it should have been caught
            # in schema validation, so we won't duplicate errors by
            # listing it again
            continue

        # new option must carry the current ansible version unless the whole
        # module was added in this version
        if (strict_ansible_version != mod_version_added and
                (version_added < strict_ansible_version or
                 strict_ansible_version < version_added)):
            self.reporter.error(
                path=self.object_path,
                code=309,
                msg=('version_added for new option (%s) should '
                     'be %s. Currently %s' % (option, should_be, version_added))
            )
def get_plugin_info(module_dir, limit_to=None, verbose=False):
    '''
    Returns information about plugins and the categories that they belong to

    :arg module_dir: file system path to the top of the plugin directory
    :kwarg limit_to: If given, this is a list of plugin names to
        generate information for.  All other plugins will be ignored.
    :kwarg verbose: passed through to plugin_docs.get_docstring
    :returns: Tuple of two dicts containing module_info, categories, and
        aliases and a set listing deprecated modules:

        :module_info: mapping of module names to information about them.  The fields of the dict are:

            :path: filesystem path to the module
            :deprecated: boolean.  True means the module is deprecated otherwise not.
            :aliases: set of aliases to this module name
            :metadata: The modules metadata (as recorded in the module)
            :doc: The documentation structure for the module
            :examples: The module's examples
            :returndocs: The module's returndocs
            :categories: maps category names to a dict.  The dict contains at
                least one key, '_modules' which contains a list of module names in
                that category.  Any other keys in the dict are subcategories with
                the same structure.

    '''

    categories = dict()
    module_info = defaultdict(dict)

    # * windows powershell modules have documentation stubs in python docstring
    #   format (they are not executed) so skip the ps1 format files
    # * One glob level for every module level that we're going to traverse
    files = (
        glob.glob("%s/*.py" % module_dir) +
        glob.glob("%s/*/*.py" % module_dir) +
        glob.glob("%s/*/*/*.py" % module_dir) +
        glob.glob("%s/*/*/*/*.py" % module_dir)
    )

    for module_path in files:
        # Do not list __init__.py files
        if module_path.endswith('__init__.py'):
            continue

        # Do not list blacklisted modules
        module = os.path.splitext(os.path.basename(module_path))[0]
        if module in plugin_docs.BLACKLIST['MODULE'] or module == 'base':
            continue

        # If requested, limit module documentation building only to passed-in
        # modules.
        if limit_to is not None and module.lower() not in limit_to:
            continue

        deprecated = False
        if module.startswith("_"):
            if os.path.islink(module_path):
                # Handle aliases: record the alias on the real module's entry
                source = os.path.splitext(os.path.basename(os.path.realpath(module_path)))[0]
                module = module.replace("_", "", 1)

                aliases = module_info[source].get('aliases', set())
                aliases.add(module)
                # In case we just created this via get()'s fallback
                module_info[source]['aliases'] = aliases
                continue
            else:
                # Handle deprecations
                module = module.replace("_", "", 1)
                deprecated = True

        #
        # Regular module to process
        #

        category = categories

        # Start at the second directory because we don't want the "vendor"
        mod_path_only = os.path.dirname(module_path[len(module_dir):])

        # BUGFIX: reset primary_category every iteration; previously it was
        # only assigned inside "if module_categories:", so a module without
        # categories either raised NameError (first iteration) or silently
        # inherited the previous module's primary category.
        primary_category = ''
        module_categories = []
        # build up the categories that this module belongs to
        for new_cat in mod_path_only.split('/')[1:]:
            if new_cat not in category:
                category[new_cat] = dict()
                category[new_cat]['_modules'] = []
            module_categories.append(new_cat)
            category = category[new_cat]

        category['_modules'].append(module)

        # the category we will use in links (so list_of_all_plugins can point to plugins/action_plugins/*'
        if module_categories:
            primary_category = module_categories[0]

        # use ansible core library to parse out doc metadata YAML and plaintext examples
        doc, examples, returndocs, metadata = plugin_docs.get_docstring(
            module_path, fragment_loader, verbose=verbose)

        # save all the information; keep aliases recorded earlier by
        # symlinked alias entries
        module_info[module] = {'path': module_path,
                               'source': os.path.relpath(module_path, module_dir),
                               'deprecated': deprecated,
                               'aliases': module_info[module].get('aliases', set()),
                               'metadata': metadata,
                               'doc': doc,
                               'examples': examples,
                               'returndocs': returndocs,
                               'categories': module_categories,
                               'primary_category': primary_category,
                               }

    # keep module tests out of becoming module docs
    if 'test' in categories:
        del categories['test']

    return module_info, categories
def _validate_docs(self):
    """Validate the module's DOCUMENTATION, EXAMPLES, RETURN and
    ANSIBLE_METADATA blocks, reporting every problem via ``self.reporter``.

    :returns: the raw doc_info mapping produced by ``self._get_docs()``
    """
    doc_info = self._get_docs()
    deprecated = False
    if not bool(doc_info['DOCUMENTATION']['value']):
        self.reporter.error(
            path=self.object_path,
            code=301,
            msg='No DOCUMENTATION provided'
        )
    else:
        doc, errors, traces = parse_yaml(
            doc_info['DOCUMENTATION']['value'],
            doc_info['DOCUMENTATION']['lineno'],
            self.name, 'DOCUMENTATION'
        )
        for error in errors:
            self.reporter.error(
                path=self.object_path,
                code=302,
                **error
            )
        for trace in traces:
            self.reporter.trace(
                path=self.object_path,
                tracebk=trace
            )
        if not errors and not traces:
            # Only when the raw YAML parsed cleanly: run full docstring
            # extraction purely to surface fragment/extraction failures
            # (the return value is deliberately discarded).
            with CaptureStd():
                try:
                    get_docstring(self.path, verbose=True)
                except AssertionError:
                    # get_docstring asserts when a doc fragment is missing
                    fragment = doc['extends_documentation_fragment']
                    self.reporter.error(
                        path=self.object_path,
                        code=303,
                        msg='DOCUMENTATION fragment missing: %s' % fragment
                    )
                except Exception:
                    self.reporter.trace(
                        path=self.object_path,
                        tracebk=traceback.format_exc()
                    )
                    self.reporter.error(
                        path=self.object_path,
                        code=304,
                        msg='Unknown DOCUMENTATION error, see TRACE'
                    )

        # NOTE(review): error code 304 is reused here for a different
        # condition than the "Unknown DOCUMENTATION error" above — confirm
        # this duplication is intended.
        if 'options' in doc and doc['options'] is None and doc.get('extends_documentation_fragment'):
            self.reporter.error(
                path=self.object_path,
                code=304,
                msg=('DOCUMENTATION.options must be a dictionary/hash when used '
                     'with DOCUMENTATION.extends_documentation_fragment')
            )

        # a leading underscore on a non-symlink file marks a deprecated module
        if self.object_name.startswith('_') and not os.path.islink(self.object_path):
            deprecated = True
            if 'deprecated' not in doc or not doc.get('deprecated'):
                self.reporter.error(
                    path=self.object_path,
                    code=318,
                    msg='Module deprecated, but DOCUMENTATION.deprecated is missing'
                )

        if os.path.islink(self.object_path):
            # This module has an alias, which we can tell as it's a symlink
            # Rather than checking for `module: $filename` we need to check against the true filename
            self._validate_docs_schema(
                doc,
                doc_schema(os.readlink(self.object_path).split('.')[0]),
                'DOCUMENTATION',
                305
            )
        else:
            # This is the normal case
            self._validate_docs_schema(
                doc,
                doc_schema(self.object_name.split('.')[0]),
                'DOCUMENTATION',
                305
            )

        self._check_version_added(doc)
        self._check_for_new_args(doc)

    if not bool(doc_info['EXAMPLES']['value']):
        self.reporter.error(
            path=self.object_path,
            code=310,
            msg='No EXAMPLES provided'
        )
    else:
        # EXAMPLES may contain multiple YAML documents (load_all=True)
        _, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'],
                                       doc_info['EXAMPLES']['lineno'],
                                       self.name, 'EXAMPLES', load_all=True)
        for error in errors:
            self.reporter.error(
                path=self.object_path,
                code=311,
                **error
            )
        for trace in traces:
            self.reporter.trace(
                path=self.object_path,
                tracebk=trace
            )

    if not bool(doc_info['RETURN']['value']):
        # missing RETURN is an error only for new modules; legacy modules
        # just get a warning with the same code
        if self._is_new_module():
            self.reporter.error(
                path=self.object_path,
                code=312,
                msg='No RETURN provided'
            )
        else:
            self.reporter.warning(
                path=self.object_path,
                code=312,
                msg='No RETURN provided'
            )
    else:
        data, errors, traces = parse_yaml(doc_info['RETURN']['value'],
                                          doc_info['RETURN']['lineno'],
                                          self.name, 'RETURN')
        if data:
            # validate each top-level return key against the return schema
            for ret_key in data:
                self._validate_docs_schema(data[ret_key], return_schema(data[ret_key]),
                                           'RETURN.%s' % ret_key, 319)
        for error in errors:
            self.reporter.error(
                path=self.object_path,
                code=313,
                **error
            )
        for trace in traces:
            self.reporter.trace(
                path=self.object_path,
                tracebk=trace
            )

    if not bool(doc_info['ANSIBLE_METADATA']['value']):
        self.reporter.error(
            path=self.object_path,
            code=314,
            msg='No ANSIBLE_METADATA provided'
        )
    else:
        metadata = None
        # ANSIBLE_METADATA may arrive as an AST dict literal (evaluated
        # directly) or as a YAML string node (parsed)
        if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
            metadata = ast.literal_eval(
                doc_info['ANSIBLE_METADATA']['value']
            )
        else:
            metadata, errors, traces = parse_yaml(
                doc_info['ANSIBLE_METADATA']['value'].s,
                doc_info['ANSIBLE_METADATA']['lineno'],
                self.name, 'ANSIBLE_METADATA'
            )
            for error in errors:
                self.reporter.error(
                    path=self.object_path,
                    code=315,
                    **error
                )
            for trace in traces:
                self.reporter.trace(
                    path=self.object_path,
                    tracebk=trace
                )

        if metadata:
            # deprecated state feeds into which metadata schema applies
            self._validate_docs_schema(metadata, metadata_1_1_schema(deprecated),
                                       'ANSIBLE_METADATA', 316)

    return doc_info