def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False, collection_name=None, is_module=False):
    """
    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the doc_fragments plugins.
    """
    data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)

    doc = data.get('doc', False)
    if doc:
        # tag version_added
        if collection_name is not None:
            tag_versions_and_dates(doc, '%s:' % (collection_name, ), is_module=is_module)

        # add fragments to documentation
        add_fragments(doc, filename, fragment_loader=fragment_loader, is_module=is_module)

    return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False, collection_name=None, is_module=None, plugin_type=None):
    """
    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the doc_fragments plugins.
    """
    if is_module is None:
        # Derive "module-ness" from the plugin type when the caller did not
        # say; plugin_type of None compares unequal to 'module', giving False.
        is_module = (plugin_type == 'module')
    else:
        # TODO deprecate is_module argument, now that we have 'type'
        pass

    data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)

    if data.get('doc', False):
        # add collection name to versions and dates
        if collection_name is not None:
            add_collection_to_versions_and_dates(data['doc'], collection_name, is_module=is_module)

        # add fragments to documentation
        add_fragments(data['doc'], filename, fragment_loader=fragment_loader, is_module=is_module)

    if data.get('returndocs', False):
        # add collection name to versions and dates
        if collection_name is not None:
            add_collection_to_versions_and_dates(data['returndocs'], collection_name, is_module=is_module, return_docs=True)

    return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False):
    """
    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the doc_fragments plugins.
    """
    data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)

    doc = data.get('doc', False)
    if doc:
        # merge any referenced doc fragments into the main documentation
        add_fragments(doc, filename, fragment_loader=fragment_loader)

    return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
def _load_config_defs(self, name, path):
    '''
    Reads plugin docs to find configuration setting definitions,
    to push to config manager for later use.

    :param name: plugin name as registered with the loader
    :param path: filesystem path of the plugin source to parse
    '''
    # plugins w/o class name don't support config
    # BUGFIX: ('Connection') is just the string 'Connection', so the original
    # `in` test was a substring check (e.g. 'Conn' would match); a one-element
    # tuple restores the intended exact membership test.
    if self.class_name and self.class_name in ('Connection',):  # FIXME: expand from just connection
        type_name = get_plugin_class(self)

        dstring = read_docstring(path, verbose=False, ignore_errors=False)
        if dstring.get('doc', False):
            # only dict-shaped 'options' sections define config settings
            if 'options' in dstring['doc'] and isinstance(dstring['doc']['options'], dict):
                C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['doc']['options'])
                display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def get_docstring(filename, verbose=False):
    """
    DOCUMENTATION can be extended using documentation fragments
    loaded by the PluginLoader from the module_docs_fragments directory.
    """
    data = read_docstring(filename, verbose=verbose)

    if data.get('doc', False):
        # merge any referenced doc fragments into the main documentation
        add_fragments(data['doc'], filename)

    return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
def _get_spec(self, module):
    """Locate *module* via the shared module loader and return the
    'options' section of its parsed DOCUMENTATION.

    Missing or malformed documentation is recorded in self._errors;
    _check_for_errors() is expected to act on accumulated errors.
    """
    loader = self._shared_loader_obj.module_loader
    path = loader.find_plugin(module)
    docstring = read_docstring(path, verbose=True, ignore_errors=True)

    if not docstring.get('doc'):
        self._errors.append("{} missing documentation string".format(module))
        self._check_for_errors()

    spec = docstring.get('doc').get('options')
    if not spec:
        self._errors.append("{} malformed documentation string".format(module))
        self._check_for_errors()

    return spec
def _load_config_defs(self, name, path):
    '''
    Reads plugin docs to find configuration setting definitions,
    to push to config manager for later use
    '''
    # plugins w/o class name don't support config
    if not self.class_name:
        return

    type_name = get_plugin_class(self.class_name)

    # FIXME: expand from just connection and callback
    if type_name not in ('connection', 'callback'):
        return

    dstring = read_docstring(path, verbose=False, ignore_errors=False)
    doc = dstring.get('doc', False)
    # only dict-shaped 'options' sections define config settings
    if doc and 'options' in doc and isinstance(doc['options'], dict):
        C.config.initialize_plugin_configuration_definitions(type_name, name, doc['options'])
        display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def populate_modules():
    """Walk the module tree and push one row per module to populate_data()."""
    module_dir = os.path.join(BASE_PATH, 'lib/ansible/modules/')

    modules_rows = []
    for root, _dir_names, file_names in os.walk(module_dir):
        for file_name in file_names:
            name, ext = os.path.splitext(file_name)
            if name == '__init__' or ext != '.py':
                continue
            # deprecated modules carry a leading underscore in the filename
            if name.startswith('_'):
                name = name[1:]

            namespace = os.path.join(root.replace(module_dir, '')).replace('/', '.')
            path = os.path.join(root, file_name)
            doc = read_docstring(path)['doc']

            modules_rows.append(dict(
                module=name,
                namespace=namespace,
                path=path.replace(BASE_PATH, ''),
                version_added=str(doc.get('version_added', '')) if doc else '',
            ))

    populate_data(dict(
        modules=dict(rows=modules_rows, schema=(
            ('module', 'TEXT'),
            ('namespace', 'TEXT'),
            ('path', 'TEXT'),
            ('version_added', 'TEXT'),
        )),
    ))
def test_spire_module_doc_okay(module: ModuleType) -> None:
    """Verify a module's DOCUMENTATION options match its argument spec and
    that DOCUMENTATION / RETURN parse as valid yaml."""
    module_file = module.__file__
    # read_docstring returns {'doc':..., 'plainexamples':..., 'returndocs':..., 'metadata':..., 'seealso':...}
    docs: Dict[str, Any] = plugin_docs.read_docstring(filename=module_file, ignore_errors=False)
    # doc holds {'module':..., 'short_description':..., 'version_added':..., 'description':..., 'options':..., 'author':...}
    doc: Dict[str, Dict[str, Any]] = docs["doc"]
    options: Dict[str, Any] = doc["options"]
    option_keys = set(options.keys())

    module_args = get_module_args(module)
    args_keys = set(module_args.keys())

    doc_ok, doc_issues = check_attr_yaml(mod=module, attr_name="DOCUMENTATION")
    ret_ok, ret_issues = check_attr_yaml(mod=module, attr_name="RETURN")

    if not (option_keys == args_keys and ret_ok and doc_ok):
        msg = f"""Entries in DOCUMENTATION do not match those in module_args:
        to remove:{option_keys-args_keys}
        to add :{args_keys-option_keys}
        option_keys : {option_keys}
        module_args_keys: {args_keys}
        return_yaml_issue: {ret_issues}
        documentation_yaml_issue: {doc_issues}
        """
        pytest.fail(msg=msg, pytrace=False)
import os
import codecs

from ansible import utils
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase

# ansible 2.4
# Best-effort: on ansible versions that provide read_docstring, parse this
# plugin's own DOCUMENTATION and register its 'options' with the config
# manager; on older versions the import fails and this is skipped.
try:
    from ansible.parsing.plugin_docs import read_docstring

    # load the definitions
    # __file__ may point at the compiled .pyc; docs live in the .py source
    dstring = read_docstring(__file__.replace('.pyc', '.py'), verbose=False, ignore_errors=False)
    if dstring.get('doc', False):
        if 'options' in dstring['doc'] and isinstance(dstring['doc']['options'], dict):
            C.config.initialize_plugin_configuration_definitions('lookup', 'file', dstring['doc']['options'])
except:
    # NOTE(review): bare except with a no-op `None` expression silently
    # swallows every error (including KeyboardInterrupt) — presumably a
    # deliberate best-effort guard for old ansible; consider narrowing to
    # `except Exception: pass`.
    None


class LookupModule(LookupBase):

    def run(self, terms, variables=None, **kwargs):
        # For each requested term, probe the candidate lookup paths for a
        # 'files/<term>' entry.
        # NOTE(review): this excerpt appears truncated — the loop body ends
        # after building the candidate path; the remainder of run() is not
        # visible here.
        for term in terms:
            for path in self.get_paths(variables):
                path = os.path.join(path, 'files', term)
# NOTE(review): excerpt of a larger loop body — `path`, `namespace`, `name`,
# `path_parts`, `excludes`, `req_data` and `line_ct` are bound by enclosing
# code not visible here, and the bare `continue` statements belong to that
# enclosing loop; the indentation of this reconstruction is inferred.
with open(path, 'r') as f:
    # skip files that do not contain a DOCUMENTATION block at all
    if 'DOCUMENTATION' not in f.read():
        continue

collection = '{}.{}'.format(namespace, name)
req_data.setdefault(collection, [])

if any(path.endswith(exclude) for exclude in excludes):
    continue

assert os.path.exists(path)

if path_parts[5] == 'doc_fragments':
    # doc fragments can hold several docs; gather all of them
    fragments = ast_fragment_parse(path)
    check_requirements = list(fragments.values())
else:
    # regular plugin: a single parsed DOCUMENTATION dict
    plugin_data = read_docstring(path, verbose=False)['doc']
    check_requirements = [plugin_data]

for plugin_data in check_requirements:
    if plugin_data is None:
        print(path)
        raise Exception('Failed to correctly parse the DOCUMENTATION!')
    if 'requirements' in plugin_data:
        reqs = plugin_data['requirements']
        assert isinstance(reqs, list)
        for entry in reqs:
            add_req(req_data[collection], entry)

line_ct += 1

with open('sniff_req/discovered.json', 'w') as f:
    json.dump(req_data, f, indent=2)
def populate_modules():
    """Walk the module tree, collecting one row per module plus one row per
    metadata status, and push both tables to populate_data()."""
    module_dir = os.path.join(BASE_PATH, 'lib/ansible/modules/')

    modules_rows = []
    module_statuses_rows = []
    for root, _dir_names, file_names in os.walk(module_dir):
        for file_name in file_names:
            name, ext = os.path.splitext(file_name)
            if name == '__init__' or ext != '.py':
                continue
            # deprecated modules carry a leading underscore in the filename
            if name.startswith('_'):
                name = name[1:]

            namespace = os.path.join(root.replace(module_dir, '')).replace('/', '.')
            path = os.path.join(root, file_name)

            result = read_docstring(path)
            metadata = result['metadata']
            doc = result['doc']
            if not metadata:
                # async_wrapper is the one known module without metadata
                if name == 'async_wrapper':
                    continue
                raise Exception('no metadata for: %s' % path)

            modules_rows.append(dict(
                module=name,
                namespace=namespace,
                path=path.replace(BASE_PATH, ''),
                supported_by=metadata['supported_by'],
                version_added=str(doc.get('version_added', '')) if doc else '',
            ))
            for status in metadata['status']:
                module_statuses_rows.append(dict(
                    module=name,
                    status=status,
                ))

    populate_data(dict(
        modules=dict(rows=modules_rows, schema=(
            ('module', 'TEXT'),
            ('namespace', 'TEXT'),
            ('path', 'TEXT'),
            ('supported_by', 'TEXT'),
            ('version_added', 'TEXT'),
        )),
        module_statuses=dict(rows=module_statuses_rows, schema=(
            ('module', 'TEXT'),
            ('status', 'TEXT'),
        )),
    ))