def test_invalid_dict_metadata():
    """A truncated ANSIBLE_METADATA dict raises SyntaxError / ParseError."""
    truncated = (LICENSE + FUTURE_IMPORTS
                 + b'ANSIBLE_METADATA={"metadata_version": "1.1",\n'
                 + REGULAR_IMPORTS)
    complete = (LICENSE + FUTURE_IMPORTS
                + b'ANSIBLE_METADATA={"metadata_version": "1.1"}\n'
                + REGULAR_IMPORTS)

    # Parsing the truncated source directly fails outright.
    with pytest.raises(SyntaxError):
        assert md.extract_metadata(module_data=truncated)

    # A valid AST paired with truncated module_data defeats the offset scan.
    with pytest.raises(md.ParseError, match='Unable to find the end of dictionary'):
        assert md.extract_metadata(module_ast=ast.parse(complete),
                                   module_data=truncated,
                                   offsets=True)
def _get_plugin_list_descriptions(self, loader):
    """Map each plugin name to a short description pulled from its docs.

    Falls back to the module metadata when the docstub is missing or
    malformed; plugins whose metadata status includes 'removed' are omitted.
    """
    descriptions = {}
    for name, path in self._get_plugin_list_filenames(loader).items():
        stub = None
        try:
            stub = read_docstub(path)
        except Exception:
            display.warning("%s has a documentation formatting error" % name)
            continue

        if stub and isinstance(stub, dict):
            descriptions[name] = stub.get('short_description', 'INVALID SHORT DESCRIPTION').strip()
            continue

        # No usable docstub: consult the metadata to decide whether to warn
        # (removed plugins are silently skipped).
        with open(path) as f:
            meta = extract_metadata(module_data=f.read())
        if meta[0]:
            if 'removed' in meta[0].get('status', []):
                continue
            display.warning("%s parsing did not produce documentation." % name)
        descriptions[name] = 'UNDOCUMENTED'
    return descriptions
def _module_is_deprecated(module):
    """Return True when the module's metadata status contains 'deprecated'."""
    path = _module_file_path(module)
    with path.open() as handle:
        meta = extract_metadata(module_data=handle.read())[0]
    # Falsy metadata (no ANSIBLE_METADATA found) means not deprecated.
    return bool(meta) and 'deprecated' in meta.get('status', [])
def upgrade_metadata(version=None):
    """Implement the subcommand to upgrade the default metadata in modules.

    :kwarg version: If given, the version of the metadata to upgrade to.
        If not given, upgrade to the latest format version.
    :returns: 0 (shell-style success code).
    """
    if version is None:
        # Sentinel larger than any defined metadata format.  It must be a
        # *string* of at least "major.minor" form: StrictVersion() raises
        # TypeError on an int and ValueError on a bare "9999999".
        version = '9999999.0'
    requested_version = StrictVersion(version)

    # List all plugins, stripped to (name, path) pairs, excluding non-modules.
    plugins = module_loader.all(path_only=True)
    plugins = ((os.path.splitext(os.path.basename(p))[0], p) for p in plugins)
    plugins = (p for p in plugins if p[0] not in NONMODULE_MODULE_NAMES)

    processed = set()
    diagnostic_messages = []
    for name, filename in (info for info in plugins if info[0] not in processed):
        # For each plugin, read the existing metadata
        with open(filename, 'rb') as f:
            module_data = f.read()
        metadata = extract_metadata(module_data=module_data, offsets=True)[0]

        # If the metadata isn't the requested version, convert it to the
        # new version.  The sentinel string never equals a real
        # metadata_version, so the default always upgrades.
        if 'metadata_version' not in metadata or metadata['metadata_version'] != version:
            #
            # With each iteration of metadata, add a new conditional to
            # upgrade from the previous version
            #
            if 'metadata_version' not in metadata:
                # First version, pre-1.0 final metadata
                metadata = convert_metadata_pre_1_0_to_1_0(metadata)

            if metadata['metadata_version'] == '1.0' and StrictVersion('1.0') < requested_version:
                metadata = convert_metadata_1_0_to_1_1(metadata)

            if metadata['metadata_version'] == '1.1' and StrictVersion('1.1') < requested_version:
                # 1.1 version => XXX.  We don't yet have anything beyond 1.1
                # so there's nothing here
                pass

            # Replace the existing metadata with the new format
            try:
                write_metadata(filename, metadata, version, overwrite=True)
            except ParseError as e:
                # Record the failure but keep processing the other plugins.
                diagnostic_messages.append(e.args[0])
                continue

        processed.add(name)

    if diagnostic_messages:
        pprint(diagnostic_messages)

    return 0
def test_multiple_statements_limitation():
    """Two statements on the ANSIBLE_METADATA line are rejected."""
    # 'match' checks the exception text; the old 'message=' kwarg only
    # customized pytest's "did not raise" failure text and was removed in
    # pytest 5.0.  The sibling tests in this file already use 'match'.
    with pytest.raises(
            md.ParseError,
            match='Multiple statements per line confuses the module metadata parser.'):
        assert md.extract_metadata(
            module_data=LICENSE + FUTURE_IMPORTS +
            b'ANSIBLE_METADATA={"metadata_version": "1.1"}; a=b\n' +
            REGULAR_IMPORTS,
            offsets=True)
def get_plugin_list_text(self, loader):
    """Render self.plugin_list as aligned "name  description" lines.

    Looks up each plugin's file via *loader*, reads its docstub for a short
    description, and truncates descriptions to the terminal width.  Plugins
    whose name starts with '_' are collected under a trailing DEPRECATED
    section; plugins with metadata status 'removed' are omitted entirely.

    :arg loader: plugin loader used to resolve plugin names to files
    :returns: a single newline-joined string ready for display
    :raises AnsibleError: wrapping any unexpected error for a plugin
    """
    columns = display.columns
    # Width of the name column = longest plugin name; descriptions get the rest.
    displace = max(len(x) for x in self.plugin_list)
    linelimit = columns - displace - 5
    text = []
    deprecated = []
    for plugin in sorted(self.plugin_list):
        try:
            # if the module lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
            filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
            if filename is None:
                continue
            if filename.endswith(".ps1"):
                continue
            if os.path.isdir(filename):
                continue
            doc = None
            try:
                doc = read_docstub(filename)
            except Exception:
                display.warning("%s has a documentation formatting error" % plugin)
                continue
            if not doc or not isinstance(doc, dict):
                # No usable docstub: fall back to metadata to decide whether
                # to warn (removed plugins are skipped silently).
                with open(filename) as f:
                    metadata = extract_metadata(module_data=f.read())
                if metadata[0]:
                    if 'removed' not in metadata[0].get('status', []):
                        display.warning("%s parsing did not produce documentation." % plugin)
                    else:
                        continue
                desc = 'UNDOCUMENTED'
            else:
                desc = self.tty_ify(doc.get('short_description', 'INVALID SHORT DESCRIPTION').strip())
            # Truncate to the available width with an ellipsis.
            if len(desc) > linelimit:
                desc = desc[:linelimit] + '...'
            if plugin.startswith('_'):
                # Handle deprecated: strip the leading '_' for display.
                deprecated.append("%-*s %-*.*s" % (displace, plugin[1:], linelimit, len(desc), desc))
            else:
                text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
        except Exception as e:
            raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)), orig_exc=e)
    if len(deprecated) > 0:
        text.append("\nDEPRECATED:")
        text.extend(deprecated)
    return "\n".join(text)
def write_metadata(filename, new_metadata, version=None, overwrite=False):
    """Write ANSIBLE_METADATA into a module file on disk.

    :arg filename: path of the module file to rewrite
    :arg new_metadata: metadata dict to insert
    :kwarg version: if given, existing metadata older than this version is
        replaced even when *overwrite* is False
    :kwarg overwrite: when True, always replace existing metadata
    :raises ParseError: for non-python files whose source cannot be parsed
    :raises Exception: when a .py module has neither ANSIBLE_METADATA nor
        DOCUMENTATION to anchor the insertion
    """
    with open(filename, 'rb') as f:
        module_data = f.read()

    try:
        # offsets=True also yields where the current metadata lives so it
        # can be removed/replaced in place.
        current_metadata, start_line, start_col, end_line, end_col, targets = \
            extract_metadata(module_data=module_data, offsets=True)
    except SyntaxError:
        if filename.endswith('.py'):
            raise
        # Probably non-python modules. These should all have python
        # documentation files where we can place the data
        raise ParseError('Could not add metadata to {}'.format(filename))

    if current_metadata is None:
        # No current metadata so we can just add it
        start_line = find_documentation(module_data)
        if start_line < 0:
            if os.path.basename(filename) in NONMODULE_PY_FILES:
                # These aren't new-style modules
                return
            raise Exception(
                'Module file {} had no ANSIBLE_METADATA or DOCUMENTATION'.
                format(filename))
        module_data = insert_metadata(module_data, new_metadata, start_line,
                                      targets=('ANSIBLE_METADATA', ))
    elif overwrite or (version is not None
                       and ('metadata_version' not in current_metadata
                            or StrictVersion(current_metadata['metadata_version']) < StrictVersion(version))):
        # Current metadata that we do not want. Remove the current
        # metadata and put the new version in its place
        module_data = remove_metadata(module_data, start_line, start_col, end_line, end_col)
        module_data = insert_metadata(module_data, new_metadata, start_line, targets=targets)
    else:
        # Current metadata and we don't want to overwrite it
        return

    # Save the new version of the module
    with open(filename, 'wb') as f:
        f.write(module_data)
def return_metadata(plugins):
    """Get the metadata for all modules

    Handle duplicate module names

    :arg plugins: Iterable of (plugin name, filename) pairs to look for
    :returns: Mapping of plugin name to metadata dictionary (or None when no
        file for that plugin yielded metadata)
    """
    metadata = {}
    for name, filename in plugins:
        # There may be several files for a module (if it is written in another
        # language, for instance) but only one of them (the .py file) should
        # contain the metadata.  Keep trying files until one yields metadata;
        # once found (is not None), never re-read -- the previous condition
        # ('is not None') skipped the .py when a non-python file came first
        # and could clobber good metadata with None.
        if name not in metadata or metadata[name] is None:
            with open(filename, 'rb') as f:
                module_data = f.read()
            metadata[name] = extract_metadata(module_data=module_data, offsets=True)[0]
    return metadata
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.

    :arg filename: module file to parse
    :kwarg verbose: when True, display an error message on parse failure
    :kwarg ignore_errors: when False, re-raise the parse failure
    :returns: dict with 'doc', 'plainexamples', 'returndocs' and 'metadata' keys
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
    }

    try:
        # Context manager closes the handle even if ast.parse fails
        # (the original leaked the file object).
        with open(filename, 'rb') as f:
            b_module_data = f.read()

        M = ast.parse(b_module_data)

        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid == 'DOCUMENTATION':
                                # string should be yaml
                                data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
                            else:
                                # not yaml, should be a simple string
                                data[varkey] = child.value.s
                        display.debug('assigned :%s' % varkey)

        # Metadata is per-file and a dict rather than per-plugin/function and yaml
        data['metadata'] = extract_metadata(module_ast=M)[0]

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except Exception:
        # Was a bare 'except:' which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior without
        # trapping interpreter-exit signals.
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.

    :arg filename: module file to parse
    :kwarg verbose: when True, display an error message on parse failure
    :kwarg ignore_errors: when False, re-raise the parse failure
    :returns: dict with 'doc', 'plainexamples', 'returndocs', 'metadata' and
        'seealso' keys; 'metadata' defaults to community/preview when the
        file declares none
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None,
        'seealso': None,
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
    }

    try:
        with open(filename, 'rb') as b_module_data:
            M = ast.parse(b_module_data.read())

        for child in M.body:
            if isinstance(child, ast.Assign):
                for t in child.targets:
                    try:
                        theid = t.id
                    except AttributeError:
                        # skip errors can happen when trying to use the normal code
                        display.warning(
                            "Failed to assign id for %s on %s, skipping" % (t, filename))
                        continue

                    if theid in string_to_vars:
                        varkey = string_to_vars[theid]
                        if isinstance(child.value, ast.Dict):
                            data[varkey] = ast.literal_eval(child.value)
                        else:
                            if theid == 'DOCUMENTATION':
                                # string should be yaml
                                data[varkey] = AnsibleLoader(
                                    child.value.s, file_name=filename).get_single_data()
                            else:
                                # not yaml, should be a simple string
                                data[varkey] = to_text(child.value.s)
                        display.debug('assigned :%s' % varkey)

        # Metadata is per-file and a dict rather than per-plugin/function and yaml
        data['metadata'] = extract_metadata(module_ast=M)[0]

        if data['metadata']:
            # remove version
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
        else:
            # Add default metadata
            data['metadata'] = {
                'supported_by': 'community',
                'status': ['preview']
            }
    except Exception:
        # Was a bare 'except:' which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior without
        # trapping interpreter-exit signals.
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
def read_docstring(filename, verbose=True, ignore_errors=True):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.

    :arg filename: module file to parse
    :kwarg verbose: when True, display an error message on parse failure
    :kwarg ignore_errors: when False, re-raise the parse failure
    :returns: dict with 'doc', 'plainexamples', 'returndocs' and 'metadata' keys
    """

    # FIXME: Should refactor this so that we have a docstring parsing
    # function and a separate variable parsing function
    # Can have a function one higher that invokes whichever is needed
    #
    # Should look roughly like this:
    #     get_plugin_doc(filename, verbose=False)
    #         documentation = extract_docstring(plugin_ast, identifier, verbose=False)
    #         if not documentation and not (filter or test):
    #             documentation = extract_variables(plugin_ast)
    #         documentation['metadata'] = extract_metadata(plugin_ast)
    #         return documentation

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
    }

    try:
        # Context manager closes the handle even if ast.parse fails
        # (the original leaked the file object).
        with open(filename, 'rb') as f:
            b_module_data = f.read()

        M = ast.parse(b_module_data)

        try:
            display.debug('Attempt first docstring is yaml docs')
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input and deprecated in PyYAML >= 5.1 -- consider
            # yaml.safe_load here.
            docstring = yaml.load(M.body[0].value.s)
            for string in string_to_vars.keys():
                if string in docstring:
                    data[string_to_vars[string]] = docstring[string]
                    display.debug('assigned :%s' % string_to_vars[string])
        except Exception as e:
            display.debug('failed docstring parsing: %s' % str(e))

        # NOTE(review): data never contains a 'docs' key, so this condition is
        # currently always true and the vars fallback always runs.
        if 'docs' not in data or not data['docs']:
            display.debug('Fallback to vars parsing')
            for child in M.body:
                if isinstance(child, ast.Assign):
                    for t in child.targets:
                        try:
                            theid = t.id
                        except AttributeError:
                            # skip errors can happen when trying to use the normal code
                            display.warning(
                                "Failed to assign id for %s on %s, skipping" % (t, filename))
                            continue

                        if theid in string_to_vars:
                            varkey = string_to_vars[theid]
                            if isinstance(child.value, ast.Dict):
                                data[varkey] = ast.literal_eval(child.value)
                            else:
                                if theid == 'DOCUMENTATION':
                                    # string should be yaml
                                    data[varkey] = AnsibleLoader(
                                        child.value.s,
                                        file_name=filename).get_single_data()
                                else:
                                    # not yaml, should be a simple string
                                    data[varkey] = child.value.s
                            display.debug('assigned :%s' % varkey)

        # Metadata is per-file and a dict rather than per-plugin/function and yaml
        data['metadata'] = extract_metadata(module_ast=M)[0]

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except Exception:
        # Was a bare 'except:' which also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior without
        # trapping interpreter-exit signals.
        if verbose:
            display.error("unable to parse %s" % filename)
        if not ignore_errors:
            raise

    return data
def test_module_data_param_given_with_offset():
    """offsets=True without module_data must be rejected."""
    expected_msg = 'If offsets is True then module_data must also be given'
    with pytest.raises(TypeError, match=expected_msg):
        assert md.extract_metadata(module_ast='something', offsets=True)
def test_required_params():
    """Calling extract_metadata with neither source form is a TypeError."""
    expected_msg = 'One of module_ast or module_data must be given'
    with pytest.raises(TypeError, match=expected_msg):
        assert md.extract_metadata()
def test_string_metadata(code, expected):
    """String-form metadata is not supported yet and must raise."""
    # FIXME: String/yaml metadata is not implemented yet.
    with pytest.raises(NotImplementedError):
        result = md.extract_metadata(module_data=code, offsets=True)
        assert result == expected
def test_dict_metadata(code, expected):
    """Dict-literal ANSIBLE_METADATA parses to the expected result."""
    observed = md.extract_metadata(module_data=code, offsets=True)
    assert observed == expected
def test_extract_metadata(code, expected):
    """extract_metadata on raw source yields the expected result."""
    observed = md.extract_metadata(code)
    assert observed == expected
def get_docstring(filename, verbose=False):
    """
    Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
    Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.

    DOCUMENTATION can be extended using documentation fragments loaded by the PluginLoader
    from the module_docs_fragments directory.

    :arg filename: module file to parse
    :kwarg verbose: when True, also display the parse-failure message
    :returns: tuple of (doc, plainexamples, returndocs, metadata)
    :raises Exception: parse failures are reported and re-raised
    """

    data = {
        'doc': None,
        'plainexamples': None,
        'returndocs': None,
        'metadata': None
    }

    string_to_vars = {
        'DOCUMENTATION': 'doc',
        'EXAMPLES': 'plainexamples',
        'RETURN': 'returndocs',
        'ANSIBLE_METADATA': 'metadata'
    }

    try:
        # Context manager closes the handle even if ast.parse fails
        # (the original leaked the file object).
        with open(filename, 'rb') as f:
            b_module_data = f.read()

        M = ast.parse(b_module_data)

        try:
            display.debug('Attempt first docstring is yaml docs')
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input and deprecated in PyYAML >= 5.1 -- consider
            # yaml.safe_load here.
            docstring = yaml.load(M.body[0].value.s)
            for string in string_to_vars.keys():
                if string in docstring:
                    data[string_to_vars[string]] = docstring[string]
                    display.debug('assigned :%s' % string_to_vars[string])
        except Exception as e:
            display.debug('failed docstring parsing: %s' % str(e))

        # NOTE(review): data never contains a 'docs' key, so this condition is
        # currently always true and the vars fallback always runs.
        if 'docs' not in data or not data['docs']:
            display.debug('Fallback to vars parsing')
            for child in M.body:
                if isinstance(child, ast.Assign):
                    for t in child.targets:
                        try:
                            theid = t.id
                        except AttributeError:
                            # skip errors can happen when trying to use the normal code
                            display.warning(
                                "Failed to assign id for %s on %s, skipping" % (t, filename))
                            continue

                        if theid in string_to_vars:
                            varkey = string_to_vars[theid]
                            if isinstance(child.value, ast.Dict):
                                data[varkey] = ast.literal_eval(child.value)
                            else:
                                if theid == 'DOCUMENTATION':
                                    # string should be yaml
                                    data[varkey] = AnsibleLoader(
                                        child.value.s,
                                        file_name=filename).get_single_data()
                                else:
                                    # not yaml, should be a simple string
                                    data[varkey] = child.value.s
                            display.debug('assigned :%s' % varkey)

        data['metadata'] = extract_metadata(b_module_data)[0]

        # add fragments to documentation
        if data['doc']:
            add_fragments(data['doc'], filename)

        # remove version
        if data['metadata']:
            for x in ('version', 'metadata_version'):
                if x in data['metadata']:
                    del data['metadata'][x]
    except Exception:
        # Was a bare 'except:' which also swallowed SystemExit and
        # KeyboardInterrupt before re-raising; Exception keeps the reporting
        # behavior without trapping interpreter-exit signals.
        display.error("unable to parse %s" % filename)
        if verbose is True:
            display.display("unable to parse %s" % filename)
        raise

    return data['doc'], data['plainexamples'], data['returndocs'], data[
        'metadata']
def populate_modules():
    """Walk the module tree and load module + status rows into the database."""
    module_dir = os.path.join(BASE_PATH, 'lib/ansible/modules/')

    module_rows = []
    status_rows = []

    for root, _dirs, filenames in os.walk(module_dir):
        for filename in filenames:
            module, ext = os.path.splitext(filename)
            # Only real python modules; skip package markers.
            if ext != '.py' or module == '__init__':
                continue
            # Deprecated modules carry a leading underscore in the filename.
            if module.startswith('_'):
                module = module[1:]

            namespace = os.path.join(root.replace(module_dir, '')).replace('/', '.')
            path = os.path.join(root, filename)

            with open(path, 'rb') as fh:
                metadata = extract_metadata(module_data=fh.read())[0]

            if not metadata:
                # async_wrapper is the one known module without metadata.
                if module == 'async_wrapper':
                    continue
                raise Exception('no metadata for: %s' % path)

            module_rows.append({
                'module': module,
                'namespace': namespace,
                'path': path.replace(BASE_PATH, ''),
                'supported_by': metadata['supported_by'],
            })
            for status in metadata['status']:
                status_rows.append({'module': module, 'status': status})

    populate_data(dict(
        modules=dict(
            rows=module_rows,
            schema=(
                ('module', 'TEXT'),
                ('namespace', 'TEXT'),
                ('path', 'TEXT'),
                ('supported_by', 'TEXT'),
            )),
        module_statuses=dict(
            rows=status_rows,
            schema=(
                ('module', 'TEXT'),
                ('status', 'TEXT'),
            )),
    ))