def load(stream, ai_state=None):
    loader = yaml.Loader(stream)
    loader._ai_state = ai_state
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()
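# A minimal usage sketch (hypothetical) for load() above; custom
# constructors registered on the Loader elsewhere can consult the
# '_ai_state' attribute it sets. 'data.yaml' is an assumed example file.
with open('data.yaml') as f:
    data = load(f, ai_state=None)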
def parse(self, declaration, source_name=None):
    if not (isinstance(declaration, str) and declaration.startswith('---')):
        declaration = self.read(declaration)
    loader = yaml.Loader(declaration)
    if source_name is not None:
        loader.name = source_name
    # Register the constructor for each registered expression class
    for name in self.ExpressionClasses:
        loader.add_constructor("!{}".format(name), self._constructor)
    loader.add_constructor("!INCLUDE", self._construct_include)
    loader.add_constructor("!CONFIG", self._construct_config)
    try:
        expression = loader.get_single_data()
    except yaml.scanner.ScannerError as e:
        raise DeclarationError("Syntax error in declaration: {}".format(e))
    except yaml.constructor.ConstructorError as e:
        raise DeclarationError(
            "Unknown declarative expression: {}".format(e))
    finally:
        loader.dispose()
    return expression
def load_yaml(file):
    loader = yaml.Loader(open(file).read())

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        # TODO(jroovers): special case -> document better
        if isinstance(node.value, list) and len(node.value) > 0:
            if not isinstance(node.value[0], tuple):
                # Processing a real yaml list -> subtract 1
                for list_node in node.value:
                    list_node.value['__line__'] -= 1
        node.value = {"__line__": line, "__val__": node.value}
        return node

    def construct_object(node, deep=False):
        line = node.value['__line__']
        node.value = node.value['__val__']
        data = Constructor.construct_object(loader, node, deep)
        if isinstance(data, str):
            data = LineStr(data)
            data.line = line
        return data

    loader.compose_node = compose_node
    loader.construct_object = construct_object
    return loader.get_single_data()
def load_yaml_with_lines(config):
    """
    Loads a yaml file and adds line numbers to the objects

    :param config: configuration file stream or string
    :type config: TextIO or str
    :return: deserialized version of the yaml config
    :rtype: dict
    :raises Exception: if deserialization failed
    """
    try:
        # Source: https://stackoverflow.com/a/13319530
        loader = yaml.Loader(config)

        def compose_node(parent, index):
            # the line number where the previous token has ended (plus empty lines)
            line = loader.line
            node = Composer.compose_node(loader, parent, index)
            node.__line__ = line + 1
            return node

        def construct_mapping(node, deep=False):
            mapping = Constructor.construct_mapping(loader, node, deep=deep)
            mapping['__line__'] = node.__line__
            return mapping

        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        return loader.get_single_data()
    except yaml.YAMLError as e:
        raise Exception('Error occurred while parsing YAML: {}'.format(e))
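# Hedged usage sketch for load_yaml_with_lines() above; the sample document
# is invented. Every mapping in the result carries a '__line__' entry
# (values are approximate: the line where the previous token ended).
sample = """\
server:
  host: localhost
  port: 8080
"""
result = load_yaml_with_lines(sample)
print(result['__line__'], result['server']['__line__'])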
def load_yaml_with_linenumbers(fileobj):
    """
    Return yaml with line numbers included in the dict.

    This is similar to our mundane ``load_yaml`` function, except that it
    modifies the yaml loader to include line numbers in the data. Our babel
    extension, which is used to extract translatable strings from our yaml
    files, uses those line numbers to make things easier on translators.
    """
    loader = yaml.Loader(fileobj.read())

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = yaml.composer.Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        constructor = yaml.constructor.Constructor.construct_mapping
        mapping = constructor(loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    return loader.get_single_data()
def calculate(source: str) -> types.TSourceMap:
    """
    Calculate the source map for a YAML document.

    Assume that the source is valid YAML.

    Args:
        source: The YAML document.

    Returns:
        The source map.

    """
    if not isinstance(source, str):
        raise errors.InvalidInputError(f"source must be a string, got {type(source)}")
    if not source:
        raise errors.InvalidInputError("source must not be empty")
    try:
        yaml.safe_load(source)
    except (scanner.ScannerError, parser.ParserError) as error:
        raise errors.InvalidInputError("YAML is not valid") from error

    loader = yaml.Loader(source)
    loader.get_token()
    return dict(handle.value(loader=loader))
def hunt_repeated_yaml_keys(data):
    """Parses yaml and returns a dict mapping each repeated key to the
    lines on which it occurs.
    """
    loader = yaml.Loader(data)

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = dict()
        errors = dict()
        for key_node, value_node in node.value:
            key = key_node.value
            if key in mapping:
                if key in errors:
                    errors[key].append(key_node.__line__)
                else:
                    errors[key] = [mapping[key], key_node.__line__]
            mapping[key] = key_node.__line__
        return errors

    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    return loader.get_single_data()
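# Hypothetical input for hunt_repeated_yaml_keys() above: the duplicated
# 'name' key is reported with every line it occurs on, e.g. {'name': [1, 3]}
# (exact numbers depend on how PyYAML tokenizes the document).
doc = """\
name: first
other: value
name: second
"""
print(hunt_repeated_yaml_keys(doc))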
def parse_yaml_linenumbers(data, filename):
    """Parses yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """
    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        mapping[LINE_NUMBER_KEY] = node.__line__
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        loader = yaml.Loader(data)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise SystemExit("Failed to parse YAML in %s: %s" % (filename, str(e)))
    return data
def load_config():
    """Prompt for missing values."""
    with open("config.yaml") as file:
        data = file.read()
    cfg = yaml.Loader(data).get_data()
    if cfg["debug"]:
        log.set_level(0)
    return cfg
def load_yaml(stream):
    """
    Load YAML data.
    """
    loader = yaml.Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()
def parse_metadata(self, rule_text, start_index):
    rule_io = StringIO(u(rule_text))
    rule_io.seek(start_index, 0)
    loader = yaml.Loader(rule_io)
    loader.check_node()
    node = loader.get_node()
    metadata = loader.construct_mapping(node)
    metadata_end = loader.get_mark().index
    return metadata, metadata_end
def read_series(fn):
    '''This is a generator that yields each YAML document in turn and is
    exhausted when there are no more documents to read. This is useful for
    reading a series of yaml documents contained in a single file.
    '''
    with open(fn, 'r') as fh_yaml:
        loader = yaml.Loader(fh_yaml)
        while loader.check_data():
            yield loader.get_data()
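# Usage sketch for read_series() above; 'documents.yaml' is an assumed
# file holding several '---'-separated YAML documents.
for doc in read_series('documents.yaml'):
    print(doc)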
def test_mapping_error(source):
    """
    GIVEN source
    WHEN loader is created and mapping is called with the loader
    THEN InvalidYamlError is raised.
    """
    loader = yaml.Loader(source)
    loader.get_token()

    with pytest.raises(InvalidYamlError):
        mapping(loader=loader)
def test_primitive_error(source):
    """
    GIVEN source
    WHEN loader is created and primitive is called with the loader
    THEN InvalidYamlError is raised.
    """
    loader = yaml.Loader(source)
    loader.get_token()

    with pytest.raises(InvalidYamlError):
        primitive(loader=loader)
def read_file(file_path):
    """
    Loads file content into Python object representation.

    Args:
        file_path: path to YAML file.

    Returns:
        dictionary representing YAML content
    """
    with open(file_path, 'r', encoding='utf-8') as f:
        loader = yaml.Loader(f)
        return loader.get_single_data()
def process_measurements(self):
    """Process measurements"""
    loader = yaml.Loader(self.measurements_stream)
    setattr(loader, 'collector', self.collector)
    setattr(loader, 'system', self.system)
    setattr(loader, 'config', self.config)
    measurements = loader.get_data()
    for measurement_name in measurements:
        logging.debug('Process "{}" measurements: {}'.format(
            measurement_name, measurements[measurement_name]))
        for measurement in measurements[measurement_name]:
            self.send_data(measurement)
def load_yaml(path: Path):
    with path.open() as fin:
        loader = yaml.Loader(fin)
        meta = loader.get_data()
        # PyYAML Reader greedily consumes chunks from the stream.
        # We must recover any un-consumed data, as well as what's left in the stream.
        if loader.buffer:
            data = loader.buffer[loader.pointer:-1]
        else:
            data = ''
        data += fin.read()
    return data, meta
def test_primitive(source, expected_entries):
    """
    GIVEN source and expected entries
    WHEN loader is created and primitive is called with the loader
    THEN the expected entries are returned.
    """
    loader = yaml.Loader(source)
    loader.get_token()

    returned_entries = primitive(loader=loader)

    assert returned_entries == expected_entries
def _load_file(self, filepath):
    f = open(filepath)
    try:
        loader = yaml.Loader(f)
        headers = loader.get_data()
        body = loader.prefix(1000000000).strip('\r\n\0')  # yuck
    except Exception:
        f.seek(0)
        subject = os.path.split(filepath)[1]
        subject = os.path.splitext(subject)[0]
        headers = {'Subject': subject}
        body = f.read().strip('\r\n\0')
    f.close()
    return (headers, body)
def load_yaml(path: Path) -> LoaderResult:
    """
    Loads a YAML file.

    Reads the first "document" (up to the '---' line) as meta, and treats
    the remainder of the file as content.
    """
    with path.open() as fin:
        loader = yaml.Loader(fin)
        meta = loader.get_data()
        # PyYAML Reader greedily consumes chunks from the stream.
        # We must recover any un-consumed data, as well as what's left in the stream.
        data = loader.buffer[loader.pointer:-1] if loader.buffer else ""
        data += fin.read()
    return data, meta
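# Hypothetical usage of load_yaml() above on a file whose first YAML
# document is front matter; 'post.md' is an assumed path.
from pathlib import Path

content, meta = load_yaml(Path('post.md'))
print(meta)     # the parsed front-matter mapping
print(content)  # everything after the front matter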
def load(yaml_src):
    loader = yaml.Loader(yaml_src)
    loader.check_node()
    node = loader.get_node()
    filtered = scalars_to_strings(node)
    if isinstance(filtered, yaml.MappingNode):
        return loader.construct_mapping(filtered, deep=True)
    if isinstance(filtered, yaml.SequenceNode):
        return loader.construct_sequence(filtered, deep=True)
    if isinstance(filtered, yaml.ScalarNode):
        return loader.construct_scalar(filtered)
    raise TypeError(
        'scalars_to_strings returned a node that is neither MappingNode, '
        'SequenceNode, nor ScalarNode')
def yaml_to_keywords(yaml_data) -> dict:
    loader = yaml.Loader(yaml_data)

    def _compose_node(parent, index):
        line = loader.line
        node = yaml.composer.Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def _construct_mapping(node, deep=False):
        mapping = yaml.constructor.Constructor.construct_mapping(
            loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    loader.compose_node = _compose_node
    loader.construct_mapping = _construct_mapping
    data = loader.get_single_data()

    # Now that we have a dict with line numbers, it is time to select keywords.
    def _guess_line(v):
        if isinstance(v, dict):
            return v['__line__']
        if isinstance(v, list):
            return _guess_line(v[0]) if v else None
        return None

    def _replace_name(x: str):
        return x.lower()

    def _recursive_load(found_so_far, node, result=None):
        if result is None:
            result = {}
        if isinstance(node, dict):
            for k, v in node.items():
                line = _guess_line(v)
                if not line:
                    continue
                tmp = found_so_far + [_replace_name(k)]
                print('Load: {} to line {}'.format(tmp, line))
                result[frozenset(tmp)] = line
                result = _recursive_load(tmp, v, result)
        elif isinstance(node, list):
            for n in node:
                result = _recursive_load(found_so_far, n, result)
        return result

    return _recursive_load([], data)
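# Invented input for yaml_to_keywords() above: each key path found in the
# document is lowercased and mapped, as a frozenset, to the line where its
# value starts, e.g. {frozenset({'fruits'}): 2, ...} (exact line numbers
# depend on PyYAML's tokenization).
doc = """\
Fruits:
  - Apple:
      color: red
"""
print(yaml_to_keywords(doc))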
def parse(self, step_set, toparse, start_index):
    step_io = StringIO(u(toparse))
    step_io.seek(start_index, 0)
    loader = yaml.Loader(step_io)
    try:
        val = loader.get_data()
    except yaml.error.YAMLError:
        line = toparse[start_index:].split('\n', 1)[0]
        raise UnmatchedStepError(line)
    if loader.tokens:
        val_end = loader.tokens[0].start_mark.index
    else:
        val_end = loader.get_mark().index
    return (val, self), start_index + val_end
def parse(path, content, ctx):
    loader = yaml.Loader(content)
    loader.add_implicit_resolver('!expr', re.compile(r'^\(.*$'), '(')
    try:
        root = loader.get_single_node()
    finally:
        loader.dispose()

    root_map = props("root of {}".format(path), root,
                     ['imports', 'locals', 'exports', 'result'])
    for name in ['imports', 'locals', 'exports']:
        root_map[name] = props("{} of {}".format(name, path),
                               root_map.get(name), None)

    multiple_definitions = set()
    for n1, n2 in itertools.combinations(['imports', 'locals', 'exports'], 2):
        n1_names = set(k for k, v in root_map[n1].items())
        n2_names = set(k for k, v in root_map[n2].items())
        multiple_definitions |= n1_names & n2_names
    if multiple_definitions:
        raise Exception("multiple top-level definitions for {} in {}".format(
            multiple_definitions, path))

    imports = {}
    if 'imports' in root_map:
        for imp_name, imp_relpath in root_map['imports'].items():
            if not is_str_scalar(imp_relpath):
                raise Exception(
                    "import path for {} not a str".format(imp_name))
            imp_path = os.path.join(os.path.dirname(path), imp_relpath.value)
            imports[imp_name] = ctx.load(imp_path)

    internal = dict(
        list(root_map['locals'].items()) + list(root_map['exports'].items()))
    exports = dict(root_map['exports'])
    internal_scope = Scopes(internal, parent=None, internal=None)
    exports_scope = Scopes(exports, parent=None, internal=internal_scope)
    return Document(
        path=path,
        imports=imports,
        internal=internal_scope,
        exports=exports_scope,
        result=root_map.get('result'),
    )
def load_config(config_path: str) -> Dict[str, Any]:
    """
    Loads the config YAML file
    """
    config_dict = None
    try:
        with open(config_path, "r") as config_file:
            loader = yaml.Loader(config_file)
            config_dict = loader.get_data()
    except OSError as ex:
        log.error("An error occurred during config file reading:")
        log.error(ex)
    except yaml.YAMLError as ex:
        log.error("An error occurred during config YAML parsing:")
        log.error(ex)
    return config_dict
def load_frontmatter_md(path):
    md = markdown.Markdown(output_format='html5')
    with path.open() as fin:
        loader = yaml.Loader(fin)
        meta = loader.get_data()
        # PyYAML Reader greedily consumes chunks from the stream.
        # We must recover any un-consumed data, as well as what's left in the stream.
        if loader.buffer:
            data = loader.buffer[loader.pointer:-1]
        else:
            data = ''
        data += fin.read()
    print('markdown frontmatter meta: ', meta)
    processed = md.convert(data)
    return processed, meta
def read_file(filename):
    # Read the entire YAML file
    with open(filename, "r") as inf:
        raw = inf.read()

    # Connect a yaml loader to the data
    loader = yaml.Loader(raw)

    # These are callbacks that attach the file position data
    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        column = loader.column
        node = yaml.composer.Composer.compose_node(loader, parent, index)
        node.__file__ = filename
        node.__line__ = line + 1
        node.__column__ = column + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = yaml.constructor.Constructor.construct_mapping(
            loader, node, deep=deep)
        mapping['__FILE__'] = node.__file__
        mapping['__LINE__'] = node.__line__
        mapping['__COLUMN__'] = node.__column__
        return mapping

    # These two lines hook into node creation to add __FILE__ and __LINE__ to
    # all nodes with children. They don't add to items that have a simple
    # value.
    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping

    # The following line stops YAML from converting strings to numbers or
    # true/false
    loader.yaml_implicit_resolvers = {}

    # This does the actual parsing
    y = {}
    while loader.check_data():
        tmp = loader.get_data()
        assert "name" in tmp, "YAML missing block name"
        block_name = tmp["name"]
        y[block_name] = tmp
    return y
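# Assumed example for read_file() above: each '---'-separated document
# must carry a 'name' key, which becomes its key in the returned dict.
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.yaml', delete=False) as tmpf:
    tmpf.write("name: first\nvalue: 1\n---\nname: second\nvalue: 2\n")
blocks = read_file(tmpf.name)
print(sorted(blocks))  # ['first', 'second']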
def load_yaml_with_lines(filename):
    with open(filename) as f:
        d = f.read()
    loader = yaml.Loader(d)

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    return loader.get_single_data()
def parse_yaml_linenumbers(data, filename):
    """Parses yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """
    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        if ANSIBLE_VERSION < 2:
            mapping = Constructor.construct_mapping(loader, node, deep=deep)
        else:
            mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        if ANSIBLE_VERSION < 2:
            loader = yaml.Loader(data)
        else:
            import inspect
            kwargs = {}
            if 'vault_password' in inspect.getargspec(
                    AnsibleLoader.__init__).args:
                kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
            loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise SystemExit("Failed to parse YAML in %s: %s" % (filename, str(e)))
    return data
def test_structure(data_filename, structure_filename, verbose=False):
    nodes1 = []
    nodes2 = eval(open(structure_filename, 'rb').read())
    try:
        loader = yaml.Loader(open(data_filename, 'rb'))
        while loader.check_event():
            if loader.check_event(yaml.StreamStartEvent, yaml.StreamEndEvent,
                                  yaml.DocumentStartEvent, yaml.DocumentEndEvent):
                loader.get_event()
                continue
            nodes1.append(_convert_structure(loader))
        if len(nodes1) == 1:
            nodes1 = nodes1[0]
        assert nodes1 == nodes2, (nodes1, nodes2)
    finally:
        if verbose:
            print("NODES1:")
            pprint.pprint(nodes1)
            print("NODES2:")
            pprint.pprint(nodes2)