def _from_yaml(cls, loader: yaml.Loader, node: yaml.Node) -> "ToJson":
    """Build a ToJson instance from a raw yaml node.

    The node is first interpreted as a mapping; if the mapping
    constructor rejects it, it is constructed as a sequence instead.
    """
    try:
        parsed = loader.construct_mapping(node, deep=True)
    except yaml.constructor.ConstructorError:
        # Not a mapping node — fall back to sequence construction.
        parsed = loader.construct_sequence(node, deep=True)
    return cls(parsed)
def load_yaml(yaml_data, filename):
    """
    Load YAML data extending it with line number information,
    nodes get a __line__ attribute.

    :param yaml_data: raw YAML text; when None it is read from *filename*
    :param filename: path of the YAML file, also used in error messages
    :returns: the parsed document; every mapping gains a '__line__' key
    """
    if yaml_data is None:
        with open(filename, 'r') as data_file:
            yaml_data = data_file.read()
    loader = Loader(yaml_data)

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    # Monkey-patch the loader instance so every composed node records its
    # starting line and every mapping carries it through to the result.
    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    try:
        python_data = loader.get_single_data()
    except ParserError as error:
        print("YAML syntax error parsing file {} :".format(filename), file=sys.stderr)
        print(error, file=sys.stderr)
        # Use sys.exit, not the site-injected builtin exit(): the latter is
        # meant for interactive sessions and may be absent (e.g. python -S).
        sys.exit(1)
    return python_data
def from_yaml(cls, loader: yaml.Loader, node):
    """Build the 'map'/'obj' pair for this class from a yaml mapping node."""
    attributes = loader.construct_mapping(node)
    return {
        'map': cls.Map(),
        'obj': cls.Objects(attributes),
    }
def construct_mapping(self, node, deep=False):
    """Construct a mapping, normalizing key names via REPLACE_RE.

    Characters matched by REPLACE_RE in string keys are replaced with
    underscores; the original key is popped in favor of the new one.
    """
    mapping = Loader.construct_mapping(self, node, deep)
    # Iterate over a snapshot: the loop mutates the dict (pop + insert),
    # which is unsafe while iterating the live dict.
    for key in list(mapping):
        # types.StringTypes is Python 2 only and raises AttributeError on
        # Python 3 (this codebase uses Py3 annotations/f-strings); `str`
        # is the correct check here.
        if not isinstance(key, str):
            continue
        new = self.REPLACE_RE.sub('_', key)
        if new == key:
            continue
        mapping[new] = mapping.pop(key, None)
    return mapping
def construct_tagless_yaml(loader: yaml.Loader, node: yaml.Node):
    """Construct a plain Python value for a node, ignoring its tag.

    Dispatch mirrors yaml.constructor.BaseConstructor#construct_object.
    """
    # The three node kinds are disjoint, so check order is irrelevant.
    if isinstance(node, yaml.MappingNode):
        return loader.construct_mapping(node)
    if isinstance(node, yaml.SequenceNode):
        return loader.construct_sequence(node)
    if isinstance(node, yaml.ScalarNode):
        return loader.construct_scalar(node)
    raise NotImplementedError('invalid node')
def construct_python_object(loader: yaml.Loader, node: yaml.Node) -> SimpleNamespace:
    """Two-step generator constructor for a SimpleNamespace.

    Yields an empty instance before constructing the mapping so that
    recursive/self-referencing documents can link back to it, then
    initializes the instance with the mapping contents.
    """
    instance = SimpleNamespace.__new__(SimpleNamespace)
    yield instance
    fields = loader.construct_mapping(node)
    if not isinstance(fields, dict):
        raise ValueError(
            f'Error constructing PyObj. Expected dictionary, found {fields}'
        )
    instance.__init__(**fields)
def construct_custom_class(cls, loader: yaml.Loader, node: yaml.Node):
    """Two-step generator constructor for *cls* from any yaml node kind.

    Yields a blank instance first (so recursive references can resolve),
    then initializes it with the node's scalar/sequence/mapping value.
    """
    result = cls.__new__(cls)
    yield result
    if isinstance(node, yaml.ScalarNode):
        value = loader.construct_scalar(node)
    elif isinstance(node, yaml.SequenceNode):
        value = loader.construct_sequence(node)
    elif isinstance(node, yaml.MappingNode):
        value = loader.construct_mapping(node)
    else:
        # `assert False` is stripped under `python -O`, which would let an
        # unexpected node type fall through with `value` unbound. Raise
        # explicitly instead, matching construct_tagless_yaml.
        raise NotImplementedError('invalid node')
    result.__init__(value)
def construct_dataframe(loader: yaml.Loader, node: yaml.Node):
    """Two-step generator constructor for a DataFrameFuture.

    Yields an uninitialized future first, then feeds it the sequence or
    mapping data carried by the node.
    """
    future = DataFrameFuture.__new__(DataFrameFuture)
    yield future
    # Mapping and sequence nodes are disjoint types; anything else is
    # rejected up front.
    if isinstance(node, yaml.MappingNode):
        data = loader.construct_mapping(node)
    elif isinstance(node, yaml.SequenceNode):
        data = loader.construct_sequence(node)
    else:
        raise ValueError(
            f'Error constructing DataFrame. Expected dictionary or array of dictionaries, found {type(node)}'
        )
    future.__init__(data)
def no_duplicates_constructor(loader: yaml.Loader, node: yaml.Node, deep: bool = False) -> Any:
    """Construct a yaml mapping, raising ConstructorError on duplicate keys.

    A first pass constructs every key/value pair purely to detect
    duplicates; the returned value comes from the stock mapping
    constructor (which applies yaml's own mapping rules).
    """
    seen: Dict[str, Any] = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        value = loader.construct_object(value_node, deep=deep)
        if key in seen:
            raise yaml.constructor.ConstructorError(
                "while constructing a mapping",
                node.start_mark,
                f"found duplicate key {key}",
                key_node.start_mark,
            )
        seen[key] = value
    return loader.construct_mapping(node, deep)
def load_yaml(data):
    """
    Load YAML data extending it with line number information,
    nodes get a __line__ attribute.
    """
    loader = Loader(data)

    def compose_node(parent, index):
        # loader.line is where the previous token ended (plus blank lines),
        # so the composed node itself starts on the following line.
        start_line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = start_line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        mapping['__line__'] = node.__line__
        return mapping

    # Monkey-patch this loader instance so line info flows from composer
    # nodes into the constructed mappings.
    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    return loader.get_single_data()
def intrinsics_multi_constructor(  # pylint: disable=unused-argument
        loader: yaml.Loader, tag_prefix: str, node: yaml.Node) -> Dict[str, Any]:
    """YAML constructor to parse CloudFormation intrinsics.

    This will return a dictionary with key being the intrinsic name
    """
    # Strip the leading '!' to get the bare tag name.
    tag = node.tag[1:]

    # Some intrinsic functions don't take the "Fn::" prefix.
    prefix = "" if tag in ("Ref", "Condition") else "Fn::"
    cfntag = prefix + tag

    if tag == "GetAtt" and isinstance(node.value, str):
        # Shorthand !GetAtt accepts the "Resource.Attribute" form, while the
        # standard notation is the array [Resource, Attribute]. Convert the
        # shorthand to the standard format.
        value = node.value.split(".", 1)
    elif isinstance(node, yaml.ScalarNode):
        # Plain scalar value
        value = loader.construct_scalar(node)
    elif isinstance(node, yaml.SequenceNode):
        # Array value (e.g. [1, 2])
        value = cast(MutableSequence[Any], loader.construct_sequence(node))
    else:
        # Mapping value (e.g. {foo: bar})
        value = cast(MutableMapping[Any, Any], loader.construct_mapping(node))

    return {cfntag: value}
def handlertag(loader: yaml.Loader, tag_suffix, node):
    """A handler tag"""
    handler = loader.construct_mapping(node)
    # Record which tag suffix selected this handler.
    handler["__handler__"] = tag_suffix
    return handler
def _from_yaml(cls, loader: yaml.Loader, node: yaml.Node) -> "BotoError":
    """Load an internal yaml node parsing."""
    # Deep construction so nested nodes are fully resolved before wrapping.
    mapping = loader.construct_mapping(node, deep=True)
    return cls(mapping)
def yaml_constructor(loader: yaml.Loader, node):
    """Build an InstanceList from a yaml mapping node."""
    return InstanceList(loader.construct_mapping(node))