def construct_env_tag(loader: yaml.Loader, node: yaml.Node) -> Any:
    """Assign value of ENV variable referenced at node.

    A scalar node names a single variable; a sequence node names one or
    more variables, optionally followed by a trailing default value.
    The first variable found in ``os.environ`` wins.

    :param loader: the active YAML loader.
    :param node: scalar or sequence node; anything else raises
        ``ConstructorError``.
    :returns: the resolved environment value (coerced via YAML's implicit
        resolvers) or the default (``None`` if no default was given).
    """
    default = None
    if isinstance(node, yaml.nodes.ScalarNode):
        # Renamed from `vars`, which shadowed the builtin of the same name.
        var_names = [loader.construct_scalar(node)]
    elif isinstance(node, yaml.nodes.SequenceNode):
        child_nodes = node.value
        if len(child_nodes) > 1:
            # default is resolved using YAML's (implicit) types.
            default = loader.construct_object(child_nodes[-1])
            child_nodes = child_nodes[:-1]
        # Env Vars are resolved as string values, ignoring (implicit) types.
        var_names = [loader.construct_scalar(child) for child in child_nodes]
    else:
        raise yaml.constructor.ConstructorError(
            None, None,
            f'expected a scalar or sequence node, but found {node.id}',
            node.start_mark)

    for var in var_names:
        if var in os.environ:
            value = os.environ[var]
            # Resolve value to Python type using YAML's implicit resolvers
            tag = loader.resolve(yaml.nodes.ScalarNode, value, (True, False))
            return loader.construct_object(yaml.nodes.ScalarNode(tag, value))

    return default
def _from_yaml(cls, loader: yaml.Loader, node: yaml.Node) -> "ToJson":
    """Build a ToJson instance from a YAML mapping or sequence node."""
    try:
        parsed = loader.construct_mapping(node, deep=True)
    except yaml.constructor.ConstructorError:
        # The node was not a mapping; treat it as a sequence instead.
        parsed = loader.construct_sequence(node, deep=True)
    return cls(parsed)
def load_data_definitions(datadir):
    """
    Parse the yaml file of base yaml objects and return the information

    :arg file yaml_file: Open file object to read the yaml from
    :returns: An array of Markets that the user can travel to.
    """
    _define_schemas(datadir)

    def _read_and_validate(basename, schema):
        # Parse one YAML file under <datadir>/base and validate it.
        file_path = os.path.join(datadir, 'base', basename)
        with open(file_path) as handle:
            parsed = Loader(handle.read()).get_single_data()
        v_validate(parsed, schema)
        return parsed

    base_data = _read_and_validate('stellar-base.yml', BASE_SCHEMA)
    system_data = _read_and_validate('stellar-sol.yml', SYSTEM_SCHEMA)

    # Merge the system data over the base data; the version marker is
    # only meaningful during validation, so drop it from the result.
    base_data.update(system_data)
    del base_data['version']
    return base_data
def load_yaml(yaml_data, filename):
    """
    Load YAML data extending it with line number information,
    nodes get a __line__ attribute
    """
    # If no raw text was supplied, read it from *filename*.
    if yaml_data is None:
        with open(filename, 'r') as data_file:
            yaml_data = data_file.read()
    loader = Loader(yaml_data)

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1  # loader.line is 0-based; store 1-based
        return node

    def construct_mapping(node, deep=False):
        mapping = Constructor.construct_mapping(loader, node, deep=deep)
        # Inject the recorded line number into every constructed mapping.
        mapping['__line__'] = node.__line__
        return mapping

    # Patch this loader *instance* (not the class) with the wrappers above.
    loader.compose_node = compose_node
    loader.construct_mapping = construct_mapping
    try:
        python_data = loader.get_single_data()
    except ParserError as error:
        # Report the YAML syntax error on stderr and abort the process.
        print("YAML syntax error parsing file {} :".format(filename),
              file=sys.stderr)
        print(error, file=sys.stderr)
        exit(1)
    return python_data
def __init__(self, translation, api_token, project_id, language):
    """Download a translation and save it to *translation* as YAML or JSON."""
    # Parse floats and bools as plain strings so values round-trip verbatim.
    # NOTE(review): this mutates the shared Loader class, affecting every
    # subsequent YAML load in the process — confirm that is intended.
    Loader.add_constructor(
        'tag:yaml.org,2002:float',
        lambda self, node: self.construct_yaml_str(node))
    Loader.add_constructor(
        'tag:yaml.org,2002:bool',
        lambda self, node: self.construct_yaml_str(node))
    log.debug("file %s", translation)
    self.translation = translation
    self.api_token = api_token
    self.project_id = project_id
    self.language = language
    # download() is defined elsewhere; presumably it populates self.data.
    self.download()
    if self.translation.endswith(".yml") or self.translation.endswith(
            ".yaml") or self.translation.endswith(".yml.txt"):
        log.info("Save yaml")
        with codecs.open(self.translation, 'w', "utf-8") as f:
            yaml.safe_dump(self.data, f, default_flow_style=False,
                           allow_unicode=True)
    elif self.translation.endswith(".json"):
        log.info("Save json")
        with codecs.open(self.translation, 'w', "utf-8") as f:
            # NOTE(review): json.dump() has no `encoding` kwarg on Python 3;
            # this call raises TypeError there — confirm target version.
            json.dump(self.data, f, ensure_ascii=False, encoding='utf8')
    else:
        # Unsupported extension: log and exit with a distinct status code.
        log.error("Only yaml or json supported")
        sys.exit(7)
    pass
def construct_tagless_yaml(loader: yaml.Loader, node: yaml.Node):
    """Construct a plain Python value from *node*, ignoring its tag.

    Dispatch mirrors yaml.constructor.BaseConstructor#construct_object.
    """
    dispatch = (
        (yaml.ScalarNode, loader.construct_scalar),
        (yaml.SequenceNode, loader.construct_sequence),
        (yaml.MappingNode, loader.construct_mapping),
    )
    for node_cls, construct in dispatch:
        if isinstance(node, node_cls):
            return construct(node)
    raise NotImplementedError('invalid node')
def __init__(cls, name, bases, kwds):
    """This overlaps quite a bit with YAMLObjectMetaclass."""
    # Register YAML load/dump hooks for every subclass; the base
    # ManagementObject class itself is skipped.
    if name != "ManagementObject":
        # One tag per defining module, e.g. tag:yaml.org,2002:es.bsc.pkg.mod
        yaml_tag = u"tag:yaml.org,2002:es.bsc.%s" % (cls.__module__)
        cls.yaml_loader = Loader
        cls.yaml_tag = yaml_tag  # used by `ManagementObject.to_yaml`
        logger.trace("YAML TAG : %s", yaml_tag)
        # Wire loading and dumping of this tag to the class's own hooks.
        Loader.add_constructor(yaml_tag, cls.from_yaml)
        Dumper.add_representer(cls, cls.to_yaml)
    super(ManagementMetaClass, cls).__init__(name, bases, kwds)
def fix_yaml_loader():
    """Ensure that any string read by yaml is represented as unicode."""
    from yaml import Loader, SafeLoader

    def _str_as_unicode(self, node):
        # Return the raw scalar so the value keeps its unicode type.
        return self.construct_scalar(node)

    for loader_cls in (Loader, SafeLoader):
        loader_cls.add_constructor(u'tag:yaml.org,2002:str', _str_as_unicode)
def sequence(*, loader: yaml.Loader) -> types.TSourceMapEntries: """ Calculate the source map of a sequence value. Args: loader: Source of YAML tokens. Returns: A list of JSON pointers and source map entries. """ # Look for sequence start token = loader.get_token() if not isinstance( token, (yaml.FlowSequenceStartToken, yaml.BlockSequenceStartToken)): raise errors.InvalidYamlError( f"expected sequence start but received {token=}") value_start = types.Location(token.start_mark.line, token.start_mark.column, token.start_mark.index) # Handle values sequence_index = 0 entries: types.TSourceMapEntries = [] while not isinstance( loader.peek_token(), ( yaml.FlowSequenceEndToken, yaml.BlockEndToken, yaml.DocumentEndToken, yaml.StreamEndToken, ), ): # Skip block entry if isinstance(loader.peek_token(), yaml.BlockEntryToken): loader.get_token() # Retrieve values value_entries = value(loader=loader) entries.extend((f"/{sequence_index}{pointer}", entry) for pointer, entry in value_entries) sequence_index += 1 # Skip flow entry if isinstance(loader.peek_token(), (yaml.FlowEntryToken)): loader.get_token() # Look for sequence end token = loader.get_token() if not isinstance(token, (yaml.FlowSequenceEndToken, yaml.BlockEndToken)): raise errors.InvalidYamlError( f"expected sequence end but received {token=}") value_end = types.Location(token.end_mark.line, token.end_mark.column, token.end_mark.index) return [("", types.Entry(value_start=value_start, value_end=value_end)) ] + entries
def construct_custom_class(cls, loader: yaml.Loader, node: yaml.Node):
    """Two-step YAML constructor for *cls*.

    Yields a bare instance first (so anchored/recursive references can
    resolve), then initialises it from the node's constructed value.

    :param cls: class to instantiate.
    :param loader: the active YAML loader.
    :param node: scalar, sequence, or mapping node.
    :raises TypeError: if the node is none of the three known kinds.
    """
    result = cls.__new__(cls)
    # Yield before construction so PyYAML can wire up back-references.
    yield result
    if isinstance(node, yaml.ScalarNode):
        value = loader.construct_scalar(node)
    elif isinstance(node, yaml.SequenceNode):
        value = loader.construct_sequence(node)
    elif isinstance(node, yaml.MappingNode):
        value = loader.construct_mapping(node)
    else:
        # Was `assert False`, which is silently stripped under `python -O`;
        # raise an explicit error instead.
        raise TypeError(f'unexpected node type: {type(node)!r}')
    result.__init__(value)
def construct_dataframe(loader: yaml.Loader, node: yaml.Node):
    """Two-step YAML constructor yielding a DataFrameFuture.

    The bare future is yielded first so references can resolve, then it
    is initialised from the mapping or sequence contents of *node*.
    """
    result = DataFrameFuture.__new__(DataFrameFuture)
    yield result
    if isinstance(node, yaml.MappingNode):
        data = loader.construct_mapping(node)
    elif isinstance(node, yaml.SequenceNode):
        data = loader.construct_sequence(node)
    else:
        raise ValueError(
            f'Error constructing DataFrame. Expected dictionary or array of dictionaries, found {type(node)}'
        )
    result.__init__(data)
def getSchema():
    """Load and return ``schema.yaml`` from the configured STATIC_ROOT.

    Registers a str constructor on Loader/SafeLoader so all strings are
    returned as unicode.

    :returns: the parsed schema document.
    """
    def construct_yaml_str(self, node):
        # Return the raw scalar so strings stay unicode.
        return self.construct_scalar(node)

    Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    SCHEMA_DIR = getattr(settings, "STATIC_ROOT", None)
    SCHEMA_FILE = SCHEMA_DIR + 'schema.yaml'
    # `with` closes the handle (the original leaked it), and the explicit
    # Loader both silences PyYAML's unsafe-load warning and guarantees the
    # str constructor registered above is actually used.
    with open(SCHEMA_FILE, 'r') as stream:
        schema = yaml.load(stream, Loader=Loader)
    return schema
def yaml_load_unicode(stream):
    """Load YAML from *stream*, forcing all strings to unicode.

    :param stream: string or file-like object containing YAML.
    :returns: the parsed document.
    """
    import yaml
    from yaml import Loader, SafeLoader

    def construct_yaml_str(self, node):
        # Override the default string handling function
        # to always return unicode objects
        return self.construct_scalar(node)

    Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    # Pass Loader explicitly: on PyYAML >= 5.1 a bare yaml.load() warns and
    # defaults to FullLoader, which would silently bypass the str override
    # registered above (it was only added to Loader and SafeLoader).
    return yaml.load(stream, Loader=Loader)
def fix_yaml_loader():
    """Ensure YAML string scalars are returned as unicode objects."""
    from yaml import Loader, SafeLoader

    def _construct_str(self, node):
        # Bypass the default str constructor; keep the raw scalar.
        return self.construct_scalar(node)

    for loader_cls in (Loader, SafeLoader):
        loader_cls.add_constructor(u'tag:yaml.org,2002:str', _construct_str)
def _handle_quirks(self):
    """Apply optional loader workarounds configured on this instance."""
    if self._unicode_quirk:
        self._logger.debug('Enabling unicode quirk')

        def construct_yaml_str(self, node):
            # Re-pack each code point as a raw byte, then decode as UTF-8.
            # NOTE(review): chr(ord(x)) joined into bytes looks like
            # Python-2-era mojibake repair (latin-1 bytes mis-read as text);
            # on Python 3 joining str into b'' would raise TypeError —
            # confirm the target interpreter version.
            try:
                rawdata = b''.join([chr(ord(x)) for x in self.construct_scalar(node)])
                return rawdata.decode('utf8')
            except ValueError:
                # apparently sometimes the data is already correctly encoded
                return self.construct_scalar(node)

        # NOTE(review): mutates the global Loader/SafeLoader classes, so the
        # quirk applies to every subsequent load in the process.
        Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
        SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
def __init__(self, filename):
    """Parse *filename* as YAML and store the result on ``self.data``.

    :param filename: path to the YAML file to read.
    :raises yaml.YAMLError: if the file is not valid YAML.
    """
    # Make sure pyyaml always returns unicode
    def construct_yaml_str(self, node):
        return self.construct_scalar(node)

    Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    with open(filename, 'r') as f:
        # The original wrapped this in `except yaml.YAMLError as e: raise e`,
        # which only re-raises the same exception; let it propagate directly.
        self.data = yaml.safe_load(f)
def construct_hook(loader: yaml.Loader, node: yaml.Node) -> Py:
    """Construct a ``$HOOK.<name>`` reference from a scalar node.

    :param loader: the active YAML loader.
    :param node: scalar node holding the connection name.
    :raises ValueError: if the name is not a pure ``\\w+`` identifier.
    """
    conn_name = loader.construct_yaml_str(node)
    # fullmatch: re.match only anchors at the start, so names such as
    # "foo bar" passed the original check despite containing whitespace.
    if not re.fullmatch(r'\w+', conn_name):
        raise ValueError(
            f'Error constructing hook. Expected connection name, found {conn_name}'
        )
    return Py(f'$HOOK.{conn_name}')
def from_yaml(cls, loader: yaml.Loader, node):
    """Build the map/obj pair for this class from a YAML mapping node."""
    payload = loader.construct_mapping(node)
    # The Map is empty/default; the Objects wrapper receives the payload.
    return {'map': cls.Map(), 'obj': cls.Objects(payload)}
def primitive(*, loader: yaml.Loader) -> types.TSourceMapEntries:
    """
    Calculate the source map of a primitive type.

    Args:
        loader: Source of YAML tokens.

    Returns:
        A list of JSON pointers and source map entries.
    """
    token = loader.get_token()
    if not isinstance(token, yaml.ScalarToken):
        raise errors.InvalidYamlError(f"expected scalar but received {token=}")
    start_mark = token.start_mark
    end_mark = token.end_mark
    entry = types.Entry(
        value_start=types.Location(start_mark.line, start_mark.column,
                                   start_mark.index),
        value_end=types.Location(end_mark.line, end_mark.column,
                                 end_mark.index),
    )
    # A primitive maps entirely to the root pointer "".
    return [("", entry)]
def load(cls, stream, constructors=None, multi_constructors=None, implicit_resolvers=None):
    """Parse *stream* with the class defaults merged with the given overrides.

    Registration order matches the original: constructors, then multi
    constructors, then implicit resolvers.
    """
    loader = Loader(stream)
    merged_constructors = {**cls._constructors, **(constructors or {})}
    merged_resolvers = {**cls._implicit_resolvers, **(implicit_resolvers or {})}
    merged_multi = {**cls._multi_constructors, **(multi_constructors or {})}
    for tag, fn in merged_constructors.items():
        loader.add_constructor(tag, fn)
    for tag, fn in merged_multi.items():
        loader.add_multi_constructor(tag, fn)
    for tag, pattern in merged_resolvers.items():
        loader.add_implicit_resolver(tag, pattern, None)
    try:
        return loader.get_single_data()
    finally:
        # Always release parser resources, even on error.
        loader.dispose()
def construct_variable(loader: yaml.Loader, node: yaml.Node) -> Py:
    """Construct a ``$VARIABLE.<id>`` reference from a scalar node.

    :param loader: the active YAML loader.
    :param node: scalar node holding the variable id.
    :raises ValueError: if the id is not a pure ``\\w+`` identifier.
    """
    var_id = loader.construct_yaml_str(node)
    # fullmatch: re.match only anchors at the start, so ids such as
    # "foo bar" passed the original check despite containing whitespace.
    if not re.fullmatch(r'\w+', var_id):
        raise ValueError(
            f'Error constructing variable. Expected variable id, found {var_id}'
        )
    return Py(f'$VARIABLE.{var_id}')
def get_yaml_docs():
    """Parse the YAML file"""
    source = read_yaml_file(args['infile'])

    if args.get('template'):
        # Prepend the template so its content precedes the main file.
        source = read_yaml_file(args['template']) + source

    # Each source entry is indexed as line[0]=text, line[1]=filename,
    # line[2]=original line number (see mark_str below); join the text.
    source_str = ''.join([line[0] for line in source])

    def mark_str(mark):
        # Render a YAML error mark against the original file/line
        # bookkeeping, with a caret under the offending column.
        line = source[mark.line]
        return ("In file " + line[1] + ", line " + str(line[2]) +
                ", column " + str(mark.column + 1) + ":\n" +
                line[0].rstrip() + "\n" + ' ' * mark.column + "^\n")

    # We iterate through all of the documents to properly diagnose errors,
    # because the load_all generator does not handle exceptions correctly.
    docs = []
    load = Loader(source_str)
    while load.check_data():
        try:
            doc = load.get_data()
        except yaml.YAMLError as err:
            # Exit with a human-readable message assembled from the parts
            # of the error that are actually present.
            sys.exit((mark_str(err.problem_mark) if err.problem_mark else "") +
                     (err.problem + "\n" if err.problem else "") +
                     (err.note + "\n" if err.note else ""))
        else:
            docs.append(doc)
    return docs
def load_base_types(datadir):
    """
    Parse the yaml file of base enum types and return the information

    :arg datadir: The data directory to find the types file
    :returns: A list of types
    """
    flog = mlog.fields(func='load_base_types')
    flog.fields(datadir=datadir).debug('Entered load_base_types')

    data_file = os.path.join(datadir, 'base', 'stellar-types.yml')
    with_file_log = flog.fields(filename=data_file)
    with_file_log.debug('constructed data_file path {data_file}',
                        data_file=data_file)

    with_file_log.debug('Opening data_file')
    with open(data_file, 'r') as data_fh:
        with_file_log.debug('reading data_file')
        yaml_data = data_fh.read()

    with_file_log.fields(yaml=yaml_data).debug('parsing yaml string')
    loader = Loader(yaml_data)
    data = loader.get_single_data()

    flog.fields(data=data).debug('Validating type data structure')
    # v_validate returns the validated (possibly normalised) structure.
    data = v_validate(data, DATA_TYPES_SCHEMA)
    flog.debug('Returning type data')
    return data
def no_duplicates_constructor(loader: yaml.Loader, node: yaml.Node, deep: bool = False) -> Any:
    """Mapping constructor that rejects duplicate keys.

    Raises:
        yaml.constructor.ConstructorError: if a key appears more than once.
    """
    # First pass: construct every key/value purely to detect duplicates.
    mapping: Dict[str, Any] = {}
    for key_node, value_node in node.value:
        key = loader.construct_object(key_node, deep=deep)
        value = loader.construct_object(value_node, deep=deep)
        if key in mapping:
            raise yaml.constructor.ConstructorError(
                "while constructing a mapping",
                node.start_mark,
                f"found duplicate key {key}",
                key_node.start_mark,
            )
        mapping[key] = value
    # NOTE(review): the node is constructed a second time here — the dict
    # above exists only for the duplicate check. Also, unlike
    # construct_mapping, the manual pass does not flatten `<<` merge keys
    # and raises TypeError (not ConstructorError) on unhashable keys —
    # confirm both are acceptable.
    return loader.construct_mapping(node, deep)
def not_constructor(loader: Loader, node) -> bool:
    """YAML constructor: logical NOT of the first element of a sequence node.

    Falls back to ``False`` (best-effort) if the sequence is empty.
    """
    value = loader.construct_sequence(node, deep=True)
    try:
        expr = not value[0]
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; the best-effort fallback behaviour is preserved.
        expr = False
    return expr
def join_constructor(loader: Loader, node):
    """YAML constructor: join ``values[1]`` with separator ``values[0]``.

    Falls back to ``''`` (best-effort) if the node lacks the expected shape.
    """
    values = loader.construct_sequence(node, deep=True)
    try:
        sep, parts = str(values[0]), [str(_) for _ in values[1]]
        return sep.join(parts)
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; the best-effort fallback behaviour is preserved.
        return ''
def eval_constructor(loader: Loader, node):
    """YAML constructor: wrap the first sequence element in an EvalType.

    An empty sequence yields ``EvalType(None)`` (best-effort).
    """
    value = loader.construct_sequence(node, deep=True)
    try:
        expr = value[0]
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; the best-effort fallback behaviour is preserved.
        expr = None
    return EvalType(expr)
def test_app_config():
    """Check the cluster's live app config matches `cartridge_app_config`."""
    # Get configured auth params
    specified_app_config = utils.get_cluster_var('cartridge_app_config')
    if not specified_app_config:
        return  # nothing declared -> nothing to verify

    # Get all configured instances
    configured_instances = utils.get_configured_instances()
    if not configured_instances:
        return  # no instances -> no endpoint to query

    # Get cartridge app config from any instance's admin endpoint
    config_url = '%s/admin/config' % utils.get_any_instance_url()
    session = utils.get_authorized_session()

    response = session.get(config_url)
    assert response.status_code == 200

    # The endpoint returns YAML; parse the raw response body.
    loader = Loader(response.content)
    app_config = loader.get_data()

    # Check if app config is equal to configured one
    for section_name, section in specified_app_config.items():
        if section_is_deleted(section):
            # Deleted sections must be absent from the live config.
            assert section_name not in app_config
        else:
            assert section_name in app_config
            assert app_config[section_name] == section['body']
def get_config_yaml(
        path_config=os.path.dirname(os.path.realpath(__file__)) +
        "/../config.d/config.yaml",
        name_config="openstack"):
    """Load the *name_config* section from the YAML config file.

    Registers a str constructor on Loader/SafeLoader so all strings are
    returned as unicode.

    :param path_config: path to the YAML config file.
    :param name_config: top-level section to return.
    :returns: the parsed section.
    """
    import yaml
    from yaml import Loader, SafeLoader

    def construct_yaml_str(self, node):
        # Override the default string handling function
        # to always return unicode objects
        return self.construct_scalar(node)

    Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
    # `with` closes the file even if parsing fails (the original leaked the
    # handle on errors); explicit Loader keeps the str override above in
    # effect and avoids PyYAML's unsafe-load warning.
    with open(path_config) as f:
        dataMap = yaml.load(f, Loader=Loader)[name_config]
    return dataMap
def key_constructor(loader: Loader, node):
    """YAML constructor: look up ``value[1:]`` as a key path on ``value[0]``.

    Returns ``None`` (best-effort) when the lookup fails.
    """
    value = loader.construct_sequence(node, deep=True)
    try:
        obj = value[0]
        keys = value[1:]
        return getAttribute(obj, *keys)
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; the best-effort fallback behaviour is preserved.
        return None
def construct_mapping(self, node, deep=False):
    """Construct a mapping, rewriting string keys via REPLACE_RE -> '_'."""
    mapping = Loader.construct_mapping(self, node, deep)
    for key in mapping:
        # Only string keys are normalised.
        # NOTE(review): types.StringTypes exists only on Python 2; on
        # Python 3 this raises AttributeError — confirm target version.
        if not isinstance(key, types.StringTypes):
            continue
        new = self.REPLACE_RE.sub('_', key)
        if new == key:
            continue
        # Replace the original key with its normalised form.
        # NOTE(review): this mutates the dict while iterating it, which
        # can raise RuntimeError on Python 3 — verify against real inputs.
        mapping[new] = mapping.pop(key, None)
    return mapping
def _do_loads(self, s, *args, **kwargs):
    """Deserialize the YAML document *s*, forcing strings to unicode."""
    import yaml
    from yaml import Loader, SafeLoader

    # Force Unicode string output according to this SO question
    # 2890146/how-to-force-pyyaml-to-load-strings-as-unicode-objects
    def _unicode_str(self, node):
        return self.construct_scalar(node)

    for loader_cls in (Loader, SafeLoader):
        loader_cls.add_constructor('tag:yaml.org,2002:str', _unicode_str)

    # TODO: optionally utilize C acceleration if available
    return yaml.load(s, *args, **kwargs)
def value(*, loader: yaml.Loader) -> types.TSourceMapEntries:
    """
    Calculate the source map of any value.

    Args:
        loader: Source of YAML tokens.

    Returns:
        A list of JSON pointers and source map entries.
    """
    next_token = loader.peek_token()
    sequence_starts = (yaml.FlowSequenceStartToken,
                       yaml.BlockSequenceStartToken)
    mapping_starts = (yaml.FlowMappingStartToken, yaml.BlockMappingStartToken)
    if isinstance(next_token, sequence_starts):
        return sequence(loader=loader)
    if isinstance(next_token, mapping_starts):
        return mapping(loader=loader)
    # Neither a sequence nor a mapping: must be a primitive scalar.
    return primitive(loader=loader)
def load(cls, stream, constructors=None, multi_constructors=None):
    """Parse *stream* using the class-level constructors plus overrides."""
    loader = Loader(stream)
    merged = {**cls._constructors, **(constructors or {})}
    merged_multi = {**cls._multi_constructors, **(multi_constructors or {})}
    for tag, fn in merged.items():
        loader.add_constructor(tag, fn)
    for tag, fn in merged_multi.items():
        loader.add_multi_constructor(tag, fn)
    try:
        return loader.get_single_data()
    finally:
        # Always release parser resources, even on error.
        loader.dispose()
def load(stream, constructors=None):
    """Parse *stream* with the extension tags registered on a fresh Loader.

    :param stream: YAML text or file object.
    :param constructors: optional mapping of tag name (without ``!``) to
        constructor; entries named ``timedelta``/``re`` replace the defaults.
    :returns: the single document contained in the stream.
    """
    loader = Loader(stream)
    constructors = constructors or {}
    # `x not in y` rather than `not x in y` (same meaning, idiomatic form).
    if "timedelta" not in constructors:
        loader.add_constructor("!timedelta", _timedelta_contructor)
    if "re" not in constructors:
        loader.add_constructor("!re", _re_constructor)
    loader.add_multi_constructor("!ref:", _ref_constructor)
    loader.add_multi_constructor("!factory:", _factory_constructor)
    loader.add_multi_constructor("!obj:", _obj_constructor)
    loader.add_multi_constructor("!extends:", _extends_constructor)
    # NOTE(review): '!include:' is wired to _extends_constructor as well —
    # confirm this aliasing is intentional.
    loader.add_multi_constructor("!include:", _extends_constructor)
    # Iterating an empty dict is a no-op, so no `if constructors:` guard.
    for name, constructor in constructors.items():
        loader.add_constructor("!" + name, constructor)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()
def _construct_attr_dict(loader: yaml.Loader, node):
    """Construct an AttrDict from a mapping node, resolving merge keys."""
    # flatten_mapping folds `<<` merge keys into the node's own pairs.
    loader.flatten_mapping(node)
    return AttrDict(loader.construct_pairs(node, deep=True))
def __init__(self, *args, **kwargs):
    """Delegate initialisation to the imported Loader base class."""
    # NOTE(review): calls Loader.__init__ directly rather than super();
    # fine for single inheritance but bypasses cooperative MRO — confirm.
    Loader.__init__(self, *args, **kwargs)
def handlertag(loader: yaml.Loader, tag_suffix, node):
    """A handler tag: build the mapping and record which handler it is for."""
    mapping = loader.construct_mapping(node)
    # Stash the tag suffix so downstream code knows which handler applies.
    mapping["__handler__"] = tag_suffix
    return mapping
def __init__(self, *args, **kwargs):
    """Initialise the base Loader, then route map/omap tags to this class."""
    Loader.__init__(self, *args, **kwargs)
    # Both plain mappings and ordered mappings go through this class's
    # construct_yaml_map (presumably to control mapping construction
    # uniformly — confirm the intent with the class definition).
    self.add_constructor(u"tag:yaml.org,2002:map",
                         type(self).construct_yaml_map)
    self.add_constructor(u"tag:yaml.org,2002:omap",
                         type(self).construct_yaml_map)
from util.config_parse import convert_tokens
import os
from path import path

# https://stackoverflow.com/questions/2890146/how-to-force-pyyaml-to-load-strings-as-unicode-objects
from yaml import Loader, SafeLoader


def construct_yaml_str(self, node):
    """
    Override the default string handling function to always return unicode
    objects
    """
    return self.construct_scalar(node)


Loader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)
SafeLoader.add_constructor(u'tag:yaml.org,2002:str', construct_yaml_str)

# SERVICE_VARIANT specifies name of the variant used, which decides what YAML
# configuration files are read during startup.
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)

# CONFIG_ROOT specifies the directory where the YAML configuration
# files are expected to be found. If not specified, use the project
# directory.
# NOTE(review): ENV_ROOT is not defined or imported in this view — confirm
# it is provided earlier in the file or by a wildcard import.
CONFIG_ROOT = path(os.environ.get('CONFIG_ROOT', ENV_ROOT))

# CONFIG_PREFIX specifies the prefix of the YAML configuration files,
# based on the service variant. If no variant is use, don't use a
# prefix.
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""