def dump_db_to_yaml(mps_config, filename):
    """Serialize every MPS database table to a YAML file.

    Each model class is dumped as its own YAML document
    (``explicit_start=True``) keyed by the class name, with rows ordered
    by primary key for stable, diff-friendly output.

    :param mps_config: configuration object exposing a SQLAlchemy ``session``
    :param filename: path of the YAML file to write
    """
    session = mps_config.session
    yaml.add_multi_representer(models.Base, model_representer)
    model_classes = [models.Crate, models.LinkNode, models.ApplicationType,
                     models.ApplicationCard, models.DigitalChannel,
                     models.AnalogChannel, models.DeviceType,
                     models.DeviceState, models.DigitalDevice,
                     models.DeviceInput, models.Fault, models.FaultInput,
                     models.FaultState, models.AnalogDevice,
                     models.MitigationDevice, models.BeamDestination,
                     models.BeamClass, models.AllowedClass, models.Condition,
                     models.IgnoreCondition, models.ConditionInput]
    # ``open()`` replaces the Python-2-only ``file()`` builtin (removed in
    # Python 3); the context manager guarantees the handle is closed even
    # if a dump raises.
    with open(filename, 'w') as f:
        for model_class in model_classes:
            collection = session.query(model_class).order_by(model_class.id).all()
            yaml.dump({model_class.__name__: collection}, f, explicit_start=True)
def configure():
    """Register the Step YAML hooks on the default parser/dumper.

    Called in drain/__init__.py.
    """
    # Constructor first, representer second: the two registrations are
    # independent, together enabling round-tripping of Step objects.
    yaml.add_multi_constructor('!step', step_multi_constructor)
    yaml.add_multi_representer(Step, step_multi_representer)
def configure():
    """Register the Step YAML hooks and disable anchor/alias emission.

    Called in drain/__init__.py.
    """
    # Representer/constructor pair for (de)serializing Step objects.
    yaml.add_multi_representer(Step, step_multi_representer)
    yaml.add_multi_constructor('!step', step_multi_constructor)
    # Emit repeated objects inline instead of as YAML anchors/aliases.
    yaml.Dumper.ignore_aliases = lambda *args: True
def yaml(self, human=True):
    """Serialize this object to a YAML string.

    :param human: when True, register SafeRepresenter's list representer
        for ``list`` and ``tuple`` so both render as plain YAML
        sequences instead of python-specific tags.
    :returns: the YAML document as a string.
    """
    import yaml
    if human:
        # NOTE(review): these registrations mutate PyYAML's global default
        # Dumper, so they persist for all later dumps in the process.
        yaml.add_multi_representer(
            list, yaml.representer.SafeRepresenter.represent_list)
        yaml.add_multi_representer(
            tuple, yaml.representer.SafeRepresenter.represent_list)
    return yaml.dump(self)
def serializable(cls):
    """Register representer method of decorated class with YAML.

    Looks for ``_representer`` (exact-type representer) or
    ``_multi_representer`` (also covers subclasses) on the decorated
    class and registers it with PyYAML's default Dumper.

    :param cls: class to register
    :returns: ``cls`` unchanged, so the decorator is transparent
    :raises RuntimeError: if the class defines neither hook
    """
    if hasattr(cls, '_representer'):
        yaml.add_representer(cls, cls._representer)
    elif hasattr(cls, '_multi_representer'):
        yaml.add_multi_representer(cls, cls._multi_representer)
    else:
        # A bare ``raise RuntimeError`` gave no hint about the cause;
        # name the offending class in the message.
        raise RuntimeError(
            'serializable class %r defines neither _representer '
            'nor _multi_representer' % cls)
    return cls
def __init__(cls, name, bases, clsdict, *, adapts=None, ignore_aliases=False,
             **kwargs):
    # Metaclass initializer: after normal class creation, wire the new
    # class (or the class it adapts) into the YAML dumper so instances
    # can be represented via __sx_getstate__.
    super(ObjectMeta, cls).__init__(name, bases, clsdict, adapts=adapts,
                                    **kwargs)
    if hasattr(cls, '__sx_getstate__'):
        representer = cls.represent_wrapper
        adaptee = cls.get_adaptee()
        if adaptee is not None:
            # Register for the adapted type so plain adaptee instances
            # are dumped through this class's representer.
            yaml.add_multi_representer(adaptee, representer,
                                       Dumper=dumper.Dumper)
        else:
            yaml.add_multi_representer(cls, representer,
                                       Dumper=dumper.Dumper)
    # NOTE(review): ``ignore_aliases`` is accepted but unused in this
    # body -- presumably consumed by cooperating machinery; confirm.
def dump(node):
    """Dump an initialized qubell object structure to a YAML string.

    Internal bookkeeping fields (credentials, ids, back-references) are
    stripped per-class before serialization, and anchors/aliases are
    disabled so the output is plain, human-readable YAML.

    :param node: root object (or structure of objects) to serialize
    :returns: YAML document as a string
    """
    from qubell.api.private.platform import Auth, QubellPlatform
    from qubell.api.private.organization import Organization
    from qubell.api.private.application import Application
    from qubell.api.private.instance import Instance
    from qubell.api.private.revision import Revision
    from qubell.api.private.environment import Environment
    from qubell.api.private.zone import Zone
    from qubell.api.private.manifest import Manifest

    # Exclude keys from dump
    # Format: { 'ClassName': ['fields', 'to', 'exclude']}
    exclusion_list = {
        Auth: ['cookies'],
        QubellPlatform: ['auth', ],
        Organization: ['auth', 'organizationId', 'zone'],
        Application: ['auth', 'applicationId', 'organization'],
        Instance: ['auth', 'instanceId', 'application'],
        Manifest: ['name', 'content'],
        Revision: ['auth', 'revisionId'],
        Environment: ['auth', 'environmentId', 'organization'],
        Zone: ['auth', 'zoneId', 'organization'],
    }

    def obj_presenter(dumper, obj):
        for x in exclusion_list.keys():
            if isinstance(obj, x):  # Find class
                fields = obj.__dict__.copy()
                for excl_item in exclusion_list[x]:
                    try:
                        fields.pop(excl_item)
                    except KeyError:
                        # Was a bare ``except:``; only a missing key is
                        # expected here, anything else should propagate.
                        # ``log.warn`` is a deprecated alias of warning().
                        log.warning('No item %s in object %s' % (excl_item, x))
                return dumper.represent_mapping('tag:yaml.org,2002:map',
                                                fields)
        return dumper.represent_mapping('tag:yaml.org,2002:map', obj.__dict__)

    # Disable YAML anchors/aliases for repeated objects.
    noalias_dumper = yaml.dumper.Dumper
    noalias_dumper.ignore_aliases = lambda self, data: True
    yaml.add_representer(
        unicode, lambda dumper, value: dumper.represent_scalar(
            u'tag:yaml.org,2002:str', value))
    yaml.add_multi_representer(object, obj_presenter)
    serialized = yaml.dump(node, default_flow_style=False,
                           Dumper=noalias_dumper)
    return serialized
def initialize():
    """Add constructors to yaml parser.

    Registers the ``!obj:``/``!setting`` constructors and the
    representers for MeterBase and Model subclasses, then marks the
    module as initialized.
    """
    # Bug fix: without ``global`` the assignment at the bottom bound a
    # function-local, so the module-level ``is_initialized`` flag was
    # never set and initialization could be repeated.
    global is_initialized
    from eemeter.meter.base import MeterBase
    from eemeter.models.temperature_sensitivity import Model
    yaml.add_multi_constructor('!obj:', multi_constructor_obj)
    yaml.add_constructor('!setting', constructor_setting)
    yaml.add_multi_representer(MeterBase, multi_representer_obj)
    yaml.add_multi_representer(Model, multi_representer_obj)
    is_initialized = True
def add_custom_yaml_representer(cls, data_type, representer_fn): """ Add custom representer to regression YAML dumper. It is polymorphic, so it works also for subclasses of `data_type`. :param type data_type: Type of objects. :param callable representer_fn: Function that receives ``(dumper, data)`` type as argument and must must return a YAML-convertible representation. """ # Use multi-representer instead of simple-representer because it supports polymorphism. yaml.add_multi_representer(data_type, multi_representer=representer_fn, Dumper=cls)
def dump(node):
    """Dump an initialized qubell object structure to a YAML string.

    Internal bookkeeping fields (credentials, ids, back-references) are
    stripped per-class before serialization, and anchors/aliases are
    disabled so the output is plain, human-readable YAML.

    :param node: root object (or structure of objects) to serialize
    :returns: YAML document as a string
    """
    from qubell.api.private.platform import Auth, QubellPlatform
    from qubell.api.private.organization import Organization
    from qubell.api.private.application import Application
    from qubell.api.private.instance import Instance
    from qubell.api.private.revision import Revision
    from qubell.api.private.provider import Provider
    from qubell.api.private.environment import Environment
    from qubell.api.private.zone import Zone
    from qubell.api.private.manifest import Manifest

    # Exclude keys from dump
    # Format: { 'ClassName': ['fields', 'to', 'exclude']}
    exclusion_list = {
        Auth: ["cookies"],
        QubellPlatform: ["auth"],
        Organization: ["auth", "organizationId", "zone"],
        Application: ["auth", "applicationId", "organization"],
        Instance: ["auth", "instanceId", "application"],
        Manifest: ["name", "content"],
        Revision: ["auth", "revisionId"],
        Provider: ["auth", "providerId", "organization"],
        Environment: ["auth", "environmentId", "organization"],
        Zone: ["auth", "zoneId", "organization"],
    }

    def obj_presenter(dumper, obj):
        for x in exclusion_list.keys():
            if isinstance(obj, x):  # Find class
                fields = obj.__dict__.copy()
                for excl_item in exclusion_list[x]:
                    try:
                        fields.pop(excl_item)
                    except KeyError:
                        # Was a bare ``except:``; only a missing key is
                        # expected here, anything else should propagate.
                        # ``log.warn`` is a deprecated alias of warning().
                        log.warning("No item %s in object %s" % (excl_item, x))
                return dumper.represent_mapping("tag:yaml.org,2002:map",
                                                fields)
        return dumper.represent_mapping("tag:yaml.org,2002:map", obj.__dict__)

    # Disable YAML anchors/aliases for repeated objects.
    noalias_dumper = yaml.dumper.Dumper
    noalias_dumper.ignore_aliases = lambda self, data: True
    yaml.add_representer(
        unicode, lambda dumper, value: dumper.represent_scalar(
            u"tag:yaml.org,2002:str", value))
    yaml.add_multi_representer(object, obj_presenter)
    serialized = yaml.dump(node, default_flow_style=False,
                           Dumper=noalias_dumper)
    return serialized
def register_yaml_representers():
    """Configure YAML output from PyYAML.

    Installs the module's string representer, routes every
    CustomYamlObject subclass through its own representer, and makes any
    other object type fail loudly rather than be dumped incorrectly.
    """
    # TODO better to add these to a custom Dumper than pollute the global object?
    def _reject_unknown(dumper, data):
        # Don't silently fail if we try to export something weird
        message = (
            "{} object cannot be dumped to YAML because it does not "
            "extend CustomYamlObject".format(data.__class__.__name__))
        raise TypeError(message)

    yaml.add_representer(str, represent_string)
    yaml.add_multi_representer(CustomYamlObject,
                               CustomYamlObject.represent_object)
    yaml.add_multi_representer(object, _reject_unknown)
def write_yml(self, model):
    """Write the model configuration to a YAML file in the output dir.

    If tensorflow is available, Keras models embedded in the config are
    saved to ``keras_model.h5`` and represented in the YAML as a
    ``!keras_model`` scalar holding that path.

    :param model: model whose ``config`` is serialized
    """
    ###################
    # YAML config file
    ###################
    def keras_model_representer(dumper, keras_model):
        model_path = model.config.get_output_dir() + '/keras_model.h5'
        keras_model.save(model_path)
        return dumper.represent_scalar(u'!keras_model', model_path)

    try:
        from tensorflow.keras import Model as KerasModel
        yaml.add_multi_representer(KerasModel, keras_model_representer)
    except ImportError:
        # Was a bare ``except: pass``: only a missing/broken tensorflow
        # install should be tolerated; other errors must propagate.
        pass

    # Renamed the handle from ``file`` to avoid shadowing the builtin.
    with open(model.config.get_output_dir() + '/' + config_filename,
              'w') as outfile:
        yaml.dump(model.config.config, outfile)
def init_yaml_handling():
    """
    Allow load/dump of our custom classes in YAML.
    """
    def simpleobject_representer(dumper, data):
        """
        Output the properties of a SimpleObject implementation as a map.

        We deliberately output in constructor-arg order for human readability.

        eg. the document id should be at the top of the document.

        :type dumper: yaml.representer.BaseRepresenter
        :type data: ptype.SimpleObject
        :rtype: yaml.nodes.Node
        """
        k_v = [(_clean_identifier(k), v) for k, v in data.items_ordered()]
        return dumper.represent_mapping(u'tag:yaml.org,2002:map', k_v)

    def ordereddict_representer(dumper, data):
        """
        Output an OrderedDict as a dict. The order is purely for
        readability of the document.

        :type dumper: yaml.representer.BaseRepresenter
        :type data: collections.OrderedDict
        :rtype: yaml.nodes.Node
        """
        return dumper.represent_mapping(u'tag:yaml.org,2002:map',
                                        data.items())

    def uuid_representer(dumper, data):
        """
        Output a UUID as its canonical string form.

        :type dumper: yaml.representer.BaseRepresenter
        :type data: uuid.UUID
        :rtype: yaml.nodes.Node
        """
        return dumper.represent_scalar(u'tag:yaml.org,2002:str', '%s' % data)

    def unicode_representer(dumper, data):
        """
        It's strange that PyYaml doesn't use unicode internally.
        We're doing everything in UTF-8 so we translate.

        :type dumper: yaml.representer.BaseRepresenter
        :type data: unicode
        :rtype: yaml.nodes.Node
        """
        return dumper.represent_scalar(u'tag:yaml.org,2002:str',
                                       data.encode('utf-8'))

    yaml.add_multi_representer(ptype.SimpleObject, simpleobject_representer)
    yaml.add_multi_representer(uuid.UUID, uuid_representer)
    # TODO: This probably shouldn't be performed globally as it changes the
    # output behaviour for a built-in type.
    # (although the default behaviour doesn't seem very widely useful: it
    # outputs as a list.)
    yaml.add_multi_representer(collections.OrderedDict,
                               ordereddict_representer)
    if compat.PY2:
        # 'unicode' is undefined in python 3
        # pylint: disable=undefined-variable
        yaml.add_representer(unicode, unicode_representer)
def register_yaml(cls):
    """Hook ``cls`` into PyYAML: instances (including subclasses) dump
    via ``cls.to_yaml`` and tags under ``cls.yaml_tag_prefix`` load via
    ``cls.from_yaml``."""
    yaml.add_multi_constructor(cls.yaml_tag_prefix, cls.from_yaml)
    yaml.add_multi_representer(cls, cls.to_yaml)
Raise ValueError on validation error. Raise OSError on read error. """ # raise OSError, ValueError with path.open("rb") as f: jsonish = yaml.safe_load(f) # raise ValueError return load_spec(jsonish) if __name__ == "__main__": import sys filename = sys.argv[1] path = Path(filename) spec = load_spec_file(path) def repr_ParamField(dumper, data): return dumper.represent_mapping(type(data).__name__, asdict(data)) def repr_NamedTuple(dumper, data): return dumper.represent_mapping(type(data).__name__, data._asdict()) yaml.add_multi_representer(ParamField, repr_ParamField) yaml.add_multi_representer(tuple, repr_NamedTuple) print(yaml.dump(spec))
self.domains: typing.Dict[str, Domain] = DefaultDict(Domain) @property def hosts(self) -> typing.Iterator[typing.Tuple[str, str, str]]: for domain in self.domains: for host in self.domains[domain].hosts: yield domain, host @property def plugins(self) -> typing.Iterator[typing.Tuple[str, str, str]]: for domain in self.domains: for host in self.domains[domain].hosts: for plugin in self.domains[domain].hosts[host].plugins: yield domain, host, plugin @property def fields(self) -> typing.Iterator[typing.Tuple[str, str, str, str]]: for domain in self.domains: for host in self.domains[domain].hosts: for plugin in self.domains[domain].hosts[host].plugins: for field in self.domains[domain].hosts[host].plugins[ plugin].fields: yield domain, host, plugin, field def represent_node(dumper: Dumper, data: DefaultDict): return dumper.represent_dict(data) yaml.add_multi_representer(DefaultDict, represent_node)
# NOTE(review): the staticmethods below are the tail of class Config,
# whose header lies outside this chunk; indentation restored accordingly.
    @staticmethod
    def parse(path):
        """Load the YAML file at ``path`` and wrap it in a Config."""
        with open(path, 'r') as f:
            return Config(yaml.safe_load(f))

    @staticmethod
    def yaml_representer(dumper, value):
        # Dump only externally visible items (no internal keys, no
        # fallback-provided values).
        return dumper.represent_data(
            value._items(allow_internal=False, allow_fallback=False))

    def dump(self, path):
        """Serialize this Config to a YAML file at ``path``."""
        with open(path, 'w+') as f:
            yaml.dump(self, f)


# Multi-representer: covers Config and every subclass (e.g. DefaultConfig).
yaml.add_multi_representer(Config, Config.yaml_representer)


class DefaultConfig(Config):
    # Config variant that coerces first-time values through a
    # ``default`` factory before storing them.

    def __init__(self, default=None):
        super().__init__()
        # Write through __dict__ to bypass Config's attribute handling.
        self.__dict__['_default'] = default

    def set(self, key, value):
        if key not in self.keys(allow_fallback=False):
            # First assignment of this key: normalize via the factory.
            value = self._default(value)
            return super().set(key, value)
        else:
            return super().set(key, value)
del params["name"] del params["exec"] super(DictBlock, self).__init__(name, exe, **params) class Pipeline(object): def __init__(self, settings=None): if settings is None: settings = {} self.pipeline = {} self.settings = settings def __repr__(self) -> str: out = { "version": VERSION, "pipeline": self.pipeline, "settings": self.settings } return yaml.safe_dump(out, default_flow_style=False) def load_from_file(self, fn: str): for k, v in yaml.load(open(fn, "r")): self.pipeline[k] = DictBlock(v) def add(self, block: Block): self.pipeline[block.name] = block yaml.add_multi_representer(Block, yaml.dumper.Representer.represent_dict) yaml.SafeDumper.add_multi_representer(Block, yaml.SafeDumper.represent_dict)
# NOTE(review): the lines below are the tail of an item-access method of
# a schema-aware list class whose header lies outside this chunk.
        if items_class is not None:
            # Wrap the raw value in the schema-aware item class and
            # cache it back into the container.
            value = items_class(value)
            self[index] = value
        return value


class SchemaAwareString(str, SchemaAware):
    """
    Schema aware string.
    """
    pass


try:
    # If YAML is installed (see extras_require), ensure that base classes are
    # registered as multi representers; otherwise, dumping the object model
    # to YAML won't work properly as primitives.
    #
    # Note that loading from YAML is not recommended because numeric dictionary
    # keys (e.g. for HTTP status codes) are not forced to be strings in YAML
    # and pattern matching in `jsonschema` does not work on numbers.
    from yaml import add_multi_representer
    from yaml.representer import SafeRepresenter

    add_multi_representer(SchemaAwareDict, SafeRepresenter.represent_dict)
    add_multi_representer(SchemaAwareList, SafeRepresenter.represent_list)
    add_multi_representer(SchemaAwareString, SafeRepresenter.represent_str)
except ImportError:
    pass
import yaml import logging from bs4 import PageElement from .base import OutputBase logger = logging.getLogger(__name__) def PageElement_representer(dumper, data): return dumper.represent_data(None) yaml.add_multi_representer(PageElement, PageElement_representer) class Yaml(OutputBase): def __init__(self, yaml_path): logger.info("Output set to {}".format(yaml_path)) self.yaml_path = yaml_path self.all_objects = [] def output_tweets(self, tweets): self.all_objects += tweets def output_users(self, users): self.all_objects += users def stop(self): with open(self.yaml_path, 'w') as f: yaml.dump([u for u in self.all_objects],
# NOTE(review): the argument list below is the tail of a call in a
# method whose beginning lies outside this chunk.
            name=self.plugin.name,
            variant=self.variant_name,
            variant_labels=self.plugin.variant_labels,
        )


class YAMLEnum(str, Enum):
    # str-based Enum whose members stringify and dump as their plain value.

    def __str__(self):
        return self.value

    @staticmethod
    def yaml_representer(dumper, obj):
        # Emit the member as a plain YAML string scalar.
        return dumper.represent_scalar("tag:yaml.org,2002:str", str(obj))


# Multi-representer: covers YAMLEnum and every subclass (e.g. PluginType).
yaml.add_multi_representer(YAMLEnum, YAMLEnum.yaml_representer)


class PluginType(YAMLEnum):
    EXTRACTORS = "extractors"
    LOADERS = "loaders"
    TRANSFORMS = "transforms"
    MODELS = "models"
    DASHBOARDS = "dashboards"
    ORCHESTRATORS = "orchestrators"
    TRANSFORMERS = "transformers"
    FILES = "files"

    def __str__(self):
        return self.value
def dump_anydict_as_map_inheriting(anydict):
    """Register ``_represent_dictorder`` for ``anydict`` and all of its
    subclasses on PyYAML's default dumper."""
    yaml.add_multi_representer(anydict, _represent_dictorder)
# TODO handle core Resource attributes: CreationPolicy, DeletionPOlicy, DependsON, Name, Metadata, Properties, UpdatePolicy def get_logical_name(self): # TODO figure out based on the object type (eg. Resource, Parameter) and subtype (eg. RoleName vs. Name), where an answer is possible raise NotImplementedError() class Function(object): """Base class to represent an AWS Cloud Formation function.""" def as_yaml_node(self, dumper): """Convert this instance to a PyYAML node.""" raise NotImplementedError( "as_yaml_node() not implemented in abstract class") yaml.add_multi_representer(Function, lambda dumper, data: data.as_yaml_node(dumper)) class Resource(AWSObject): """Base class to represent a single resource in AWS Cloud Formation.""" AWS_RESOURCE_TYPE = None def __init__(self, *args, **kwargs): properties = kwargs.get("Properties", None) if properties is not None: del kwargs["Properties"] AWSObject.__init__(self, *args, **kwargs) for key, value in properties.items():
def multi_constructor(loader, tag_suffix, node): tagname = str(tag_suffix) tagname = re_compatibility.sub('pf.', tagname) cls = g_tagname_to_class[tagname] kwargs = dict(loader.construct_mapping(node, deep=True).iteritems()) o = cls(**kwargs) o.validate(regularize=True, depth=1) return o def dict_noflow_representer(dumper, data): return dumper.represent_mapping('tag:yaml.org,2002:map', data, flow_style=False) yaml.add_multi_representer(Object, multi_representer, Dumper=SafeDumper) yaml.add_multi_constructor('!', multi_constructor, Loader=SafeLoader) yaml.add_representer(dict, dict_noflow_representer, Dumper=SafeDumper) class Constructor(object): def __init__(self, add_namespace_maps=False, strict=False): self.stack = [] self.queue = [] self.namespaces = {} self.namespaces_rev = {} self.add_namespace_maps = add_namespace_maps self.strict = strict def start_element(self, name, attrs): name = name.split()[-1] if self.stack and self.stack[-1][1] is not None:
def var_entity_presenter(dumper, data): def representer(val): return yaml.dump(val, indent=data.indent, allow_unicode=True, default_flow_style=False, Dumper=StringDumper) data.renderer = representer if hasattr(dumper, 'represent_unicode'): return dumper.represent_unicode(data) else: return dumper.represent_str(data) yaml.add_multi_representer(VarEntity, var_entity_presenter, Dumper=BlockDumper) yaml.add_multi_representer(VarEntity, var_entity_presenter, Dumper=SafeDumper) def yaml_safe_dump(*args, **kwargs): stream = FakeStringIO() kwargs['stream'] = stream kwargs['default_flow_style'] = False kwargs['allow_unicode'] = True kwargs['Dumper'] = SafeDumper yaml.dump(*args, **kwargs) return stream.get_value() def yaml_dump(*args, **kwargs): stream = FakeStringIO()
def root_representer(dumper: yaml.Dumper, data: YAMLRoot):
    """
    YAML callback -- represent a YAMLRoot while dropping empty values.

    Attributes with a leading underscore, None values, and empty
    dicts/lists are omitted from the emitted mapping.

    @param dumper: data dumper
    @param data: data to be dumped
    @return: representation node for the filtered mapping
    """
    def _keep(key, val):
        if key.startswith('_') or val is None:
            return False
        # An empty container counts as absent; anything else is kept.
        return bool(val) if isinstance(val, (dict, list)) else True

    filtered = {k: v for k, v in data.__dict__.items() if _keep(k, v)}
    return dumper.represent_data(filtered)


yaml.add_multi_representer(YAMLRoot, root_representer)


def as_yaml(schema: YAMLRoot) -> str:
    """
    Return schema in a YAML representation

    :param schema: YAML object
    :return: Stringified representation
    """
    # TODO: figure out how do to a safe dump;
    # def default_representer(_, data) -> str:
    #     return ScalarNode(None, str(data))
    # SafeDumper.add_representer(None, default_representer)
    return yaml.dump(schema)
orig_doc = "" if hasattr(func, "__doc__") and func.__doc__ is not None: orig_doc = func.__doc__ func.__doc__ = """.. admonition:: Variable Helper Can be used inside configuration files. """ + orig_doc func.__is_variable_helper = True return func try: import yaml def ycd_representer(dumper, data): return dumper.represent_mapping('!' + data.__class__.__name__, data.items()) yaml.add_multi_representer(YamlConfigDocument, ycd_representer) except ImportError: pass # Public classes and functions __all__ = [ 'YamlConfigDocument', 'DocReference', 'variable_helper', 'load_multiple_yml', 'ConfigcrunchError', 'ReferencedDocumentNotFound', 'CircularDependencyError', 'VariableProcessingError', 'InvalidDocumentError', 'InvalidHeaderError', 'InvalidRemoveError' ]
else: warnings.warn(UserWarning('Too many dots in name {!r}.'.format(k))) def __getstate__(self): return self._values_, self._chain_ def __setstate__(self, value): v, c = value self._values_ = v self._chain_ = c if c is not None: c.value_changed += self.__on_chained_value_change ConfigGroup = Settings # deprecated alias import pathlib import pprint Conversions.register(pathlib.Path, pathlib.Path) Conversions.register(tuple, tuple) def _path_rep(dumper, data): return dumper.represent_scalar('!path', str(data)) def _path_cons(loader, node): return pathlib.Path(loader.construct_scalar(node)) yaml.add_multi_representer(pathlib.Path, _path_rep) yaml.add_constructor('!path', _path_cons)
"options": condition.options.to_dict() }) def condition_constructor(loader, tag_suffix, data): mapping = loader.construct_mapping(data, deep=True) condition_class = conditions.get_conditions()[tag_suffix] return condition_class(loader._ai_state, mapping['options'], mapping['name']) yaml.add_representer(tasks.Task, task_representer) yaml.add_constructor("!task", task_constructor) yaml.add_representer(tasks.TaskScript, script_representer) yaml.add_multi_constructor("!script/", script_constructor) yaml.add_multi_representer(conditions.Condition, condition_representer) yaml.add_multi_constructor("!condition/", condition_constructor) dump = yaml.dump def load(stream, ai_state=None): loader = yaml.Loader(stream) loader._ai_state = ai_state try: return loader.get_single_data() finally: loader.dispose() def load_state(stream, tasks):
# if not node.value: # return node.tag # return f"{node.tag} {node.value}" def yaml_representer(dumper, data): return data # Empty string means all custom tags are handled by yaml_constructor() # the constructor function handles what tags are specific to this # script and which aren't. For example, !Cloudformation must be handled # by this script, and non-local tags like !Sub, or !Ref must be passed # kept as is. yaml.add_multi_constructor("", yaml_constructor) yaml.add_multi_representer(yaml.nodes.Node, yaml_representer) def get_aws_stack_output(stack, output): if not STACK_CACHE.get(stack): STACK_CACHE[stack] = AWSSession().resource.Stack(stack) for stack_output in STACK_CACHE[stack].outputs: if stack_output["OutputKey"] == output: return stack_output["OutputValue"] def get_azure_stack_output(resource_group, deployment, output, subscription=None):
def configure():
    """Wire the Step class into PyYAML: dump via step_multi_representer,
    load '!step'-tagged nodes via step_multi_constructor."""
    yaml.add_multi_constructor('!step', step_multi_constructor)
    yaml.add_multi_representer(Step, step_multi_representer)
def _setup_yaml():
    """Have custom dict types produce standard format YAML output for dicts"""
    # One registration per type; the multi-representer also covers
    # subclasses of each.
    for mapping_type in (OrderedDict, Bunch):
        yaml.add_multi_representer(mapping_type, _dict_representer)
if executor.options: return dumper.represent_mapping( 'tag:yaml.org,2002:map', { 'name': executor.full_name, 'options': executor.options, }, ) else: return dumper.represent_scalar( 'tag:yaml.org,2002:str', executor.full_name, ) yaml.add_multi_representer(BaseExecutor, executor_representer) class ShellExecutor(BaseExecutor): """ An executor that execute commands through the system shell. """ def execute_one(self, environment, command, output): # Python 2 subprocess doesn't deal well with unicode commands. command = (command.encode(getpreferredencoding()) if PY2 else command) process = subprocess.Popen( command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True,
return dumper.represent_scalar('tag:yaml.org,2002:null', '') class On: """`on` == `true`, so enable forcing it back to `on`""" @staticmethod def presenter(dumper, data): return dumper.represent_scalar('tag:yaml.org,2002:bool', 'on') def exp_presenter(dumper, data): return dumper.represent_scalar('tag:yaml.org,2002:str', '${{ ' + str(data) + ' }}') yaml.add_multi_representer(exp.Expression, exp_presenter) yaml.add_representer(str, str_presenter) yaml.add_representer(None.__class__, none_presenter) yaml.add_representer(On, On.presenter) def expand_workflow(current_workflow, to_path): src_path = os.path.relpath('/' + str(current_workflow.path), start='/' + str(os.path.dirname(to_path))) if isinstance(current_workflow, LocalFilePath): dst_path = current_workflow.repo_root / to_path else: dst_path = to_path workflow_filepath = get_filepath(current_workflow, './' + str(current_workflow.path))
# bool values are valid and should be forwarded if v is not False: continue # empty canonicals should be skipped if isinstance(v, Canonical) and not dict(v): continue yield (k, v) def __len__(self): return len(self.__dict__) def __contains__(self, obj): return obj in self.__dict__ def update(self, other): other = Canonical.as_canonical(other) for k, v in other.items(): setattr(self, k, v) @classmethod def yaml(cls, dumper, obj): return dumper.represent_mapping( "tag:yaml.org,2002:map", Canonical.as_canonical(obj), flow_style=False ) yaml.add_multi_representer(Canonical, Canonical.yaml)
for a in t.__args__: a_errors = check(a, v, prefix) if not a_errors: return [] errors = errors + a_errors return errors if t_orig is collections.abc.Callable: if not callable(v): errors.append(f"{prefix}expected `{t}` received `{type(v)}`") return errors if not isinstance(v, t): errors.append(f"{prefix}expected `{t}` received `{type(v)}`") return errors return errors def Struct_representer(dumper, self): fields = self.__yaml__() return dumper.represent_mapping(f"!{self.__class__.__name__}", list(fields.items())) yaml.add_multi_representer(Struct, Struct_representer) def cached(f): return functools.lru_cache(maxsize=None, typed=True)(f) undefined = object()
def install(max_inline=2):
    """Install a YamlSmartSeqRepresenter for lists on the default dumper.

    ``max_inline`` is forwarded to the representer -- presumably the
    maximum sequence length that may be rendered inline; confirm at the
    representer's definition.
    """
    representer = YamlSmartSeqRepresenter(max_inline)
    # add_representer handles ``list`` exactly; the multi variant also
    # catches list subclasses.
    yaml.add_representer(list, representer)
    yaml.add_multi_representer(list, representer)
value = data.isoformat() return dumper.represent_scalar("tag:yaml.org,2002:timestamp", value) def enum_representer(dumper, data): return dumper.represent_str(str(data.value)) yaml.add_representer(np.int64, int_representer, Dumper=yaml.SafeDumper) yaml.add_representer(np.integer, int_representer, Dumper=yaml.SafeDumper) yaml.add_representer(np.float64, float_representer, Dumper=yaml.SafeDumper) yaml.add_representer(np.floating, float_representer, Dumper=yaml.SafeDumper) yaml.add_representer(np.ndarray, numpy_representer_seq, Dumper=yaml.SafeDumper) yaml.add_representer(np.datetime64, date_representer, Dumper=yaml.SafeDumper) yaml.add_representer(Timestamp, date_representer, Dumper=yaml.SafeDumper) yaml.add_multi_representer(enum.Enum, enum_representer, Dumper=yaml.SafeDumper) def dict_to_yaml(struct): try: data = yaml.safe_dump(struct, default_flow_style=False, sort_keys=False) except RepresenterError as exc: raise ValueError( f"error: data result cannot be serialized to YAML, {exc}") return data # solve numpy json serialization class MyEncoder(json.JSONEncoder):
class SerialisationError(Exception):
    pass


class MultilineString(str):
    """
    A string that will be preserved in multi-line format in yaml.
    """
    pass


def literal_presenter(dumper, data):
    # Literal block style ('|') keeps embedded newlines intact.
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')


yaml.add_representer(MultilineString, literal_presenter)


def as_string_representer(dumper, data):
    """
    Represent the object via its string form.

    :type dumper: yaml.representer.BaseRepresenter
    :type data: pathlib.Path
    :rtype: yaml.nodes.Node
    """
    return dumper.represent_scalar(u'tag:yaml.org,2002:str', '%s' % data)


yaml.add_multi_representer(pathlib.Path, as_string_representer)