def _write(self, file, _format):
    """Write this object to ``file`` in either YAML or JSON format.

    :param file: file object to write to
    :type file: file
    :param _format: file format; one of "yml", "yaml", or "json"
    :type _format: str
    :raises ValueError: if _format is not one of "yml", "yaml", or "json"
    """
    # Normalize case once so that e.g. "YAML" both passes validation and
    # selects the YAML branch (previously only the validation lowercased,
    # so an upper-case "YAML" was accepted but serialized as JSON).
    _format = _format.lower()
    if _format not in Writable._FORMATS:
        raise ValueError(
            "invalid _ext: {_format}, extension must be one of {formats}".format(
                _format=_format, formats=Writable._FORMATS
            )
        )

    if _format in ("yml", "yaml"):
        # TODO: figure out how to get yaml.dump to recurse down into nested objects
        # yaml.dump(_CustomEncoder().default(self), file, sort_keys=False)
        # Round-trip through JSON (with _CustomEncoder) so yaml.dump only
        # sees plain dicts/lists, preserving key order via OrderedDict.
        yaml.dump(
            json.loads(
                json.dumps(self, cls=_CustomEncoder), object_pairs_hook=OrderedDict
            ),
            file,
            allow_unicode=True,
        )
    else:
        json.dump(self, file, cls=_CustomEncoder, indent=4, ensure_ascii=False)
def test__get_braindump(self):
    """_get_braindump should parse braindump.yml found in the given dir."""
    # create a fake temporary submit dir and braindump.yml file
    with TemporaryDirectory() as td:
        bd_path = Path(td) / "braindump.yml"
        with bd_path.open("w+") as bd_file:
            # The dumped "user" must match the assertion below; the previous
            # value "******" could never satisfy `bd.user == "ryan"`.
            yaml.dump({"user": "ryan", "submit_dir": "/submit_dir"}, bd_file)
            bd_file.seek(0)
            bd = Workflow._get_braindump(bd_path.parent)

        assert bd.user == "ryan"
        assert bd.submit_dir == Path("/submit_dir")
def dump(obj: Braindump, fp: TextIO, *args, **kwargs) -> None:
    """Serialize ``obj`` as a YAML formatted stream to ``fp``
    (a ``.write()``-supporting file-like object).

    Extra positional and keyword arguments are forwarded to ``yaml.dump``.

    :param obj: braindump object to serialize
    :type obj: Braindump
    :param fp: file-like object to write the YAML stream to
    :type fp: TextIO
    :return: None
    :rtype: None
    """
    # attr.asdict recursively converts the attrs-based Braindump into plain
    # dicts/lists that yaml.dump can serialize directly.
    yaml.dump(attr.asdict(obj), fp, *args, **kwargs)
def test_loads_yaml(sc1):
    """Round-trip sc1 through a YAML string and verify it is unchanged."""
    # serialize sc1 -> plain dict -> yaml text, then load it back
    plain = json.loads(json.dumps(sc1, cls=_CustomEncoder))
    reloaded = loads(yaml.dump(plain))

    # both objects must reduce to the same plain JSON representation
    reloaded_repr = json.loads(json.dumps(reloaded, cls=_CustomEncoder))
    original_repr = json.loads(json.dumps(sc1, cls=_CustomEncoder))
    assert reloaded_repr == original_repr
def test_loads_yaml(wf1):
    """Round-trip wf1 through a YAML string and verify it is unchanged."""
    # serialize wf1 -> plain dict -> yaml text, then load it back
    plain = json.loads(json.dumps(wf1, cls=_CustomEncoder))
    reloaded = loads(yaml.dump(plain))

    # compare order-insensitively via sort_parts on the plain representations
    reloaded_repr = json.loads(json.dumps(reloaded, cls=_CustomEncoder))
    original_repr = json.loads(json.dumps(wf1, cls=_CustomEncoder))
    assert sort_parts(reloaded_repr) == sort_parts(original_repr)
def _write(self, file, _format):
    """Write this object to ``file`` in either YAML or JSON format,
    prefixed with an "x-pegasus" metadata section recording the creating
    user, creation time, and API language.

    :param file: file object to write to
    :type file: file
    :param _format: file format; one of "yml", "yaml", or "json"
    :type _format: str
    :raises ValueError: if _format is not one of "yml", "yaml", or "json"
    """
    # Normalize case once so that e.g. "YAML" both passes validation and
    # selects the YAML branch (previously only the validation lowercased,
    # so an upper-case "YAML" was accepted but serialized as JSON).
    _format = _format.lower()
    if _format not in Writable._FORMATS:
        raise ValueError(
            "invalid _ext: {_format}, extension must be one of {formats}".format(
                _format=_format, formats=Writable._FORMATS
            )
        )

    # add file provenance info ahead of the serialized object
    # NOTE(review): strftime formats *local* time from datetime.now() but
    # appends a literal "Z" (UTC designator) — confirm whether UTC was
    # intended before changing the emitted format.
    self_as_dict = OrderedDict(
        [
            (
                "x-pegasus",
                {
                    "createdBy": getpass.getuser(),
                    "createdOn": datetime.now().strftime(r"%m-%d-%yT%H:%M:%SZ"),
                    "apiLang": "python",
                },
            )
        ]
    )
    self_as_dict.update(
        json.loads(
            json.dumps(self, cls=_CustomEncoder), object_pairs_hook=OrderedDict
        )
    )

    if _format in ("yml", "yaml"):
        # TODO: figure out how to get yaml.dump to recurse down into nested objects
        # yaml.dump(_CustomEncoder().default(self), file, sort_keys=False)
        # The JSON round-trip above already flattened self into plain dicts,
        # which yaml.dump can serialize directly.
        yaml.dump(self_as_dict, file, allow_unicode=True)
    else:
        json.dump(
            self_as_dict, file, cls=_CustomEncoder, indent=4, ensure_ascii=False
        )