def yaml(self):
    """Return the captured standard out parsed as YAML, or ``None``.

    The parse is performed lazily on first access and memoized in
    ``self._yaml``; if no stdout was captured, ``None`` is returned.
    """
    # Nothing captured on stdout -> nothing to parse.
    if not self._stdout_bytes:
        return None
    # Parse once, then serve the cached result on later accesses.
    if not self._yaml:
        self._yaml = yaml.load(self.output)
    return self._yaml
def loads(s: str, *args, **kwargs) -> Workflow:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a Workflow document) to a
    :py:class:`~Pegasus.api.workflow.Workflow` object.

    :param s: string to load from
    :type s: str
    :return: deserialized Workflow object
    :rtype: Workflow
    """
    # NOTE(review): extra *args/**kwargs mirror json.loads for API symmetry
    # but are not forwarded to the parser.
    parsed = yaml.load(s)
    return _to_wf(parsed)
def load(fp: TextIO, *args, **kwargs) -> Workflow:
    """
    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    a Workflow document) to a
    :py:class:`~Pegasus.api.workflow.Workflow` object.

    :param fp: file like object to load from
    :type fp: TextIO
    :return: deserialized Workflow object
    :rtype: Workflow
    """
    # NOTE(review): extra *args/**kwargs are accepted for API symmetry but
    # are not forwarded to the parser.
    document = yaml.load(fp)
    return _to_wf(document)
def loads(s: str, *args, **kwargs) -> TransformationCatalog:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a TransformationCatalog document) to a
    :py:class:`~Pegasus.api.transformation_catalog.TransformationCatalog` object.

    :param s: string to load from
    :type s: str
    :return: deserialized TransformationCatalog object
    :rtype: TransformationCatalog
    """
    # NOTE(review): extra *args/**kwargs mirror json.loads for API symmetry
    # but are not forwarded to the parser.
    parsed = yaml.load(s)
    return _to_tc(parsed)
def load(fp: TextIO, *args, **kwargs) -> TransformationCatalog:
    """
    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    a TransformationCatalog document) to a
    :py:class:`~Pegasus.api.transformation_catalog.TransformationCatalog` object.

    :param fp: file like object to load from
    :type fp: TextIO
    :return: deserialized TransformationCatalog object
    :rtype: TransformationCatalog
    """
    # NOTE(review): extra *args/**kwargs are accepted for API symmetry but
    # are not forwarded to the parser.
    document = yaml.load(fp)
    return _to_tc(document)
def loads(s: str, *args, **kwargs) -> SiteCatalog:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a SiteCatalog document) to a
    :py:class:`~Pegasus.api.site_catalog.SiteCatalog` object.

    :param s: string to load from
    :type s: str
    :return: deserialized SiteCatalog object
    :rtype: SiteCatalog
    """
    # NOTE(review): extra *args/**kwargs mirror json.loads for API symmetry
    # but are not forwarded to the parser.
    parsed = yaml.load(s)
    return _to_sc(parsed)
def load(fp: TextIO, *args, **kwargs) -> SiteCatalog:
    """
    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    a SiteCatalog document) to a
    :py:class:`~Pegasus.api.site_catalog.SiteCatalog` object.

    :param fp: file like object to load from
    :type fp: TextIO
    :return: deserialized SiteCatalog object
    :rtype: SiteCatalog
    """
    # NOTE(review): extra *args/**kwargs are accepted for API symmetry but
    # are not forwarded to the parser.
    document = yaml.load(fp)
    return _to_sc(document)
def load_wf_inputs(input_spec_file_path: str) -> dict:
    """Parse the workflow input spec YAML file and return its contents.

    Exits the process with status 1 (after logging) when the file does
    not exist.

    :param input_spec_file_path: path to the workflow inputs YAML file
    :return: parsed workflow inputs
    :rtype: dict
    """
    try:
        with open(input_spec_file_path) as spec_file:
            wf_inputs = yaml.load(spec_file)
            log.info(f"Loaded workflow inputs file: {input_spec_file_path}")
    except FileNotFoundError:
        # A missing spec file is fatal for this tool; log and bail out.
        log.exception(f"Unable to find {input_spec_file_path}")
        sys.exit(1)
    return wf_inputs
def read_workflows(wf_gallery, site):
    """Return the training workflows from the gallery that can run on *site*.

    :param wf_gallery: path to the workflow gallery YAML file
    :param site: site object; ``site.name`` must appear in a workflow's
        ``execution_sites`` list for that workflow to be included
    :return: workflows sorted by ``(organization, repo_name)``, keyed by a
        1-based menu index: ``{1: wf, 2: wf, ...}``
    :rtype: dict
    """
    # Use a context manager: the previous bare open() leaked the file handle.
    with open(wf_gallery) as f:
        data = yaml.load(f)

    available = [
        x
        for x in data
        if "training" in x and x["training"] is True and site.name in x["execution_sites"]
    ]
    available.sort(key=lambda x: (x["organization"], x["repo_name"]))

    # 1-based numbering so the selection menu presented to the user starts at 1.
    return {i: wf for i, wf in enumerate(available, start=1)}
def loads(s: str, *args, **kwargs) -> Braindump:
    """
    Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
    containing a Braindump document) to a Python object.

    Extra positional and keyword arguments are forwarded to the YAML parser.

    :param s: braindump document to deserialize
    :type s: str
    :raises ValueError: the document did not parse to a mapping
    :return: the deserialized braindump
    :rtype: Braindump
    """
    parsed = yaml.load(s, *args, **kwargs)
    # A braindump document must be a YAML mapping; anything else is invalid.
    if isinstance(parsed, dict):
        return Braindump(**parsed)
    raise ValueError("Invalid braindump file.")
def load_tr_specs(tr_specs_file_path: str) -> dict:
    """Load and schema-validate the transformation spec YAML file.

    Each top-level key is a transformation name mapping to an object with
    exactly the keys ``site`` (string) and ``is_stageable`` (boolean).
    Exits the process with status 1 (after logging) on a validation error
    or a missing file.

    :param tr_specs_file_path: path to the transformation spec YAML file
    :return: parsed and validated transformation specs
    :rtype: dict
    """
    log.info("Validating {}".format(tr_specs_file_path))
    schema = {
        "type": "object",
        "patternProperties": {
            ".+": {
                "type": "object",
                "properties": {
                    "site": {"type": "string"},
                    "is_stageable": {"type": "boolean"},
                },
                "required": ["site", "is_stageable"],
                # Fixed typo (was "additionalPropertes"): with the key
                # misspelled, unknown properties were silently accepted.
                "additionalProperties": False,
            }
        },
    }
    try:
        with open(tr_specs_file_path) as f:
            specs = yaml.load(f)
            validate(instance=specs, schema=schema)
    except ValidationError:
        log.exception(
            "Invalid transformation spec file. File should be in the following format:\n"
            "\t\t\t<tr name1>:\n"
            "\t\t\t site: <site name>\n"
            "\t\t\t is_stageable: <boolean>\n"
            "\t\t\t<tr name2>:\n"
            "\t\t\t site: <site name>\n"
            "\t\t\t is_stageable: <boolean>\n"
            "\t\t\t...\n")
        sys.exit(1)
    except FileNotFoundError:
        log.exception("Unable to find transformation spec file: {}".format(
            tr_specs_file_path))
        sys.exit(1)
    log.info("Successfully loaded {}".format(tr_specs_file_path))
    return specs
def read_pegasushub_config(wf_dir, workflow):
    """Read a repo's ``.pegasushub.yml`` and normalize its generator entry.

    Guarantees the returned config always has ``config["scripts"]["generator"]``
    set, defaulting to ``"workflow_generator.py"`` when the file is empty or
    the key is missing/blank.

    :param wf_dir: directory (relative to the CWD) containing the cloned repos
    :param workflow: workflow descriptor dict; ``workflow["repo_name"]`` names
        the repo subdirectory
    :return: normalized configuration
    :rtype: dict
    """
    config_path = os.path.join(
        os.getcwd(), wf_dir, workflow["repo_name"], ".pegasushub.yml"
    )
    # Use a context manager: the previous bare open() leaked the file handle.
    # (Also dropped a dead `config = None` that was immediately overwritten.)
    with open(config_path) as f:
        config = yaml.load(f)

    if config:
        if "scripts" in config:
            # Fill in the default generator when the key is absent or blank.
            if ("generator" not in config["scripts"]) or (
                    config["scripts"]["generator"] == ""):
                config["scripts"]["generator"] = "workflow_generator.py"
        else:
            config["scripts"] = {"generator": "workflow_generator.py"}
    else:
        # Empty YAML file parses to None; synthesize a minimal config.
        config = {"scripts": {"generator": "workflow_generator.py"}}
    return config
def parse_yamlfile(fname, include_files):
    """ Parse a DAG from a YAML workflow file. """
    # NOTE(review): `yaml` here is presumably Pegasus's wrapper module;
    # plain PyYAML yaml.load() on untrusted input is unsafe — confirm.
    with open(fname) as f:
        wf = yaml.load(f)
    dag = DAG()
    # Jobs and files share the single dag.nodes namespace: jobs are keyed
    # by their id, files by their lfn. A collision would overwrite a node.
    for job in wf["jobs"]:
        # parse job
        j = Job()
        # compute job
        if job["type"] == "job":
            j.xform = job["name"]
        # subworkflow job
        else:
            j.xform = job["file"]
        j.id = j.label = job["id"]
        dag.nodes[j.id] = j
        # parse uses (files)
        if include_files:
            for use in job["uses"]:
                # Reuse the File node if another job already referenced
                # this lfn; otherwise create and register it.
                # (Rebinds `f`, shadowing the closed file handle above.)
                if use["lfn"] in dag.nodes:
                    f = dag.nodes[use["lfn"]]
                else:
                    f = File()
                    f.id = f.label = use["lfn"]
                    dag.nodes[f.id] = f
                link_type = use["type"]
                if link_type == "input":
                    # job consumes file: file -> job edge
                    j.parents.append(f)
                    f.children.append(j)
                elif link_type == "output":
                    # job produces file: job -> file edge
                    j.children.append(f)
                    f.parents.append(j)
                elif link_type == "inout":
                    # File is both read and written by the same job, which
                    # produces a 2-cycle; edges are added in both directions.
                    print(
                        "WARNING: inout file {} of {} creates a cycle.".format(
                            f.id, j.id))
                    f.children.append(j)
                    f.parents.append(j)
                    j.parents.append(f)
                    j.children.append(f)
                elif link_type == "none":
                    pass
                else:
                    raise Exception(
                        "Unrecognized link value: {}".format(link_type))
    # Wire explicit job-to-job dependency edges; assumes every referenced
    # id was registered in dag.nodes by the loop above.
    for dep in wf["jobDependencies"]:
        for child in dep["children"]:
            dag.nodes[dep["id"]].children.append(dag.nodes[child])
            dag.nodes[child].parents.append(dag.nodes[dep["id"]])
    return dag