def _to_wf(d: dict) -> Workflow:
    """Convert dict to Workflow

    :param d: Workflow represented as a dict
    :type d: dict
    :raises PegasusError: encountered error parsing
    :return: a Workflow object based on d
    :rtype: Workflow
    """
    try:
        wf = Workflow(d["name"], infer_dependencies=False)

        # add rc (inline replica catalog is optional)
        if "replicaCatalog" in d:
            wf.replica_catalog = _to_rc(d["replicaCatalog"])

        # add tc (inline transformation catalog is optional)
        if "transformationCatalog" in d:
            wf.transformation_catalog = _to_tc(d["transformationCatalog"])

        # add sc (inline site catalog is optional)
        if "siteCatalog" in d:
            wf.site_catalog = _to_sc(d["siteCatalog"])

        # add jobs
        for j in d["jobs"]:
            # create appropriate job based on type
            if j["type"] == "job":
                job = Job(
                    j["name"],
                    _id=j["id"],
                    node_label=j.get("nodeLabel"),
                    namespace=j.get("namespace"),
                    version=j.get("version"),
                )
            elif j["type"] in {"pegasusWorkflow", "condorWorkflow"}:
                f = File(j["file"])
                # a condorWorkflow is an already-planned sub-workflow
                is_planned = j["type"] == "condorWorkflow"
                job = SubWorkflow(
                    f, is_planned, _id=j["id"], node_label=j.get("nodeLabel")
                )
            else:
                # unknown job type; converted to PegasusError below
                raise ValueError

            # add args
            job.args = list(j["arguments"])

            # add uses
            uses = set()
            for u in j["uses"]:
                f = File(u["lfn"], size=u.get("size"))
                try:
                    f.metadata = u["metadata"]
                except KeyError:
                    # per-use metadata is optional
                    pass

                uses.add(
                    _Use(
                        f,
                        getattr(_LinkType, u["type"].upper()),
                        stage_out=u.get("stageOut"),
                        register_replica=u.get("registerReplica"),
                        bypass_staging=u.get("bypass"),
                    )
                )
            job.uses = uses

            # set stdin/stdout/stderr: each stream, when given, must refer
            # to the lfn of one of this job's uses
            for stream in ("stdin", "stdout", "stderr"):
                if stream in j:
                    for u in job.uses:
                        if u.file.lfn == j[stream]:
                            setattr(job, stream, u.file)
                            break

            # add profiles
            if j.get("profiles"):
                job.profiles = defaultdict(dict, j.get("profiles"))

            # add metadata
            if j.get("metadata"):
                job.metadata = j.get("metadata")

            # add hooks
            if j.get("hooks"):
                job.hooks = defaultdict(list, j.get("hooks"))

            # add job to wf
            wf.add_jobs(job)

        # add dependencies
        if d.get("jobDependencies"):
            dependencies = defaultdict(_JobDependency)
            for item in d.get("jobDependencies"):
                dependencies[item["id"]] = _JobDependency(
                    item["id"], set(item["children"])
                )

            wf.dependencies = dependencies

        # add profiles
        if d.get("profiles"):
            wf.profiles = defaultdict(dict, d.get("profiles"))

        # add metadata
        if d.get("metadata"):
            wf.metadata = d.get("metadata")

        # add hooks
        if d.get("hooks"):
            wf.hooks = defaultdict(list, d.get("hooks"))

        return wf
    except (KeyError, ValueError) as e:
        # chain the underlying error so the root cause isn't lost (PEP 3134)
        raise PegasusError("error parsing {}".format(d)) from e
def test_eq(self):
    """Equality is determined by the wrapped file, not the link type."""
    input_a = _Use(File("a"), _LinkType.INPUT)
    output_a = _Use(File("a"), _LinkType.OUTPUT)
    input_b = _Use(File("b"), _LinkType.INPUT)

    # same lfn, different link types -> equal
    assert input_a == output_a
    # different lfns -> not equal
    assert input_a != input_b
def test_eq_invalid(self):
    """Comparing a _Use with a non-_Use object must raise ValueError."""
    with pytest.raises(ValueError) as e:
        _Use(File("a"), _LinkType.INPUT) == "use"

    # check the raised exception itself; str(e) on the ExceptionInfo
    # wrapper includes file/line text and could match spuriously
    assert "_Use cannot be compared with" in str(e.value)
def test_invalid_use_bad_link_type(self):
    """Passing a non-_LinkType link must raise TypeError."""
    with pytest.raises(TypeError) as e:
        _Use(File("a"), "link")

    # assert against the exception value, not the ExceptionInfo wrapper
    assert "invalid link_type: link;" in str(e.value)
def test_invalid_use_bad_file(self):
    """Passing a non-File object as the file must raise TypeError."""
    with pytest.raises(TypeError) as e:
        _Use(123, _LinkType.INPUT)

    # assert against the exception value, not the ExceptionInfo wrapper
    assert "invalid file: 123; file must be of type File" in str(e.value)
def test_valid_use(self):
    """A _Use built from a File and a valid link type is truthy."""
    use = _Use(File("a"), _LinkType.INPUT)
    assert use
class Test_Use:
    """Tests for the internal _Use wrapper (a File plus its link type)."""

    def test_valid_use(self):
        assert _Use(File("a"), _LinkType.INPUT)

    def test_invalid_use_bad_file(self):
        with pytest.raises(TypeError) as e:
            _Use(123, _LinkType.INPUT)

        # assert against the exception value, not the ExceptionInfo
        # wrapper; str(e) includes file/line text
        assert "invalid file: 123; file must be of type File" in str(e.value)

    def test_invalid_use_bad_link_type(self):
        with pytest.raises(TypeError) as e:
            _Use(File("a"), "link")

        assert "invalid link_type: link;" in str(e.value)

    def test_eq(self):
        # equality is based on the wrapped file, not the link type
        assert _Use(File("a"), _LinkType.INPUT) == _Use(File("a"), _LinkType.OUTPUT)
        assert _Use(File("a"), _LinkType.INPUT) != _Use(File("b"), _LinkType.INPUT)

    def test_eq_invalid(self):
        with pytest.raises(ValueError) as e:
            _Use(File("a"), _LinkType.INPUT) == "use"

        assert "_Use cannot be compared with" in str(e.value)

    @pytest.mark.parametrize(
        "use, expected",
        [
            (
                _Use(File("a"), _LinkType.INPUT, stage_out=None, register_replica=False),
                {
                    "lfn": "a",
                    "type": "input",
                    "registerReplica": False,
                },
            ),
            (
                _Use(File("a"), _LinkType.INPUT, stage_out=None, register_replica=None),
                {
                    "lfn": "a",
                    "type": "input",
                },
            ),
            (
                _Use(
                    File("a", size=2048).add_metadata(createdBy="ryan"),
                    _LinkType.OUTPUT,
                    stage_out=False,
                    register_replica=True,
                ),
                {
                    "lfn": "a",
                    "size": 2048,
                    "metadata": {"createdBy": "ryan"},
                    "type": "output",
                    "stageOut": False,
                    "registerReplica": True,
                },
            ),
            (
                _Use(
                    File("a", size=1024),
                    _LinkType.CHECKPOINT,
                    stage_out=True,
                    register_replica=True,
                ),
                {
                    "lfn": "a",
                    "size": 1024,
                    "type": "checkpoint",
                    "stageOut": True,
                    "registerReplica": True,
                },
            ),
        ],
    )
    def test_tojson(self, use, expected):
        # round-trip through the custom encoder; None-valued fields
        # (e.g. stageOut) must be omitted from the serialized form
        result = json.loads(json.dumps(use, cls=_CustomEncoder))
        assert result == expected
def test_add_checkpoint(self):
    """add_checkpoint registers the given file as a CHECKPOINT use."""
    checkpoint_file = File("checkpoint")
    job = AbstractJob()
    job.add_checkpoint(checkpoint_file)

    expected_use = _Use(File("checkpoint"), _LinkType.CHECKPOINT)
    assert expected_use in job.uses