def test_workflow_key_ordering_on_yml_write(self):
    """Top-level workflow keys must serialize to YAML in the documented order."""
    workflow = Workflow("wf")
    workflow.add_transformation_catalog(TransformationCatalog())
    workflow.add_replica_catalog(ReplicaCatalog())
    workflow.add_site_catalog(SiteCatalog())
    workflow.add_jobs(Job("t1", _id="a"))
    workflow.add_env(JAVA_HOME="/java/home")
    workflow.add_shell_hook(EventType.START, "/bin/echo hi")
    workflow.add_metadata(key="value")
    workflow.write()

    expected_file = Path("workflow.yml")
    with expected_file.open() as f:
        # Read the raw text: loading into a dict would lose key ordering.
        contents = f.read()
    expected_file.unlink()

    # Expected top-level key order (nested keys ignored):
    #   pegasus, name, hooks, profiles, metadata, siteCatalog,
    #   replicaCatalog, transformationCatalog, jobs, jobDependencies
    key_order = re.compile(
        r"pegasus: '5.0'[\w\W]+name:[\w\W]+hooks:[\w\W]+profiles:[\w\W]+"
        r"metadata:[\w\W]+siteCatalog:[\w\W]+replicaCatalog:[\w\W]+"
        r"transformationCatalog:[\w\W]+jobs:[\w\W]+jobDependencies:[\w\W]+"
    )
    assert key_order.match(contents) is not None
def wf():
    """Build and return a sample Workflow with jobs, sub-workflows, an env
    profile, a shell hook, and metadata attached."""
    workflow = Workflow("wf")

    # Producer job: emits f1/f2, with args and redirected stdio.
    producer = (
        Job("t1", _id="a")
        .add_outputs(File("f1"), File("f2"))
        .add_args(File("do-nothing"), "-n", 1, 1.1)
        .set_stdin("stdin")
        .set_stdout("stdout")
        .set_stderr("stderr")
    )
    # Consumer job: reads f1/f2 and declares a checkpoint file.
    consumer = (
        Job("t1", _id="b")
        .add_inputs(File("f1"), File("f2"))
        .add_checkpoint(File("checkpoint"))
    )
    # One DAG-based and one DAX-based sub-workflow.
    sub_dag = SubWorkflow("subworkflow.dag", True, _id="c").add_args(
        "--sites", "condorpool"
    )
    sub_dax = SubWorkflow(File("subworkflow.dax"), False, _id="d")

    workflow.add_jobs(producer, consumer, sub_dag, sub_dax)
    workflow._infer_dependencies()
    workflow.add_env(JAVA_HOME="/java/home")
    workflow.add_shell_hook(EventType.START, "/bin/echo hi")
    workflow.add_metadata(key="value")
    return workflow