def test_add_container(self):
    tc = TransformationCatalog()
    tc.add_containers(Container("container", Container.DOCKER, "image", ["mount"]))

    assert len(tc.containers) == 1
    assert "container" in tc.containers
def test_write(self): tc = TransformationCatalog() (tc.add_transformations(Transformation("t1")).add_transformations( Transformation("t2"))) expected = { "pegasus": "5.0", "transformations": [ { "name": "t1", "sites": [] }, { "name": "t2", "sites": [] }, ], } expected["transformations"] = sorted(expected["transformations"], key=lambda t: t["name"]) with NamedTemporaryFile("r+") as f: tc.write(f, _format="json") f.seek(0) result = json.load(f) result["transformations"] = sorted(expected["transformations"], key=lambda t: t["name"]) assert result == expected
def test_write(self):
    tc = TransformationCatalog()
    (
        tc.add_transformations(Transformation("t1")).add_transformations(
            Transformation("t2")
        )
    )

    expected = {
        "pegasus": "5.0",
        "transformations": [
            {"name": "t1", "sites": []},
            {"name": "t2", "sites": []},
        ],
    }
    expected["transformations"] = sorted(
        expected["transformations"], key=lambda t: t["name"]
    )

    with NamedTemporaryFile("r+") as f:
        tc.write(f, _format="json")
        f.seek(0)
        result = json.load(f)
        result["transformations"] = sorted(
            result["transformations"], key=lambda t: t["name"]
        )

    assert "createdOn" in result["x-pegasus"]
    assert result["x-pegasus"]["createdBy"] == getpass.getuser()
    assert result["x-pegasus"]["apiLang"] == "python"
    del result["x-pegasus"]

    assert result == expected
def test_example_transformation_catalog(
    self, convert_yaml_schemas_to_json, load_schema, _format, loader
):
    # validates the sample tc in pegasus/etc/sample-5.0-data/tc.yml
    tc = TransformationCatalog()

    foo = (
        Transformation("foo")
        .add_globus_profile(max_time=2)
        .add_dagman_profile(retry=2)
        .add_metadata(size=2048)
        .add_sites(
            TransformationSite(
                "local",
                "/nfs/u2/ryan/bin/foo",
                True,
                arch=Arch.X86_64,
                os_type=OS.LINUX,
            )
            .add_env(JAVA_HOME="/usr/bin/java")
            .add_metadata(size=2048)
        )
        .add_requirement("bar")
        .add_shell_hook(EventType.START, "/bin/echo 'starting'")
    )

    bar = Transformation("bar").add_sites(
        TransformationSite(
            "local",
            "/nfs/u2/ryan/bin/bar",
            True,
            arch=Arch.X86_64,
            os_type=OS.LINUX,
        )
    )

    centos_pegasus_container = Container(
        "centos-pegasus",
        Container.DOCKER,
        "docker:///ryan/centos-pegasus:latest",
        arguments="--shm-size 123",
        mounts=["/Volumes/Work/lfs1:/shared-data/:ro"],
    ).add_env(JAVA_HOME="/usr/bin/java")

    (tc.add_transformations(foo, bar).add_containers(centos_pegasus_container))

    with NamedTemporaryFile(mode="r+") as f:
        tc.write(f, _format=_format)
        f.seek(0)
        tc_json = loader(f)

        tc_schema = load_schema("tc-5.0.json")
        validate(instance=tc_json, schema=tc_schema)
def test_add_multiple_transformations(self):
    tc = TransformationCatalog()

    t1 = Transformation("name")
    t2 = Transformation("name", namespace="namespace")
    t3 = Transformation("name", namespace="namespace", version="version")

    tc.add_transformations(t1, t2, t3)

    assert "None::name::None" in tc.transformations
    assert "namespace::name::None" in tc.transformations
    assert "namespace::name::version" in tc.transformations
    assert len(tc.transformations) == 3
def test_chaining(self):
    tc = TransformationCatalog()

    (
        tc.add_transformations(Transformation("t1"))
        .add_transformations(Transformation("t2"))
        .add_containers(
            Container("container1", Container.DOCKER, "image", ["mount1", "mount2"])
        )
        .add_containers(
            Container("container2", Container.DOCKER, "image", ["mount1", "mount2"])
        )
    )

    assert "None::t1::None" in tc.transformations
    assert "None::t2::None" in tc.transformations
    assert "container1" in tc.containers
    assert "container2" in tc.containers
def dumps(obj: TransformationCatalog, _format="yml", *args, **kwargs) -> str: """ Serialize ``obj`` to a :py:class:`~Pegasus.api.transformation_catalog.TransformationCatalog` formatted ``str``. :param obj: TransformationCatalog to serialize :type obj: TransformationCatalog :param _format: format to write to if fp does not have an extension; can be one of ["yml" | "yaml" | "json"], defaults to "yml" :type _format: str :return: TransformationCatalog serialized as a string :rtype: str """ with StringIO() as s: obj.write(s, _format=_format) s.seek(0) return s.read()
def test_tojson_no_containers(self, convert_yaml_schemas_to_json, load_schema):
    tc = TransformationCatalog()
    (
        tc.add_transformations(
            Transformation("t1").add_sites(TransformationSite("local", "/pfn", False))
        ).add_transformations(
            Transformation("t2").add_sites(TransformationSite("local2", "/pfn", True))
        )
    )

    expected = {
        "pegasus": PEGASUS_VERSION,
        "transformations": [
            {
                "name": "t1",
                "sites": [{"name": "local", "pfn": "/pfn", "type": "installed"}],
            },
            {
                "name": "t2",
                "sites": [{"name": "local2", "pfn": "/pfn", "type": "stageable"}],
            },
        ],
    }
    expected["transformations"] = sorted(
        expected["transformations"], key=lambda t: t["name"]
    )

    result = json.loads(json.dumps(tc, cls=_CustomEncoder))
    result["transformations"] = sorted(
        result["transformations"], key=lambda t: t["name"]
    )

    tc_schema = load_schema("tc-5.0.json")
    validate(instance=result, schema=tc_schema)

    assert expected == result
def test_write_default(self):
    expected_file = Path("transformations.yml")
    TransformationCatalog().write()

    try:
        expected_file.unlink()
    except FileNotFoundError:
        pytest.fail("could not find {}".format(expected_file))
def dump(
    obj: TransformationCatalog, fp: TextIO, _format="yml", *args, **kwargs
) -> None:
    """
    Serialize ``obj`` as a :py:class:`~Pegasus.api.transformation_catalog.TransformationCatalog` formatted stream to ``fp`` (a ``.write()``-supporting file-like object).

    :param obj: TransformationCatalog to serialize
    :type obj: TransformationCatalog
    :param fp: file-like object to serialize to
    :type fp: TextIO
    :param _format: format to write to if fp does not have an extension; can be one of ["yml" | "yaml" | "json"], defaults to "yml"
    :type _format: str
    :rtype: None
    """
    obj.write(fp, _format=_format)
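# Illustrative usage of dump() above (a sketch, not part of the original
# module): serialize a small catalog to an open file handle. The file name
# "tc.yml" is an arbitrary choice for the example.
tc = TransformationCatalog()
tc.add_transformations(Transformation("t1"))
with open("tc.yml", "w") as f:
    dump(tc, f, _format="yml")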
def wf3():
    wf = Workflow("test")
    wf.add_jobs(Job("ls"))
    wf.add_site_catalog(SiteCatalog())
    wf.add_transformation_catalog(TransformationCatalog())
    wf.add_replica_catalog(ReplicaCatalog())

    return wf
def test_add_transformation_catalog(self):
    tc = TransformationCatalog()
    wf = Workflow("wf")

    try:
        wf.add_transformation_catalog(tc)
    except:
        pytest.fail("should not have raised exception")
def test_add_duplicate_transformation_catalog(self):
    tc = TransformationCatalog()
    wf = Workflow("wf")
    wf.add_transformation_catalog(tc)

    with pytest.raises(DuplicateError) as e:
        wf.add_transformation_catalog(tc)

    assert "a TransformationCatalog has already" in str(e)
def test_add_duplicate_container(self):
    tc = TransformationCatalog()
    tc.add_containers(Container("container", Container.DOCKER, "image", ["mount"]))

    with pytest.raises(DuplicateError):
        tc.add_containers(
            Container("container", Container.DOCKER, "image", ["mount"])
        )
def tc2():
    return (
        TransformationCatalog()
        .add_transformations(
            Transformation("t1", namespace="test", version="1.0").add_sites(
                TransformationSite("local", "/pfn", True)
            )
        )
        .add_containers(
            Container(
                "cont",
                Container.DOCKER,
                "docker:///ryan/centos-pegasus:latest",
                mounts=["/Volumes/Work/lfs1:/shared-data/:ro"],
                image_site="local",
            )
        )
    )
def test_transformation_catalog_ordering_on_yml_write(self):
    tc = TransformationCatalog()
    tc.add_transformations(Transformation("t1"))
    tc.add_containers(Container("c1", Container.DOCKER, "img"))
    tc.write()

    EXPECTED_FILE = Path("transformations.yml")

    with EXPECTED_FILE.open() as f:
        result = f.read()

    EXPECTED_FILE.unlink()

    """
    Check that tc keys have been ordered as follows:
    - pegasus
    - transformations
    - containers
    """
    p = re.compile(r"pegasus: '5.0'[\w\W]+transformations:[\w\W]+containers[\w\W]+")
    assert p.match(result) is not None
def test_workflow_key_ordering_on_yml_write(self):
    tc = TransformationCatalog()
    rc = ReplicaCatalog()
    sc = SiteCatalog()

    wf = Workflow("wf")
    wf.add_transformation_catalog(tc)
    wf.add_replica_catalog(rc)
    wf.add_site_catalog(sc)

    wf.add_jobs(Job("t1", _id="a"))

    wf.add_env(JAVA_HOME="/java/home")
    wf.add_shell_hook(EventType.START, "/bin/echo hi")
    wf.add_metadata(key="value")

    wf.write()
    EXPECTED_FILE = Path("workflow.yml")

    with EXPECTED_FILE.open() as f:
        # reading in as str so ordering of keys is not disrupted
        # when loaded into a dict
        result = f.read()

    EXPECTED_FILE.unlink()

    """
    Check that wf keys have been ordered as follows (while ignoring nested keys):
    - pegasus,
    - name,
    - hooks,
    - profiles,
    - metadata,
    - siteCatalog,
    - replicaCatalog,
    - transformationCatalog,
    - jobs
    - jobDependencies
    """
    p = re.compile(
        r"pegasus: '5.0'[\w\W]+name:[\w\W]+hooks:[\w\W]+profiles:[\w\W]+metadata:[\w\W]+siteCatalog:[\w\W]+replicaCatalog:[\w\W]+transformationCatalog:[\w\W]+jobs:[\w\W]+jobDependencies:[\w\W]+"
    )
    assert p.match(result) is not None
def test_write_wf_catalogs_included(self):
    wf = Workflow("test")
    wf.add_jobs(Job("ls"))

    wf.add_transformation_catalog(TransformationCatalog())
    wf.add_site_catalog(SiteCatalog())
    wf.add_replica_catalog(ReplicaCatalog())

    wf_path = Path("workflow.yml")
    with wf_path.open("w+") as f:
        wf.write(f)
        f.seek(0)
        result = yaml.load(f)

    expected = {
        "pegasus": "5.0",
        "name": "test",
        "siteCatalog": {"sites": []},
        "replicaCatalog": {"replicas": []},
        "transformationCatalog": {"transformations": []},
        "jobs": [
            {
                "type": "job",
                "name": "ls",
                "id": "ID0000001",
                "arguments": [],
                "uses": [],
            }
        ],
        "jobDependencies": [],
    }

    assert expected == result

    wf_path.unlink()
def tc1():
    return (
        TransformationCatalog()
        .add_transformations(
            Transformation("t1", namespace="test", version="1.0")
            .add_sites(
                TransformationSite(
                    "local",
                    "/pfn",
                    True,
                    arch=Arch.X86_64,
                    os_type=OS.LINUX,
                    os_release="1",
                    os_version="1",
                    container="cont",
                )
                .add_dagman_profile(retry="3")
                .add_metadata(JAVA_HOME="/usr/bin/java")
            )
            .add_requirement("t2", namespace="test", version="1.0")
            .add_shell_hook(EventType.START, "echo hello")
        )
        .add_containers(
            Container(
                "cont",
                Container.DOCKER,
                "docker:///ryan/centos-pegasus:latest",
                mounts=["/Volumes/Work/lfs1:/shared-data/:ro"],
                image_site="local",
            ).add_env(JAVA_HOME="/usr/bin/java")
        )
    )
def test_add_single_transformation(self):
    tc = TransformationCatalog()
    tc.add_transformations(Transformation("test"))

    assert "None::test::None" in tc.transformations
    assert len(tc.transformations) == 1
class TestWorkflow:
    @pytest.mark.parametrize(
        "job",
        [
            (Job("t1", _id="job")),
            (SubWorkflow(File("f1"), False, _id="job")),
            (SubWorkflow("f1", True, _id="job")),
        ],
    )
    def test_add_job(self, job):
        wf = Workflow("wf")
        wf.add_jobs(job)

        assert job == wf.get_job("job")

    def test_add_duplicate_job(self):
        wf = Workflow("wf")
        with pytest.raises(DuplicateError):
            wf.add_jobs(Job("t1", _id="j1"), Job("t2", _id="j1"))

    def test_get_job(self):
        wf = Workflow("wf")
        j1 = Job("t1", _id="j1")
        wf.add_jobs(j1)

        assert j1 == wf.get_job("j1")

    def test_get_invalid_job(self):
        wf = Workflow("wf")
        with pytest.raises(NotFoundError):
            wf.get_job("abc123")

    def test_job_id_assignment_by_workflow(self):
        wf = Workflow("wf")
        j1 = Job("t1", _id="a")
        j2 = Job("t2")
        j3 = Job("t3", _id="b")
        j4 = Job("t4")
        j5 = Job("t5")
        wf.add_jobs(j1, j2, j3, j4, j5)

        assert j2._id == "ID0000001"
        assert j4._id == "ID0000002"
        assert j5._id == "ID0000003"

    def test_add_site_catalog(self):
        sc = SiteCatalog()
        wf = Workflow("wf")

        try:
            wf.add_site_catalog(sc)
        except:
            pytest.fail("should not have raised exception")

    def test_add_invalid_site_catalog(self):
        wf = Workflow("wf")
        with pytest.raises(TypeError) as e:
            wf.add_site_catalog(123)

        assert "invalid catalog: 123" in str(e)

    def test_add_duplicate_site_catalog(self):
        sc = SiteCatalog()
        wf = Workflow("wf")
        wf.add_site_catalog(sc)

        with pytest.raises(DuplicateError) as e:
            wf.add_site_catalog(sc)

        assert "a SiteCatalog has already" in str(e)

    def test_add_replica_catalog(self):
        rc = ReplicaCatalog()
        wf = Workflow("wf")

        try:
            wf.add_replica_catalog(rc)
        except:
            pytest.fail("should not have raised exception")

    def test_add_invalid_replica_catalog(self):
        wf = Workflow("wf")
        with pytest.raises(TypeError) as e:
            wf.add_replica_catalog(123)

        assert "invalid catalog: 123" in str(e)

    def test_add_duplicate_replica_catalog(self):
        rc = ReplicaCatalog()
        wf = Workflow("wf")
        wf.add_replica_catalog(rc)

        with pytest.raises(DuplicateError) as e:
            wf.add_replica_catalog(rc)

        assert "a ReplicaCatalog has already" in str(e)

    def test_add_transformation_catalog(self):
        tc = TransformationCatalog()
        wf = Workflow("wf")

        try:
            wf.add_transformation_catalog(tc)
        except:
            pytest.fail("should not have raised exception")

    def test_add_invalid_transformation_catalog(self):
        wf = Workflow("wf")
        with pytest.raises(TypeError) as e:
            wf.add_transformation_catalog(123)

        assert "invalid catalog: 123" in str(e)

    def test_add_duplicate_transformation_catalog(self):
        tc = TransformationCatalog()
        wf = Workflow("wf")
        wf.add_transformation_catalog(tc)

        with pytest.raises(DuplicateError) as e:
            wf.add_transformation_catalog(tc)

        assert "a TransformationCatalog has already" in str(e)

    def test_add_dependency_parents(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        parents = [
            Job("t", _id="parent1"),
            Job("t", _id="parent2"),
            Job("t", _id="parent3"),
        ]

        wf.add_jobs(job, *parents)

        wf.add_dependency(job, parents=[parents[0]])
        wf.add_dependency(job, parents=parents[1:])

        for parent in parents:
            assert wf.dependencies[parent._id] == _JobDependency(
                parent._id, {job._id}
            )

    def test_add_dependency_children(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        children = [
            Job("t", _id="child1"),
            Job("t", _id="child2"),
            Job("t", _id="child3"),
        ]

        wf.add_jobs(job, *children)

        wf.add_dependency(job, children=[children[0]])
        assert wf.dependencies[job._id] == _JobDependency(
            job._id, {children[0]._id}
        )

        wf.add_dependency(job, children=children[1:])
        assert wf.dependencies[job._id] == _JobDependency(
            job._id, {child._id for child in children}
        )

    def test_add_dependency_parents_and_children(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        parents = [Job("t", _id="parent1"), Job("t", _id="parent2")]
        children = [Job("t", _id="child1"), Job("t", _id="child2")]

        wf.add_jobs(*parents, *children)

        # add nothing
        wf.add_dependency(job)
        assert len(wf.dependencies) == 0

        wf.add_dependency(job, parents=parents, children=children)

        for parent in parents:
            assert wf.dependencies[parent._id] == _JobDependency(
                parent._id, {job._id}
            )

        assert wf.dependencies[job._id] == _JobDependency(
            job._id, {child._id for child in children}
        )

    def test_add_duplicate_parent_dependency(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        parent = Job("t", _id="parent")

        wf.add_jobs(job, parent)

        with pytest.raises(DuplicateError) as e:
            wf.add_dependency(job, parents=[parent, parent])

        assert (
            "A dependency already exists between parent id: parent and job id: job"
            in str(e)
        )

    def test_add_duplicate_child_dependency(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        child = Job("t", _id="child")

        wf.add_jobs(job, child)

        with pytest.raises(DuplicateError) as e:
            wf.add_dependency(job, children=[child, child])

        assert (
            "A dependency already exists between job id: job and child id: child"
            in str(e)
        )

    def test_add_dependency_invalid_job(self):
        wf = Workflow("wf")
        job = Job("t")

        with pytest.raises(ValueError) as e:
            wf.add_dependency(job)

        assert "The given job does not have an id" in str(e)

    def test_add_dependency_invalid_parent(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        parent = Job("t")

        with pytest.raises(ValueError) as e:
            wf.add_dependency(job, parents=[parent])

        assert "One of the given parents does not have an id" in str(e)

    def test_add_dependency_invalid_child(self):
        wf = Workflow("wf")
        job = Job("t", _id="job")
        child = Job("t")

        with pytest.raises(ValueError) as e:
            wf.add_dependency(job, children=[child])

        assert "One of the given children does not have an id" in str(e)

    def test_infer_dependencies_fork_join_wf(self):
        wf = Workflow("wf")

        f1 = File("f1")
        f2 = File("f2")
        f3 = File("f3")
        f4 = File("f4")

        fork = Job("t1", _id="fork").add_outputs(f1, f2)
        work1 = Job("t1", _id="work1").add_inputs(f1).add_outputs(f3)
        work2 = Job("t1", _id="work2").add_inputs(f2).add_outputs(f4)
        join = Job("t1", _id="join").add_inputs(f3, f4)

        wf.add_jobs(fork, work1, work2, join)

        # manually call _infer_dependencies() as it is only called when
        # wf.write() is called
        wf._infer_dependencies()

        assert wf.dependencies["fork"] == _JobDependency("fork", {"work1", "work2"})
        assert wf.dependencies["work1"] == _JobDependency("work1", {"join"})
        assert wf.dependencies["work2"] == _JobDependency("work2", {"join"})

    def test_infer_dependencies_when_job_uses_stdin_stdout_and_stderr(self):
        wf = Workflow("wf")
        j1 = Job("t1", _id="j1").add_outputs(File("f1"))
        j2 = Job("t1", _id="j2").set_stdin(*j1.get_outputs()).set_stdout(File("f2"))
        j3 = Job("t1", _id="j3").add_inputs(*j2.get_outputs())
        wf.add_jobs(j1, j2, j3)

        # manually call _infer_dependencies() as it is only called when
        # wf.write() is called
        wf._infer_dependencies()

        assert wf.dependencies["j1"] == _JobDependency("j1", {"j2"})
        assert wf.dependencies["j2"] == _JobDependency("j2", {"j3"})

    def test_tojson(self, convert_yaml_schemas_to_json, load_schema, wf, expected_json):
        result = json.loads(json.dumps(wf, cls=_CustomEncoder))

        workflow_schema = load_schema("wf-5.0.json")
        validate(instance=result, schema=workflow_schema)

        result["jobs"] = sorted(result["jobs"], key=lambda j: j["id"])
        result["jobs"][0]["uses"] = sorted(
            result["jobs"][0]["uses"], key=lambda u: u["lfn"]
        )
        result["jobs"][1]["uses"] = sorted(
            result["jobs"][1]["uses"], key=lambda u: u["lfn"]
        )

        assert result == expected_json

    @pytest.mark.parametrize(
        "_format, loader", [("json", json.load), ("yml", yaml.safe_load)]
    )
    def test_write_file_obj(
        self,
        convert_yaml_schemas_to_json,
        load_schema,
        wf,
        expected_json,
        _format,
        loader,
    ):
        with NamedTemporaryFile("r+") as f:
            wf.write(f, _format=_format)

            # _path should be set by the call to write
            assert wf._path == f.name

            f.seek(0)
            result = loader(f)

        workflow_schema = load_schema("wf-5.0.json")
        validate(instance=result, schema=workflow_schema)

        result["jobs"] = sorted(result["jobs"], key=lambda j: j["id"])
        result["jobs"][0]["uses"] = sorted(
            result["jobs"][0]["uses"], key=lambda u: u["lfn"]
        )
        result["jobs"][1]["uses"] = sorted(
            result["jobs"][1]["uses"], key=lambda u: u["lfn"]
        )

        assert result == expected_json

    def test_write_str_filename(self, wf, load_schema, expected_json):
        path = "wf.yml"
        wf.write(path)

        # _path should be set by the call to write
        assert wf._path == path

        with open(path) as f:
            result = yaml.safe_load(f)

        workflow_schema = load_schema("wf-5.0.json")
        validate(instance=result, schema=workflow_schema)

        result["jobs"] = sorted(result["jobs"], key=lambda j: j["id"])
        result["jobs"][0]["uses"] = sorted(
            result["jobs"][0]["uses"], key=lambda u: u["lfn"]
        )
        result["jobs"][1]["uses"] = sorted(
            result["jobs"][1]["uses"], key=lambda u: u["lfn"]
        )

        assert result == expected_json

        os.remove(path)

    def test_write_default_filename(self, wf, expected_json):
        wf.write()
        EXPECTED_FILE = "workflow.yml"

        with open(EXPECTED_FILE) as f:
            result = yaml.safe_load(f)

        result["jobs"] = sorted(result["jobs"], key=lambda j: j["id"])

        for i in range(len(result["jobs"])):
            result["jobs"][i]["uses"] = sorted(
                result["jobs"][i]["uses"], key=lambda u: u["lfn"]
            )

        assert result == expected_json

        os.remove(EXPECTED_FILE)

    def test_write_wf_catalogs_included(self):
        wf = Workflow("test")
        wf.add_jobs(Job("ls"))

        wf.add_transformation_catalog(TransformationCatalog())
        wf.add_site_catalog(SiteCatalog())
        wf.add_replica_catalog(ReplicaCatalog())

        wf_path = Path("workflow.yml")
        with wf_path.open("w+") as f:
            wf.write(f)
            f.seek(0)
            result = yaml.load(f)

        expected = {
            "pegasus": "5.0",
            "name": "test",
            "siteCatalog": {"sites": []},
            "replicaCatalog": {"replicas": []},
            "transformationCatalog": {"transformations": []},
            "jobs": [
                {
                    "type": "job",
                    "name": "ls",
                    "id": "ID0000001",
                    "arguments": [],
                    "uses": [],
                }
            ],
            "jobDependencies": [],
        }

        assert expected == result

        wf_path.unlink()

    def test_write_valid_hierarchical_workflow(self, mocker):
        mocker.patch("Pegasus.api.workflow.Workflow.write")

        try:
            wf = Workflow("test")
            wf.add_jobs(SubWorkflow("file", False))
            wf.write(file="workflow.yml", _format="yml")
        except PegasusError:
            pytest.fail("shouldn't have thrown PegasusError")

        Pegasus.api.workflow.Workflow.write.assert_called_once_with(
            file="workflow.yml", _format="yml"
        )

    @pytest.mark.parametrize(
        "sc, tc",
        [
            (SiteCatalog(), None),
            (None, TransformationCatalog()),
            (SiteCatalog(), TransformationCatalog()),
        ],
    )
    def test_write_hierarchical_workflow_when_catalogs_are_inlined(self, sc, tc):
        wf = Workflow("test")
        wf.add_jobs(SubWorkflow("file", False))

        if sc:
            wf.add_site_catalog(sc)

        if tc:
            wf.add_transformation_catalog(tc)

        with pytest.raises(PegasusError) as e:
            wf.write()

        assert (
            "Site Catalog and Transformation Catalog must be written as a separate"
            in str(e)
        )

    def test_workflow_key_ordering_on_yml_write(self):
        tc = TransformationCatalog()
        rc = ReplicaCatalog()
        sc = SiteCatalog()

        wf = Workflow("wf")
        wf.add_transformation_catalog(tc)
        wf.add_replica_catalog(rc)
        wf.add_site_catalog(sc)

        wf.add_jobs(Job("t1", _id="a"))

        wf.add_env(JAVA_HOME="/java/home")
        wf.add_shell_hook(EventType.START, "/bin/echo hi")
        wf.add_metadata(key="value")

        wf.write()
        EXPECTED_FILE = Path("workflow.yml")

        with EXPECTED_FILE.open() as f:
            # reading in as str so ordering of keys is not disrupted
            # when loaded into a dict
            result = f.read()

        EXPECTED_FILE.unlink()

        """
        Check that wf keys have been ordered as follows (while ignoring nested keys):
        - pegasus,
        - name,
        - hooks,
        - profiles,
        - metadata,
        - siteCatalog,
        - replicaCatalog,
        - transformationCatalog,
        - jobs
        - jobDependencies
        """
        p = re.compile(
            r"pegasus: '5.0'[\w\W]+name:[\w\W]+hooks:[\w\W]+profiles:[\w\W]+metadata:[\w\W]+siteCatalog:[\w\W]+replicaCatalog:[\w\W]+transformationCatalog:[\w\W]+jobs:[\w\W]+jobDependencies:[\w\W]+"
        )
        assert p.match(result) is not None

    def test_plan_workflow_already_written(self, wf, mocker):
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")
        mocker.patch("Pegasus.client._client.Client.plan")

        path = "wf.yml"
        wf.write(path).plan()

        assert wf._path == path

        Pegasus.client._client.Client.plan.assert_called_once_with(
            path,
            cleanup="none",
            conf=None,
            dir=None,
            force=False,
            input_dirs=None,
            output_dir=None,
            output_sites=["local"],
            relative_dir=None,
            sites=None,
            staging_sites=None,
            submit=False,
            verbose=0,
        )

        os.remove(path)

    def test_plan_workflow_not_written(self, wf, mocker):
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")
        mocker.patch("Pegasus.client._client.Client.plan")

        DEFAULT_WF_PATH = "workflow.yml"
        wf.plan()

        assert wf._path == DEFAULT_WF_PATH

        Pegasus.client._client.Client.plan.assert_called_once_with(
            DEFAULT_WF_PATH,
            cleanup="none",
            conf=None,
            dir=None,
            force=False,
            input_dirs=None,
            output_dir=None,
            output_sites=["local"],
            relative_dir=None,
            sites=None,
            staging_sites=None,
            submit=False,
            verbose=0,
        )

        os.remove(DEFAULT_WF_PATH)

    def test_run(self, wf, mocker):
        mocker.patch("Pegasus.client._client.Client.run")
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")

        wf.run()

        Pegasus.client._client.Client.run.assert_called_once_with(None, verbose=0)

    def test_status(self, wf, mocker):
        mocker.patch("Pegasus.client._client.Client.status")
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")

        wf._submit_dir = "submit_dir"
        wf.status()

        Pegasus.client._client.Client.status.assert_called_once_with(
            wf._submit_dir, long=0, verbose=0
        )

    def test_remove(self, wf, mocker):
        mocker.patch("Pegasus.client._client.Client.remove")
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")

        wf._submit_dir = "submit_dir"
        wf.remove()

        Pegasus.client._client.Client.remove.assert_called_once_with(
            wf._submit_dir, verbose=0
        )

    def test_analyze(self, wf, mocker):
        mocker.patch("Pegasus.client._client.Client.analyzer")
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")

        wf._submit_dir = "submit_dir"
        wf.analyze()

        Pegasus.client._client.Client.analyzer.assert_called_once_with(
            wf._submit_dir, verbose=0
        )

    def test_statistics(self, wf, mocker):
        mocker.patch("Pegasus.client._client.Client.statistics")
        mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")

        wf._submit_dir = "submit_dir"
        wf.statistics()

        Pegasus.client._client.Client.statistics.assert_called_once_with(
            wf._submit_dir, verbose=0
        )
def test_tojson(self, convert_yaml_schemas_to_json, load_schema):
    tc = TransformationCatalog()
    (
        tc.add_transformations(
            Transformation("t1").add_sites(TransformationSite("local", "/pfn", False))
        )
        .add_transformations(
            Transformation("t2").add_sites(TransformationSite("local", "/pfn", False))
        )
        .add_containers(
            Container(
                "container1",
                Container.DOCKER,
                "image",
                arguments="--shm-size 123",
                mounts=["mount1"],
                bypass_staging=True,
            )
        )
        .add_containers(
            Container("container2", Container.DOCKER, "image", mounts=["mount1"])
        )
    )

    expected = {
        "pegasus": PEGASUS_VERSION,
        "transformations": [
            {
                "name": "t1",
                "sites": [{"name": "local", "pfn": "/pfn", "type": "installed"}],
            },
            {
                "name": "t2",
                "sites": [{"name": "local", "pfn": "/pfn", "type": "installed"}],
            },
        ],
        "containers": [
            {
                "name": "container1",
                "type": "docker",
                "image": "image",
                "mounts": ["mount1"],
                "bypass": True,
                "profiles": {"pegasus": {"container.arguments": "--shm-size 123"}},
            },
            {
                "name": "container2",
                "type": "docker",
                "image": "image",
                "mounts": ["mount1"],
            },
        ],
    }

    expected["transformations"] = sorted(
        expected["transformations"], key=lambda t: t["name"]
    )
    expected["containers"] = sorted(expected["containers"], key=lambda c: c["name"])

    result = json.loads(json.dumps(tc, cls=_CustomEncoder))
    result["transformations"] = sorted(
        result["transformations"], key=lambda t: t["name"]
    )
    result["containers"] = sorted(result["containers"], key=lambda c: c["name"])

    tc_schema = load_schema("tc-5.0.json")
    validate(instance=result, schema=tc_schema)

    assert result == expected
def test_add_invalid_container(self):
    tc = TransformationCatalog()
    with pytest.raises(TypeError) as e:
        tc.add_containers("container")

    assert "invalid container: container" in str(e)
def _to_tc(d: dict) -> TransformationCatalog:
    """Convert dict to TransformationCatalog

    :param d: TransformationCatalog represented as a dict
    :type d: dict
    :raises PegasusError: encountered error parsing
    :return: a TransformationCatalog object based on d
    :rtype: TransformationCatalog
    """
    try:
        tc = TransformationCatalog()

        # add transformations
        for tr in d["transformations"]:
            tr_to_add = Transformation(
                tr["name"],
                tr.get("namespace"),
                tr.get("version"),
                checksum=tr.get("checksum"),
            )

            # add transformation sites
            for s in tr["sites"]:
                site_to_add = TransformationSite(
                    s["name"],
                    s["pfn"],
                    True if s["type"] == "stageable" else False,
                    bypass_staging=s.get("bypass"),
                    arch=getattr(Arch, s.get("arch").upper())
                    if s.get("arch")
                    else None,
                    os_type=getattr(OS, s.get("os.type").upper())
                    if s.get("os.type")
                    else None,
                    os_release=s.get("os.release"),
                    os_version=s.get("os.version"),
                    container=s.get("container"),
                )

                # add profiles
                if s.get("profiles"):
                    site_to_add.profiles = defaultdict(dict, s.get("profiles"))

                # add metadata
                if s.get("metadata"):
                    site_to_add.metadata = s.get("metadata")

                # add site to this tr
                tr_to_add.add_sites(site_to_add)

            # add requires
            if tr.get("requires"):
                tr_to_add.requires = set(tr.get("requires"))

            # add profiles
            if tr.get("profiles"):
                tr_to_add.profiles = defaultdict(dict, tr.get("profiles"))

            # add hooks
            if tr.get("hooks"):
                tr_to_add.hooks = defaultdict(list, tr.get("hooks"))

            # add metadata
            if tr.get("metadata"):
                tr_to_add.metadata = tr.get("metadata")

            # add tr to tc
            tc.add_transformations(tr_to_add)

        # add containers
        if "containers" in d:
            for cont in d["containers"]:
                cont_to_add = Container(
                    cont["name"],
                    getattr(Container, cont["type"].upper()),
                    cont["image"],
                    mounts=cont.get("mounts"),
                    image_site=cont.get("image.site"),
                    checksum=cont.get("checksum"),
                    bypass_staging=cont.get("bypass"),
                )

                # add profiles
                if cont.get("profiles"):
                    cont_to_add.profiles = defaultdict(dict, cont.get("profiles"))

                # add cont to tc
                tc.add_containers(cont_to_add)

        return tc
    except KeyError:
        raise PegasusError("error parsing {}".format(d))
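# Illustrative input for _to_tc() above (a sketch, not part of the original
# module): the dict mirrors the keys read by the parser; the names "t1" and
# "cont" and the image URL are made-up example values.
example_tc_dict = {
    "pegasus": "5.0",
    "transformations": [
        {
            "name": "t1",
            "sites": [{"name": "local", "pfn": "/pfn", "type": "stageable"}],
        }
    ],
    "containers": [
        {"name": "cont", "type": "docker", "image": "docker:///img:latest"}
    ],
}
tc = _to_tc(example_tc_dict)
assert "None::t1::None" in tc.transformations
assert "cont" in tc.containers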
def test_add_invalid_transformation(self):
    tc = TransformationCatalog()
    with pytest.raises(TypeError) as e:
        tc.add_transformations(1)

    assert "invalid transformation: 1" in str(e)
def test_add_duplicate_transformation(self):
    tc = TransformationCatalog()
    tc.add_transformations(Transformation("name"))
    with pytest.raises(DuplicateError):
        tc.add_transformations(Transformation("name", namespace=None, version=None))