def test_chaining(self):
    """add_file_servers() must return the Directory itself to support chaining."""
    directory = Directory(Directory.LOCAL_SCRATCH, "/path")
    chained = directory.add_file_servers(
        FileServer("url", Operation.PUT)
    ).add_file_servers(FileServer("url", Operation.GET))

    # both names must refer to the very same object
    assert directory is chained
def sc1():
    """Build a site catalog with profiles and a fully-specified grid."""
    site = Site(
        "local",
        arch=Arch.X86_64,
        os_type=OS.LINUX,
        os_release="1",
        os_version="1",
    )
    site.add_directories(
        Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
            FileServer("url", Operation.ALL).add_dagman_profile(retry=1)
        )
    )
    site.add_dagman_profile(retry=1)
    site.add_grids(
        Grid(
            Grid.CONDOR,
            "contact",
            Scheduler.CONDOR,
            job_type=SupportedJobs.REGISTER,
            free_mem=1,
            total_mem=1,
            max_count=1,
            max_cpu_time=1,
            running_jobs=1,
            jobs_in_queue=1,
            idle_nodes=1,
            total_nodes=1,
        )
    )
    return SiteCatalog().add_sites(site)
def test_tojson_with_profiles(self):
    """A site with a directory, a grid, and an env profile serializes fully."""
    site = Site(
        "s",
        arch=Arch.X86_64,
        os_type=OS.LINUX,
        os_release="release",
        os_version="1.2.3",
    )
    site.add_directories(
        Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
            FileServer("url", Operation.GET)
        )
    )
    site.add_grids(
        Grid(
            Grid.GT5,
            "smarty.isi.edu/jobmanager-pbs",
            Scheduler.PBS,
            job_type=SupportedJobs.AUXILLARY,
        )
    )
    site.add_env(JAVA_HOME="/usr/bin/java")

    serialized = json.loads(json.dumps(site, cls=_CustomEncoder))

    assert serialized == {
        "name": "s",
        "arch": "x86_64",
        "os.type": "linux",
        "os.release": "release",
        "os.version": "1.2.3",
        "directories": [
            {
                "type": "localScratch",
                "path": "/path",
                "fileServers": [{"url": "url", "operation": "get"}],
            }
        ],
        "grids": [
            {
                "type": "gt5",
                "contact": "smarty.isi.edu/jobmanager-pbs",
                "scheduler": "pbs",
                "jobtype": "auxillary",
            }
        ],
        "profiles": {"env": {"JAVA_HOME": "/usr/bin/java"}},
    }
def sc2():
    """Build a minimal site catalog: one default site, directory, and grid."""
    site = Site("local")
    site.add_directories(
        Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
            FileServer("url", Operation.ALL)
        )
    )
    site.add_grids(Grid(Grid.CONDOR, "contact", Scheduler.CONDOR))
    return SiteCatalog().add_sites(site)
def test_tojson(self):
    """A directory with one file server serializes to the expected dict."""
    directory = Directory(Directory.LOCAL_SCRATCH, "/path")
    directory.add_file_servers(FileServer("url", Operation.PUT))

    serialized = json.loads(json.dumps(directory, cls=_CustomEncoder))

    assert serialized == {
        "type": "localScratch",
        "path": "/path",
        "fileServers": [{"url": "url", "operation": "put"}],
    }
def sc1():
    """Build a site catalog with dagman profiles and a registering grid."""
    site = Site(
        "local",
        arch=Arch.X86_64,
        os_type=OS.LINUX,
        os_release="1",
        os_version="1",
    )
    site.add_directories(
        Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
            FileServer("url", Operation.ALL).add_dagman_profile(retry=1)
        )
    )
    site.add_dagman_profile(retry=1)
    site.add_grids(
        Grid(
            Grid.CONDOR,
            "contact",
            Scheduler.CONDOR,
            job_type=SupportedJobs.REGISTER,
        )
    )
    return SiteCatalog().add_sites(site)
def test_tojson_with_profiles(self, convert_yaml_schemas_to_json, load_schema):
    """A file server with an env profile serializes and validates against the schema."""
    fs = FileServer("url", Operation.PUT).add_env(SOME_ENV="1")

    serialized = json.loads(json.dumps(fs, cls=_CustomEncoder))
    expected = {
        "url": "url",
        "operation": "put",
        "profiles": {"env": {"SOME_ENV": "1"}},
    }

    # must conform to the published fileServer sub-schema before comparing values
    fs_schema = load_schema("sc-5.0.json")["$defs"]["fileServer"]
    validate(instance=serialized, schema=fs_schema)

    assert serialized == expected
def _to_sc(d: dict) -> SiteCatalog:
    """Convert dict to SiteCatalog

    :param d: SiteCatalog represented as a dict
    :type d: dict
    :raises PegasusError: encountered error parsing
    :return: a SiteCatalog object based on d
    :rtype: SiteCatalog
    """
    try:
        sc = SiteCatalog()

        for s in d["sites"]:
            # arch / os.type arrive as lowercase strings; map them onto the
            # corresponding enum members, leaving them None when absent
            site = Site(
                s["name"],
                arch=getattr(Arch, s.get("arch").upper()) if s.get("arch") else None,
                os_type=getattr(OS, s.get("os.type").upper())
                if s.get("os.type")
                else None,
                os_release=s.get("os.release"),
                os_version=s.get("os.version"),
            )

            # add directories
            for _dir in s["directories"]:
                # reverse-map the serialized type string (e.g. "localScratch")
                # to the _DirectoryType member name used as a Directory attribute
                dir_type = None
                for enum_name, enum in _DirectoryType.__members__.items():
                    if _dir["type"] == enum.value:
                        dir_type = enum_name
                        break
                # NOTE(review): if no member matches, dir_type stays None and
                # getattr(Directory, None) raises TypeError (not PegasusError) —
                # presumably upstream schema validation prevents this; confirm
                directory = Directory(getattr(Directory, dir_type), _dir["path"])

                # add file servers
                for fs in _dir["fileServers"]:
                    file_server = FileServer(
                        fs["url"], getattr(Operation, fs["operation"].upper()))

                    # add profiles; stored as a defaultdict(dict) to match the
                    # in-memory representation produced by the add_*_profile API
                    if fs.get("profiles"):
                        file_server.profiles = defaultdict(
                            dict, fs.get("profiles"))

                    # add file server to this directory
                    directory.add_file_servers(file_server)

                # add directory to this site
                site.add_directories(directory)

            # add grids (optional section)
            if s.get("grids"):
                for gr in s.get("grids"):
                    grid = Grid(
                        getattr(Grid, gr["type"].upper()),
                        gr["contact"],
                        getattr(Scheduler, gr["scheduler"].upper()),
                        job_type=getattr(SupportedJobs, gr.get("jobtype").upper())
                        if gr.get("jobtype")
                        else None,
                    )

                    # add grid to this site
                    site.add_grids(grid)

            # add profiles (optional section)
            if s.get("profiles"):
                site.profiles = defaultdict(dict, s.get("profiles"))

            # add site to sc
            sc.add_sites(site)

        return sc

    except KeyError:
        # any missing required key ("sites", "name", "directories", ...)
        # surfaces as a single parse error for the whole document
        raise PegasusError("error parsing {}".format(d))
def test_add_valid_file_server(self):
    """Adding a proper FileServer succeeds and the call returns truthy (self)."""
    directory = Directory(Directory.LOCAL_SCRATCH, "/path")
    file_server = FileServer("url", Operation.PUT)
    assert directory.add_file_servers(file_server)
def test_write(self, expected_json, _format, loader):
    """Round-trip: write the catalog to a temp file, reload, and compare."""
    local = Site("local", arch=Arch.X86_64, os_type=OS.LINUX).add_directories(
        Directory(
            Directory.SHARED_SCRATCH, "/tmp/workflows/scratch"
        ).add_file_servers(
            FileServer("file:///tmp/workflows/scratch", Operation.ALL)
        ),
        Directory(
            Directory.LOCAL_STORAGE, "/tmp/workflows/outputs"
        ).add_file_servers(
            FileServer("file:///tmp/workflows/outputs", Operation.ALL)
        ),
    )

    condor_pool = (
        Site("condor_pool", arch=Arch.X86_64, os_type=OS.LINUX)
        .add_directories(
            Directory(Directory.SHARED_SCRATCH, "/lustre").add_file_servers(
                FileServer("gsiftp://smarty.isi.edu/lustre", Operation.ALL)
            )
        )
        .add_grids(
            Grid(
                Grid.GT5,
                "smarty.isi.edu/jobmanager-pbs",
                Scheduler.PBS,
                job_type=SupportedJobs.AUXILLARY,
            ),
            Grid(
                Grid.GT5,
                "smarty.isi.edu/jobmanager-pbs",
                Scheduler.PBS,
                job_type=SupportedJobs.COMPUTE,
            ),
        )
        .add_env(JAVA_HOME="/usr/bin/java")
    )

    staging_site = Site(
        "staging_site", arch=Arch.X86_64, os_type=OS.LINUX
    ).add_directories(
        Directory(Directory.SHARED_SCRATCH, "/data").add_file_servers(
            FileServer("scp://obelix.isi.edu/data", Operation.PUT),
            FileServer("http://obelix.isi.edu/data", Operation.GET),
        )
    )

    sc = SiteCatalog().add_sites(local).add_sites(condor_pool, staging_site)

    with NamedTemporaryFile(mode="r+") as f:
        sc.write(f, _format=_format)
        f.seek(0)
        result = loader(f)

    # normalize list ordering so the comparison is deterministic
    result["sites"].sort(key=lambda s: s["name"])
    for site in result["sites"]:
        site["directories"].sort(key=lambda d: d["path"])
        for directory in site["directories"]:
            directory["fileServers"].sort(key=lambda fs: fs["url"])
        if "grids" in site:
            site["grids"].sort(key=lambda g: g["jobtype"])

    # provenance metadata is environment-dependent; check then strip it
    assert "createdOn" in result["x-pegasus"]
    assert result["x-pegasus"]["createdBy"] == getpass.getuser()
    assert result["x-pegasus"]["apiLang"] == "python"
    del result["x-pegasus"]

    assert result == expected_json
def test_tojson(self, convert_yaml_schemas_to_json, load_schema, expected_json):
    """Catalog-level serialization validates against the schema and matches."""
    local = Site("local", arch=Arch.X86_64, os_type=OS.LINUX).add_directories(
        Directory(
            Directory.SHARED_SCRATCH, "/tmp/workflows/scratch"
        ).add_file_servers(
            FileServer("file:///tmp/workflows/scratch", Operation.ALL)
        ),
        Directory(
            Directory.LOCAL_STORAGE, "/tmp/workflows/outputs"
        ).add_file_servers(
            FileServer("file:///tmp/workflows/outputs", Operation.ALL)
        ),
    )

    condor_pool = (
        Site("condor_pool", arch=Arch.X86_64, os_type=OS.LINUX)
        .add_directories(
            Directory(Directory.SHARED_SCRATCH, "/lustre").add_file_servers(
                FileServer("gsiftp://smarty.isi.edu/lustre", Operation.ALL)
            )
        )
        .add_grids(
            Grid(
                Grid.GT5,
                "smarty.isi.edu/jobmanager-pbs",
                Scheduler.PBS,
                job_type=SupportedJobs.AUXILLARY,
            ),
            Grid(
                Grid.GT5,
                "smarty.isi.edu/jobmanager-pbs",
                Scheduler.PBS,
                job_type=SupportedJobs.COMPUTE,
            ),
        )
        .add_env(JAVA_HOME="/usr/bin/java")
    )

    staging_site = Site(
        "staging_site", arch=Arch.X86_64, os_type=OS.LINUX
    ).add_directories(
        Directory(Directory.SHARED_SCRATCH, "/data")
        .add_file_servers(FileServer("scp://obelix.isi.edu/data", Operation.PUT))
        .add_file_servers(FileServer("http://obelix.isi.edu/data", Operation.GET))
    )

    sc = SiteCatalog().add_sites(local, condor_pool, staging_site)

    result = json.loads(json.dumps(sc, cls=_CustomEncoder))

    validate(instance=result, schema=load_schema("sc-5.0.json"))

    # normalize list ordering so the comparison is deterministic
    result["sites"].sort(key=lambda s: s["name"])
    for site in result["sites"]:
        site["directories"].sort(key=lambda d: d["path"])
        for directory in site["directories"]:
            directory["fileServers"].sort(key=lambda fs: fs["url"])
        if "grids" in site:
            site["grids"].sort(key=lambda g: g["jobtype"])

    assert result == expected_json
def test_invalid_file_server(self):
    """Passing a plain string instead of an Operation member must raise TypeError.

    Fixes the misspelled test name ("invlaid") and inspects the exception
    message via ``e.value`` — ``str(ExceptionInfo)`` only reports the raise
    location in modern pytest, so matching against it is unreliable.
    """
    with pytest.raises(TypeError) as e:
        FileServer("url", "put")

    assert "invalid operation_type: put" in str(e.value)
def test_valid_file_server(self):
    """Constructing a FileServer with a proper Operation member succeeds."""
    file_server = FileServer("url", Operation.PUT)
    assert file_server