def test_chaining(self):
    site = Site("s")
    a = site.add_directories(Directory(Directory.LOCAL_SCRATCH, "/path"))
    b = site.add_grids(
        Grid(
            Grid.GT5,
            "smarty.isi.edu/jobmanager-pbs",
            Scheduler.PBS,
            job_type=SupportedJobs.AUXILLARY,
        )
    )

    assert id(a) == id(b)
def sc1():
    return SiteCatalog().add_sites(
        Site(
            "local",
            arch=Arch.X86_64,
            os_type=OS.LINUX,
            os_release="1",
            os_version="1",
        )
        .add_directories(
            Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
                FileServer("url", Operation.ALL).add_dagman_profile(retry=1)
            )
        )
        .add_dagman_profile(retry=1)
        .add_grids(
            Grid(
                Grid.CONDOR,
                "contact",
                Scheduler.CONDOR,
                job_type=SupportedJobs.REGISTER,
                free_mem=1,
                total_mem=1,
                max_count=1,
                max_cpu_time=1,
                running_jobs=1,
                jobs_in_queue=1,
                idle_nodes=1,
                total_nodes=1,
            )
        )
    )
def test_tojson_with_profiles(self):
    site = Site(
        "s",
        arch=Arch.X86_64,
        os_type=OS.LINUX,
        os_release="release",
        os_version="1.2.3",
    )
    site.add_directories(
        Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
            FileServer("url", Operation.GET)
        )
    )
    site.add_grids(
        Grid(
            Grid.GT5,
            "smarty.isi.edu/jobmanager-pbs",
            Scheduler.PBS,
            job_type=SupportedJobs.AUXILLARY,
        )
    )
    site.add_env(JAVA_HOME="/usr/bin/java")

    result = json.loads(json.dumps(site, cls=_CustomEncoder))
    expected = {
        "name": "s",
        "arch": "x86_64",
        "os.type": "linux",
        "os.release": "release",
        "os.version": "1.2.3",
        "directories": [
            {
                "type": "localScratch",
                "path": "/path",
                "fileServers": [{"url": "url", "operation": "get"}],
            }
        ],
        "grids": [
            {
                "type": "gt5",
                "contact": "smarty.isi.edu/jobmanager-pbs",
                "scheduler": "pbs",
                "jobtype": "auxillary",
            }
        ],
        "profiles": {"env": {"JAVA_HOME": "/usr/bin/java"}},
    }

    assert result == expected
def test_valid_site(self):
    assert Site(
        "site",
        arch=Arch.X86_64,
        os_type=OS.LINUX,
        os_release="release",
        os_version="1.1.1",
    )
def sc2():
    return SiteCatalog().add_sites(
        Site("local")
        .add_directories(
            Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
                FileServer("url", Operation.ALL)
            )
        )
        .add_grids(Grid(Grid.CONDOR, "contact", Scheduler.CONDOR))
    )
def sc1():
    return SiteCatalog().add_sites(
        Site(
            "local",
            arch=Arch.X86_64,
            os_type=OS.LINUX,
            os_release="1",
            os_version="1",
        )
        .add_directories(
            Directory(Directory.LOCAL_SCRATCH, "/path").add_file_servers(
                FileServer("url", Operation.ALL).add_dagman_profile(retry=1)
            )
        )
        .add_dagman_profile(retry=1)
        .add_grids(
            Grid(
                Grid.CONDOR,
                "contact",
                Scheduler.CONDOR,
                job_type=SupportedJobs.REGISTER,
            )
        )
    )
def test_add_valid_grid(self):
    site = Site("s")
    site.add_grids(
        Grid(
            Grid.GT5,
            "smarty.isi.edu/jobmanager-pbs",
            Scheduler.PBS,
            job_type=SupportedJobs.AUXILLARY,
        )
    )
    site.add_grids(
        Grid(
            Grid.GT5,
            "smarty.isi.edu/jobmanager-pbs",
            Scheduler.PBS,
            job_type=SupportedJobs.COMPUTE,
        )
    )

    assert len(site.grids) == 2
def test_add_valid_site(self):
    sc = SiteCatalog()
    assert sc.add_sites(Site("local"))
def _to_sc(d: dict) -> SiteCatalog:
    """Convert dict to SiteCatalog

    :param d: SiteCatalog represented as a dict
    :type d: dict
    :raises PegasusError: encountered error parsing
    :return: a SiteCatalog object based on d
    :rtype: SiteCatalog
    """
    try:
        sc = SiteCatalog()

        for s in d["sites"]:
            site = Site(
                s["name"],
                arch=getattr(Arch, s.get("arch").upper()) if s.get("arch") else None,
                os_type=getattr(OS, s.get("os.type").upper())
                if s.get("os.type")
                else None,
                os_release=s.get("os.release"),
                os_version=s.get("os.version"),
            )

            # add directories
            for _dir in s["directories"]:
                dir_type = None
                for enum_name, enum in _DirectoryType.__members__.items():
                    if _dir["type"] == enum.value:
                        dir_type = enum_name
                        break

                directory = Directory(getattr(Directory, dir_type), _dir["path"])

                # add file servers
                for fs in _dir["fileServers"]:
                    file_server = FileServer(
                        fs["url"], getattr(Operation, fs["operation"].upper())
                    )

                    # add profiles
                    if fs.get("profiles"):
                        file_server.profiles = defaultdict(dict, fs.get("profiles"))

                    # add file server to this directory
                    directory.add_file_servers(file_server)

                # add directory to this site
                site.add_directories(directory)

            # add grids
            if s.get("grids"):
                for gr in s.get("grids"):
                    grid = Grid(
                        getattr(Grid, gr["type"].upper()),
                        gr["contact"],
                        getattr(Scheduler, gr["scheduler"].upper()),
                        job_type=getattr(SupportedJobs, gr.get("jobtype").upper())
                        if gr.get("jobtype")
                        else None,
                    )

                    # add grid to this site
                    site.add_grids(grid)

            # add profiles
            if s.get("profiles"):
                site.profiles = defaultdict(dict, s.get("profiles"))

            # add site to sc
            sc.add_sites(site)

        return sc
    except KeyError:
        raise PegasusError("error parsing {}".format(d))
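# Illustrative usage sketch (not part of the original source): a minimal dict of
# the shape _to_sc above expects, with made-up site/directory/file-server values.
# Only the key names ("sites", "directories", "fileServers", ...) and the enum
# spellings ("localScratch", "all", "x86_64", ...) mirror the 5.0 site catalog
# schema exercised by the tests; everything else here is a hypothetical example.
def _example_to_sc_usage():
    d = {
        "sites": [
            {
                "name": "local",
                "arch": "x86_64",
                "os.type": "linux",
                "directories": [
                    {
                        "type": "localScratch",
                        "path": "/path",
                        "fileServers": [{"url": "url", "operation": "all"}],
                    }
                ],
            }
        ]
    }

    # _to_sc rebuilds the object model from the dict; a missing required key
    # surfaces as PegasusError
    sc = _to_sc(d)
    assert "local" in sc.sites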
def test_add_valid_directory(self):
    site = Site("s")
    site.add_directories(Directory(Directory.LOCAL_SCRATCH, "/path"))
    site.add_directories(Directory(Directory.LOCAL_STORAGE, "/path"))

    assert len(site.directories) == 2
def test_write(self, expected_json, _format, loader):
    sc = (
        SiteCatalog()
        .add_sites(
            Site("local", arch=Arch.X86_64, os_type=OS.LINUX).add_directories(
                Directory(
                    Directory.SHARED_SCRATCH, "/tmp/workflows/scratch"
                ).add_file_servers(
                    FileServer("file:///tmp/workflows/scratch", Operation.ALL)
                ),
                Directory(
                    Directory.LOCAL_STORAGE, "/tmp/workflows/outputs"
                ).add_file_servers(
                    FileServer("file:///tmp/workflows/outputs", Operation.ALL)
                ),
            )
        )
        .add_sites(
            Site("condor_pool", arch=Arch.X86_64, os_type=OS.LINUX)
            .add_directories(
                Directory(Directory.SHARED_SCRATCH, "/lustre").add_file_servers(
                    FileServer("gsiftp://smarty.isi.edu/lustre", Operation.ALL)
                )
            )
            .add_grids(
                Grid(
                    Grid.GT5,
                    "smarty.isi.edu/jobmanager-pbs",
                    Scheduler.PBS,
                    job_type=SupportedJobs.AUXILLARY,
                ),
                Grid(
                    Grid.GT5,
                    "smarty.isi.edu/jobmanager-pbs",
                    Scheduler.PBS,
                    job_type=SupportedJobs.COMPUTE,
                ),
            )
            .add_env(JAVA_HOME="/usr/bin/java"),
            Site("staging_site", arch=Arch.X86_64, os_type=OS.LINUX).add_directories(
                Directory(Directory.SHARED_SCRATCH, "/data").add_file_servers(
                    FileServer("scp://obelix.isi.edu/data", Operation.PUT),
                    FileServer("http://obelix.isi.edu/data", Operation.GET),
                )
            ),
        )
    )

    with NamedTemporaryFile(mode="r+") as f:
        sc.write(f, _format=_format)
        f.seek(0)
        result = loader(f)

    # sort sites, directories, file servers, and grids so the comparison does
    # not depend on serialization order
    result["sites"].sort(key=lambda s: s["name"])
    for i in range(len(result["sites"])):
        result["sites"][i]["directories"].sort(key=lambda d: d["path"])
        for j in range(len(result["sites"][i]["directories"])):
            result["sites"][i]["directories"][j]["fileServers"].sort(
                key=lambda fs: fs["url"]
            )

        if "grids" in result["sites"][i]:
            result["sites"][i]["grids"].sort(key=lambda g: g["jobtype"])

    assert "createdOn" in result["x-pegasus"]
    assert result["x-pegasus"]["createdBy"] == getpass.getuser()
    assert result["x-pegasus"]["apiLang"] == "python"
    del result["x-pegasus"]

    assert result == expected_json
def test_tojson(self, convert_yaml_schemas_to_json, load_schema, expected_json):
    sc = SiteCatalog().add_sites(
        Site("local", arch=Arch.X86_64, os_type=OS.LINUX).add_directories(
            Directory(
                Directory.SHARED_SCRATCH, "/tmp/workflows/scratch"
            ).add_file_servers(
                FileServer("file:///tmp/workflows/scratch", Operation.ALL)
            ),
            Directory(
                Directory.LOCAL_STORAGE, "/tmp/workflows/outputs"
            ).add_file_servers(
                FileServer("file:///tmp/workflows/outputs", Operation.ALL)
            ),
        ),
        Site("condor_pool", arch=Arch.X86_64, os_type=OS.LINUX)
        .add_directories(
            Directory(Directory.SHARED_SCRATCH, "/lustre").add_file_servers(
                FileServer("gsiftp://smarty.isi.edu/lustre", Operation.ALL)
            )
        )
        .add_grids(
            Grid(
                Grid.GT5,
                "smarty.isi.edu/jobmanager-pbs",
                Scheduler.PBS,
                job_type=SupportedJobs.AUXILLARY,
            ),
            Grid(
                Grid.GT5,
                "smarty.isi.edu/jobmanager-pbs",
                Scheduler.PBS,
                job_type=SupportedJobs.COMPUTE,
            ),
        )
        .add_env(JAVA_HOME="/usr/bin/java"),
        Site("staging_site", arch=Arch.X86_64, os_type=OS.LINUX).add_directories(
            Directory(Directory.SHARED_SCRATCH, "/data")
            .add_file_servers(FileServer("scp://obelix.isi.edu/data", Operation.PUT))
            .add_file_servers(FileServer("http://obelix.isi.edu/data", Operation.GET))
        ),
    )

    result = json.loads(json.dumps(sc, cls=_CustomEncoder))

    # validate the serialized catalog against the 5.0 site catalog schema
    sc_schema = load_schema("sc-5.0.json")
    validate(instance=result, schema=sc_schema)

    # sort sites, directories, file servers, and grids so the comparison does
    # not depend on serialization order
    result["sites"].sort(key=lambda s: s["name"])
    for i in range(len(result["sites"])):
        result["sites"][i]["directories"].sort(key=lambda d: d["path"])
        for j in range(len(result["sites"][i]["directories"])):
            result["sites"][i]["directories"][j]["fileServers"].sort(
                key=lambda fs: fs["url"]
            )

        if "grids" in result["sites"][i]:
            result["sites"][i]["grids"].sort(key=lambda g: g["jobtype"])

    assert result == expected_json
def test_chaining(self):
    sc = SiteCatalog()
    a = sc.add_sites(Site("local"))
    b = sc.add_sites(Site("condor_pool"))

    assert id(a) == id(b)
def test_add_duplicate_site(self):
    sc = SiteCatalog()
    sc.add_sites(Site("local"))
    with pytest.raises(DuplicateError):
        sc.add_sites(Site("local"))
def test_invalid_site(self, name, arch, os_type, invalid_var):
    with pytest.raises(TypeError) as e:
        Site(name, arch=arch, os_type=os_type)

    assert "invalid {invalid_var}: {value}".format(
        invalid_var=invalid_var, value=locals()[invalid_var]
    ) in str(e)
def test_add_invalid_directory(self):
    with pytest.raises(TypeError) as e:
        site = Site("s")
        site.add_directories("baddirectory")

    assert "invalid directory: baddirectory" in str(e)
def test_add_invalid_grid(self):
    with pytest.raises(TypeError) as e:
        site = Site("s")
        site.add_grids("badgrid")

    assert "invalid grid: badgrid" in str(e)