def read(self):
    """Read existing data from project-level storage."""
    hdf = ProjectHDFio(self._project, file_name="project_data")
    if self.table_name not in hdf.list_groups():
        raise KeyError(
            f"Table name {self.table_name} was not found -- Project data is empty."
        )
    self.from_hdf(hdf=hdf)
def rewrite_hdf(self, hdf: ProjectHDFio, group_name: str = None):
    """
    Update the HDF representation.

    If an object is read from an older layout, this will remove the old data
    and rewrite it in the newest layout.

    Args:
        hdf (:class:`.ProjectHDFio`): HDF group to read/write
        group_name (str, optional): name of subgroup
    """
    with _WithHDF(hdf, group_name) as hdf:
        obj = hdf.to_object()
        hdf.remove_group()
        obj.to_hdf(hdf)
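# --- Hedged usage sketch for rewrite_hdf above. DataContainer stands in for
# any HasHDF implementation; the ProjectHDFio import path is an assumption and
# may differ between pyiron_base versions. A sketch, not a verified recipe.
def _example_rewrite_hdf():
    from pyiron_base import Project, DataContainer
    from pyiron_base.storage.hdfio import ProjectHDFio  # assumed import path

    pr = Project("rewrite_demo")
    hdf = ProjectHDFio(project=pr, file_name="storage.h5", h5_path="/", mode="a")
    dc = DataContainer({"a": 1}, table_name="payload")
    dc.to_hdf(hdf, group_name="payload")       # write once in the current layout
    dc.rewrite_hdf(hdf, group_name="payload")  # re-serialize: to_object -> remove_group -> to_hdf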
def wrapper(job_name, delete_existing_job=False, delete_aborted_job=False):
    """
    Create one of the following jobs:
    - 'ExampleJob': example job just generating random number
    - 'SerialMaster': series of jobs run in serial
    - 'ParallelMaster': series of jobs run in parallel
    - 'ScriptJob': Python script or jupyter notebook job container
    - 'ListMaster': list of jobs

    Args:
        job_name (str): name of the job
        delete_existing_job (bool): delete an existing job - default false
        delete_aborted_job (bool): delete an existing and aborted job - default false

    Returns:
        GenericJob: job object depending on the job_type selected
    """
    job_name = _get_safe_job_name(job_name)
    return JobType(
        class_name=name,
        project=ProjectHDFio(project=self._project.copy(), file_name=job_name),
        job_name=job_name,
        job_class_dict=self._job_class_dict,
        delete_existing_job=delete_existing_job,
        delete_aborted_job=delete_aborted_job,
    )
def __call__(
    self,
    job_type: Union[str, Type[GenericJob]],
    job_name: str,
    delete_existing_job: bool = False,
    delete_aborted_job: bool = False,
) -> GenericJob:
    """
    Create a job.

    Args:
        job_type (str|Type[GenericJob]): The job class to be instantiated, either
            the string of a known class, or an actual class, e.g. in the case of
            custom user-made jobs.
        job_name (str): name of the job.
        delete_existing_job (bool): delete an existing job. (Default is False.)
        delete_aborted_job (bool): delete an existing and aborted job. (Default is False.)

    Returns:
        GenericJob: job object depending on the job_type selected
    """
    job = JobType(
        class_name=job_type,  # Pass the class directly, JobType can handle that
        project=ProjectHDFio(project=self._project.copy(), file_name=job_name),
        job_name=job_name,
        job_class_dict=self._job_class_dict,
        delete_existing_job=delete_existing_job,
        delete_aborted_job=delete_aborted_job,
    )
    if state.settings.login_user is not None:
        job.user = state.settings.login_user
    return job
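# --- Hedged usage sketch for the factory __call__ above. In pyiron_base this
# call is typically reached through a Project, e.g. pr.create_job(...); the
# exact entry point and the available job types vary by installation, so treat
# the names below as illustrative.
def _example_create_job():
    from pyiron_base import Project

    pr = Project("factory_demo")
    job = pr.create_job("ScriptJob", "my_script", delete_existing_job=True)
    job.script_path = "analysis.py"  # ScriptJob input; the path is illustrative
    job.run()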
def wrapper(
    job_name: str,
    delete_existing_job: bool = False,
    delete_aborted_job: bool = False,
) -> GenericJob:
    """
    Create a job.

    Args:
        job_name (str): name of the job
        delete_existing_job (bool): delete an existing job - default false
        delete_aborted_job (bool): delete an existing and aborted job - default false

    Returns:
        GenericJob: job object depending on the job_type selected
    """
    return JobType(
        class_name=self._job_class_dict[name],  # Pass the class directly, JobType can handle that
        project=ProjectHDFio(project=self._project.copy(), file_name=job_name),
        job_name=job_name,
        job_class_dict=self._job_class_dict,
        delete_existing_job=delete_existing_job,
        delete_aborted_job=delete_aborted_job,
    )
def test_hdf(self):
    pr = Project(self.file_location)
    file_name = os.path.join(self.file_location, "genericpara.h5")
    hdf = ProjectHDFio(project=pr, file_name=file_name, h5_path="/test", mode="a")
    hdf.create_group("test")
    self.generic_parameters_str.to_hdf(hdf=hdf, group_name="input")
    gp_reload = GenericParameters(table_name="str")
    gp_reload.from_hdf(hdf=hdf, group_name="input")
    self.assertEqual(gp_reload.get("par___1"), 1)
    self.assertEqual(gp_reload.get("par_2"), "all")
    self.assertEqual(gp_reload.get("count"), 0)
    self.assertTrue(gp_reload.get("write_restart"))
    self.assertFalse(gp_reload.get("read_restart"))
    self.assertEqual(gp_reload.get("dict"), {"a": 1, "b": 2})
    self.assertEqual(gp_reload.get("list"), [1, "s"])
    os.remove(file_name)
def from_hdf(self, hdf: ProjectHDFio, group_name: str = None):
    """
    Read object from HDF.

    If group_name is given, descend into that subgroup in hdf first.

    Args:
        hdf (:class:`.ProjectHDFio`): HDF group to read from
        group_name (str, optional): name of subgroup
    """
    group_name = group_name if group_name is not None else self._get_hdf_group_name()
    with _WithHDF(hdf, group_name) as hdf:
        version = hdf.get("HDF_VERSION", "0.1.0")
        self._from_hdf(hdf, version=version)
def create(self, job_name, delete_existing_job=False):
    """
    Internal helper function for pr.create.job.Code()

    Args:
        job_name (str): name of the job
        delete_existing_job (bool): delete an existing job - default false

    Returns:
        GenericJob: job object depending on the job_type selected
    """
    return JobType(
        class_name=self._class_name,
        project=ProjectHDFio(project=self._project.copy(), file_name=job_name),
        job_name=job_name,
        job_class_dict=self._job_class_dict,
        delete_existing_job=delete_existing_job,
    )
def to_hdf(self, hdf: ProjectHDFio, group_name: str = None):
    """
    Write object to HDF.

    If group_name is given, create a subgroup in hdf first.

    Args:
        hdf (:class:`.ProjectHDFio`): HDF group to write to
        group_name (str, optional): name of subgroup
    """
    group_name = group_name if group_name is not None else self._get_hdf_group_name()
    with _WithHDF(hdf, group_name) as hdf:
        if len(hdf.list_dirs()) > 0 and group_name is None:
            raise ValueError("HDF group must be empty when group_name is not set.")
        self._to_hdf(hdf)
        self._store_type_to_hdf(hdf)
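# --- Hedged roundtrip sketch for the to_hdf/from_hdf pair above, using
# DataContainer as a concrete HasHDF implementation. The ProjectHDFio import
# path is an assumption and may differ between pyiron_base versions.
def _example_hdf_roundtrip():
    from pyiron_base import Project, DataContainer
    from pyiron_base.storage.hdfio import ProjectHDFio  # assumed import path

    pr = Project("roundtrip_demo")
    hdf = ProjectHDFio(project=pr, file_name="roundtrip.h5", h5_path="/", mode="a")
    dc = DataContainer({"a": 1, "b": [2, 3]}, table_name="payload")
    dc.to_hdf(hdf, group_name="payload")  # stores data, type info and HDF_VERSION

    restored = DataContainer(table_name="payload")
    restored.from_hdf(hdf, group_name="payload")
    assert restored["a"] == 1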
@classmethod
def setUpClass(cls):
    cls.pl = InputList(
        [{"foo": "bar"}, 2, 42, {"next": [0, {"depth": 23}]}],
        table_name="input",
    )
    cls.pl["tail"] = InputList([2, 4, 8])
    file_location = os.path.dirname(os.path.abspath(__file__))
    pr = Project(file_location)
    cls.file_name = os.path.join(file_location, "input.h5")
    cls.hdf = ProjectHDFio(
        project=pr, file_name=cls.file_name, h5_path="/test", mode="a"
    )
def __init__(self, job_path):
    job_path_lst = job_path.replace("\\", "/").split(".h5")
    if len(job_path_lst) != 2:
        raise ValueError(
            f"Invalid job path '{job_path}': expected exactly one '.h5' in the path."
        )
    sub_job = job_path_lst[1]
    h5_path = None
    if sub_job is not None:
        if len(sub_job.strip()) > 0:
            h5_path = "/".join(sub_job.split("/")[:-1])
    hdf_project = ProjectHDFio(
        project=Project(os.path.dirname(job_path_lst[0])),
        file_name=job_path_lst[0].split("/")[-1] + ".h5",
        h5_path=h5_path,
        mode="r",
    )
    super(JobPathBase, self).__init__(
        project=hdf_project, job_name=job_path_lst[1].split("/")[-1]
    )
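# --- Pure-string illustration of the job-path convention parsed above; no
# pyiron import required, and the example path is hypothetical. A full path
# such as /home/user/proj/my_job.h5/my_job/output splits at ".h5" into the
# file on disk and the HDF5 group inside it.
def _example_job_path_parsing():
    job_path = "/home/user/proj/my_job.h5/my_job/output"
    file_part, sub_job = job_path.replace("\\", "/").split(".h5")
    h5_path = "/".join(sub_job.split("/")[:-1])  # "/my_job"
    job_name = sub_job.split("/")[-1]            # "output"
    assert (file_part, h5_path, job_name) == (
        "/home/user/proj/my_job", "/my_job", "output"
    )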
def write(self):
    """Write data to project-level storage."""
    self.to_hdf(ProjectHDFio(self._project, file_name="project_data"))
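# --- Hedged usage sketch for the read/write pair in this section (read at the
# top, write directly above). Assumes these methods live on the project-data
# container reachable as `pr.data` in pyiron_base; that attribute name is an
# assumption, not verified against a release.
def _example_project_data():
    from pyiron_base import Project

    pr = Project("data_demo")
    pr.data["energy_cutoff"] = 320.0  # store a project-level value
    pr.data.write()                   # persists via to_hdf to project_data.h5
    pr.data.read()                    # reloads; raises KeyError if nothing was written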