Example #1
 def setUpClass(cls):
     cls.current_dir = os.path.dirname(os.path.abspath(__file__)).replace("\\", "/")
     cls.empty_hdf5 = FileHDFio(file_name=cls.current_dir + "/filehdfio_empty.h5")
     cls.full_hdf5 = FileHDFio(file_name=cls.current_dir + "/filehdfio_full.h5")
     cls.i_o_hdf5 = FileHDFio(file_name=cls.current_dir + "/filehdfio_io.h5")
     cls.es_hdf5 = FileHDFio(
         file_name=cls.current_dir + "/../static/dft/es_hdf.h5"
     )
     with cls.full_hdf5.open("content") as hdf:
         hdf["array"] = np.array([1, 2, 3, 4, 5, 6])
         hdf["array_3d"] = np.array([[1, 2, 3], [4, 5, 6]])
         hdf["traj"] = np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9]]], dtype=object)
         hdf["dict"] = {"key_1": 1, "key_2": "hallo"}
         hdf["dict_numpy"] = {"key_1": 1, "key_2": np.array([1, 2, 3, 4, 5, 6])}
         hdf['indices'] = np.array([1, 1, 1, 1, 6], dtype=int)
         with hdf.open('group') as grp:
             grp['some_entry'] = 'present'
     with cls.i_o_hdf5.open("content") as hdf:
         hdf["exists"] = True
     # Open and store value in a hdf file to use test_remove_file on it, do not use otherwise
     cls.to_be_removed_hdf = FileHDFio(file_name=cls.current_dir + '/filehdfio_tbr.h5')
     with cls.to_be_removed_hdf.open('content') as hdf:
         hdf['value'] = 1
     # Remains open to be closed by test_close, do not use otherwise
     cls.opened_hdf = cls.full_hdf5.open("content")
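A matching tearDownClass is not part of this excerpt; a minimal cleanup sketch, assuming the files created above should simply be deleted afterwards (the file list and the use of os.remove are assumptions, not the original suite's cleanup):

 @classmethod
 def tearDownClass(cls):
     # Hypothetical cleanup: remove the HDF5 files written in setUpClass.
     # filehdfio_tbr.h5 may already be gone if test_remove_file has run.
     for file_name in ["filehdfio_full.h5", "filehdfio_io.h5", "filehdfio_tbr.h5"]:
         path = cls.current_dir + "/" + file_name
         if os.path.isfile(path):
             os.remove(path)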
Example #2
 def setUpClass(cls):
     cls.current_dir = os.path.dirname(os.path.abspath(__file__)).replace("\\", "/")
     cls.empty_hdf5 = FileHDFio(file_name=cls.current_dir + "/filehdfio_empty.h5")
     cls.full_hdf5 = FileHDFio(file_name=cls.current_dir + "/filehdfio_full.h5")
     cls.es_hdf5 = FileHDFio(
         file_name=cls.current_dir + "/../static/dft/es_hdf.h5"
     )
     with cls.full_hdf5.open("content") as hdf:
         hdf["array"] = np.array([1, 2, 3, 4, 5, 6])
         hdf["array_3d"] = np.array([[1, 2, 3], [4, 5, 6]])
         hdf["traj"] = np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9]]])
         hdf["dict"] = {"key_1": 1, "key_2": "hallo"}
         hdf["dict_numpy"] = {"key_1": 1, "key_2": np.array([1, 2, 3, 4, 5, 6])}
Example #3
 def store_custom_output_dict(output_dict):
     # Use the working directory's name as the job group; the matching
     # <name>.h5 file is expected two directory levels up.
     folder = Path(".").cwd().parts[-1]
     hdf_file = Path(".").cwd().parents[1] / folder
     hdf_file = str(hdf_file) + ".h5"
     hdf = FileHDFio(hdf_file)
     hdf[folder].create_group("output")
     for k, v in output_dict.items():
         hdf[folder + "/output"][k] = v
Example #4
 def store_custom_output_dict(output_dict):
     folder = Path(".").cwd().parts[-1]
     hdf_file = Path(".").cwd().parents[1] / folder
     hdf_file = str(hdf_file) + ".h5"
     hdf = FileHDFio(hdf_file)
     hdf[folder].create_group("output")
     obj = DataContainer(output_dict)
     obj.to_hdf(hdf[folder + "/output"])
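Both store_custom_output_dict variants are meant to be called from inside a job's working directory, so that parents[1] resolves to the enclosing project and the directory name matches the job's HDF5 file; a hedged call-site sketch (directory layout and dictionary contents are assumptions):

 # Hypothetical call site, executed from e.g. <project>/my_job_hdf5/my_job/,
 # so the data ends up in <project>/my_job.h5 under "my_job/output".
 store_custom_output_dict({"energy": -1.23, "n_steps": 100})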
Example #5
 def get_custom_dict():
     # Look for <working directory name>.h5 two levels up; fall back to input.json.
     folder = Path(".").cwd().parts[-1]
     project_folder = Path(".").cwd().parents[1]
     hdf_file = project_folder / folder
     hdf_file = str(hdf_file) + ".h5"
     if Path(hdf_file).exists():
         hdf = FileHDFio(hdf_file)
         custom_dict = hdf[folder + '/input/custom_dict/data']
         custom_dict["project_dir"] = str(project_folder)
         return custom_dict
     elif Path("input.json").exists():
         with open("input.json") as f:
             return json.load(f)
     else:
         warnings.warn("{} not found".format(hdf_file))
         return None
Example #6
 def get_custom_dict():
     folder = Path(".").cwd().parts[-1]
     project_folder = Path(".").cwd().parents[1]
     hdf_file = project_folder / folder
     hdf_file = str(hdf_file).replace("\\", "/") + ".h5"
     if Path(hdf_file).exists():
         obj = DataContainer()
         obj.from_hdf(
             hdf=FileHDFio(hdf_file), group_name=folder + "/input/custom_dict"
         )
         obj["project_dir"] = str(project_folder)
         return obj
     elif Path("input.json").exists():
         with open("input.json") as f:
             return json.load(f)
     else:
         warnings.warn("{} not found".format(hdf_file))
         return None
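The "input/custom_dict" group read here would have been written beforehand by the submitting side; a hedged sketch of such a writer, mirroring the path construction above (the function name and dictionary contents are hypothetical):

 def store_custom_input_dict(input_dict):
     # Hypothetical counterpart: write the group that get_custom_dict reads.
     folder = Path(".").cwd().parts[-1]
     hdf_file = str(Path(".").cwd().parents[1] / folder) + ".h5"
     DataContainer(input_dict).to_hdf(
         hdf=FileHDFio(hdf_file), group_name=folder + "/input/custom_dict"
     )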
Example #7
 def get_custom_dict():
     folder = Path(".").cwd().parts[-1]
     project_folder = Path(".").cwd().parents[1]
     hdf_file = project_folder / folder
     hdf_file = str(hdf_file) + ".h5"
     if Path(hdf_file).exists():
         hdf = FileHDFio(hdf_file)
         custom_dict = GenericParameters()
         for k, v in zip(
             hdf[folder + "/input/custom_dict/data_dict"]["Parameter"],
             hdf[folder + "/input/custom_dict/data_dict"]["Value"],
         ):
             custom_dict[k] = v
         custom_dict["project_dir"] = str(project_folder)
         return custom_dict
     elif Path("input.json").exists():
         with open("input.json") as f:
             return json.load(f)
     else:
         warnings.warn("{} not found".format(hdf_file))
         return None
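A caller usually does not need to know which of the three variants above produced the result, since the plain dict, the DataContainer and the GenericParameters object all support item access; a short illustrative consumer (any key besides "project_dir" is hypothetical):

 custom_dict = get_custom_dict()
 if custom_dict is not None:
     project_dir = custom_dict["project_dir"]
     # Remaining keys come from the job's stored input, e.g.:
     # value = custom_dict["some_input_parameter"]  # hypothetical key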
Example #8
 def create_table(self,
                  enforce_update=False,
                  level=3,
                  file=None,
                  job_status_list=None):
     # Decide whether the stored table can be reused or must be (partly) rebuilt.
     skip_table_update = False
     filter_funct = self.filter_function
     if job_status_list is None:
         job_status_list = ["finished"]
     if self._is_file():
         if file is None:
             file = FileHDFio(file_name=self._project.path + self.name +
                              ".h5",
                              h5_path="/")
         (
             temp_user_function_dict,
             temp_system_function_dict,
         ) = self._get_data_from_hdf5(hdf=file)
         job_update_lst = self._collect_job_update_lst(
             job_status_list=job_status_list,
             filter_funct=filter_funct,
             job_stored_ids=self._get_job_ids(),
         )
         keys_update_user_lst = [
             key for key in self.add._user_function_dict.keys()
             if key not in temp_user_function_dict.keys()
         ]
         keys_update_system_lst = [
             k for k, v in self.add._system_function_dict.items()
             if v and not temp_system_function_dict[k]
         ]
          # Skip rebuilding only if there is nothing new and no forced update.
          if (len(job_update_lst) == 0 and len(keys_update_user_lst) == 0
                  and len(keys_update_system_lst) == 0 and not enforce_update):
             skip_table_update = True
     else:
         job_update_lst = self._collect_job_update_lst(
             job_status_list=job_status_list,
             filter_funct=filter_funct,
             job_stored_ids=None,
         )
         keys_update_user_lst, keys_update_system_lst = [], []
     if not skip_table_update and len(job_update_lst) != 0:
         df_new_ids = self._iterate_over_job_lst(
             job_lst=job_update_lst,
             function_lst=self.add._function_lst,
             level=level)
     else:
         df_new_ids = pandas.DataFrame({})
     if not skip_table_update and (len(keys_update_user_lst) != 0
                                   or len(keys_update_system_lst) != 0):
         job_update_lst = self._collect_job_update_lst(
             job_status_list=job_status_list,
             filter_funct=filter_funct,
             job_stored_ids=None,
         )
         function_lst = [
             v for k, v in self.add._user_function_dict.items()
             if k in keys_update_user_lst
         ] + [
             funct for funct in self.add._system_function_lst
             if funct.__name__ in keys_update_system_lst
         ]
         df_new_keys = self._iterate_over_job_lst(job_lst=job_update_lst,
                                                  function_lst=function_lst,
                                                  level=level)
     else:
         df_new_keys = pandas.DataFrame({})
     if len(self._df) > 0 and len(df_new_keys) > 0:
         self._df = pandas.concat([self._df, df_new_keys],
                                  axis=1,
                                  sort=False).reset_index(drop=True)
     if len(self._df) > 0 and len(df_new_ids) > 0:
         self._df = pandas.concat([self._df, df_new_ids],
                                  sort=False).reset_index(drop=True)
     elif len(df_new_ids) > 0:
         self._df = df_new_ids
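create_table itself is normally driven through a pyiron table job rather than called directly; a hedged usage sketch, assuming the pyiron_base entry point Project.create_table and the add/filter_function interface referenced in the method above (names should be checked against the installed version):

 table = pr.create_table(job_name="df_table")        # pr: an existing Project instance
 table.filter_function = lambda job: True            # corresponds to self.filter_function
 table.add["job_name"] = lambda job: job.job_name    # fills add._user_function_dict
 table.run()                                         # updates the table, calling create_table() internally
 df = table.get_dataframe()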
Example #9
 def from_hdf(self):
     file = FileHDFio(file_name=self._project.path + self.name + ".h5",
                      h5_path="/")
     self.add._from_hdf(file)
Example #10
 def test_hd_copy(self):
     new_hdf_file = os.path.join(self.current_dir, 'copy_full.h5')
     new_hdf = FileHDFio(file_name=new_hdf_file)
     new_hdf = self.full_hdf5.hd_copy(self.full_hdf5, new_hdf)
     self._check_full_hdf_values(new_hdf)
     os.remove(new_hdf_file)
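The helper _check_full_hdf_values used above is not shown in this excerpt; a purely illustrative sketch of what such a check could verify, based on the values written in Example #1 (the real helper may test more entries):

 def _check_full_hdf_values(self, hdf):
     # Hypothetical reimplementation for illustration only.
     with hdf.open("content") as h:
         self.assertTrue(np.array_equal(h["array"], np.array([1, 2, 3, 4, 5, 6])))
         self.assertEqual(h["dict"]["key_1"], 1)
         self.assertEqual(h["group/some_entry"], "present")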