def setUp(self):
    """Open FileHDFio handles to the empty, full and electronic-structure fixture files."""
    base = os.path.dirname(os.path.abspath(__file__)).replace('\\', '/')
    self.current_dir = base
    self.empty_hdf5 = FileHDFio(file_name=base + '/filehdfio_empty.h5')
    self.full_hdf5 = FileHDFio(file_name=base + '/filehdfio_full.h5')
    self.es_hdf5 = FileHDFio(file_name=base + "/../../static/dft/es_hdf.h5")
def setUpClass(cls):
    """Create shared FileHDFio fixtures and seed the full file's 'content' group."""
    root = os.path.dirname(os.path.abspath(__file__)).replace('\\', '/')
    cls.current_dir = root
    cls.empty_hdf5 = FileHDFio(file_name=root + '/filehdfio_empty.h5')
    cls.full_hdf5 = FileHDFio(file_name=root + '/filehdfio_full.h5')
    cls.es_hdf5 = FileHDFio(file_name=root + "/../../static/dft/es_hdf.h5")
    # Populate the datasets that the read-oriented tests expect to find.
    with cls.full_hdf5.open('content') as hdf:
        hdf['array'] = np.array([1, 2, 3, 4, 5, 6])
        hdf['array_3d'] = np.array([[1, 2, 3], [4, 5, 6]])
        # NOTE(review): ragged nesting — recent NumPy raises unless dtype=object; confirm intended.
        hdf['traj'] = np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9]]])
        hdf['dict'] = {'key_1': 1, 'key_2': 'hallo'}
        hdf['dict_numpy'] = {'key_1': 1, 'key_2': np.array([1, 2, 3, 4, 5, 6])}
def test_to_hdf(self):
    """Write an ElectronicStructure to HDF5, read it back, and compare attributes."""
    if sys.version_info[0] < 3:
        return
    abs_filename = os.path.abspath(
        os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../../static/dft/test_es_hdf.h5",
        )
    )
    hdf_obj = FileHDFio(abs_filename)
    es_obj_old = self.es_list[1]
    es_obj_old.to_hdf(hdf_obj, group_name="written_es")
    es_obj_new = ElectronicStructure()
    es_obj_new.from_hdf(hdf=hdf_obj, group_name="written_es")
    # The raw dataset on disk must match what was written.
    self.assertTrue(
        np.array_equal(
            hdf_obj["written_es/dos/grand_dos_matrix"],
            es_obj_old.grand_dos_matrix,
        )
    )
    # Scalar attributes survive the round trip.
    for attr in ("efermi", "is_metal", "vbm", "cbm"):
        self.assertEqual(getattr(es_obj_old, attr), getattr(es_obj_new, attr))
    # Array attributes survive the round trip.
    self.assertTrue(
        np.array_equal(es_obj_new.grand_dos_matrix, es_obj_old.grand_dos_matrix)
    )
    self.assertTrue(
        np.array_equal(es_obj_new.resolved_densities, es_obj_old.resolved_densities)
    )
def store_custom_output_dict(output_dict):
    """Write every item of *output_dict* into the '<job>/output' group of the job's HDF5 file."""
    cwd = Path(".").cwd()
    folder = cwd.parts[-1]
    hdf = FileHDFio(str(cwd.parents[1] / folder) + ".h5")
    hdf[folder].create_group("output")
    for key, value in output_dict.items():
        hdf[folder + "/output"][key] = value
def store_custom_output_dict(output_dict):
    """Persist *output_dict* under the 'output' group of the current job's HDF5 file."""
    working_dir = Path('.').cwd()
    folder = working_dir.parts[-1]
    hdf_file = str(working_dir.parents[1] / folder)
    hdf = FileHDFio(hdf_file + '.h5')
    hdf[folder].create_group('output')
    output_path = folder + '/output'
    for k in output_dict:
        hdf[output_path][k] = output_dict[k]
def setUpClass(cls):
    """Open the shared HDF5 fixtures once and seed the full file with test datasets."""
    cls.current_dir = os.path.dirname(os.path.abspath(__file__)).replace("\\", "/")
    # One FileHDFio handle per fixture file, attached as class attributes.
    for attr, suffix in (
        ("empty_hdf5", "/filehdfio_empty.h5"),
        ("full_hdf5", "/filehdfio_full.h5"),
        ("es_hdf5", "/../../static/dft/es_hdf.h5"),
    ):
        setattr(cls, attr, FileHDFio(file_name=cls.current_dir + suffix))
    # Datasets the tests read back; written in this insertion order.
    seed = {
        "array": np.array([1, 2, 3, 4, 5, 6]),
        "array_3d": np.array([[1, 2, 3], [4, 5, 6]]),
        # NOTE(review): ragged nesting — recent NumPy raises unless dtype=object; confirm intended.
        "traj": np.array([[[1, 2, 3], [4, 5, 6]], [[7, 8, 9]]]),
        "dict": {"key_1": 1, "key_2": "hallo"},
        "dict_numpy": {"key_1": 1, "key_2": np.array([1, 2, 3, 4, 5, 6])},
    }
    with cls.full_hdf5.open("content") as hdf:
        for key, value in seed.items():
            hdf[key] = value
def get_custom_dict():
    """Load the stored custom input dictionary for the current job.

    Returns a GenericParameters object filled from the job's
    'input/custom_dict/data_dict' table, or None (with a message printed)
    when the job's HDF5 file does not exist.
    """
    folder = Path('.').cwd().parts[-1]
    hdf_file = str(Path('.').cwd().parents[1] / folder) + '.h5'
    if not Path(hdf_file).exists():
        print(hdf_file, 'not found')
        return None
    hdf = FileHDFio(hdf_file)
    data_dict = hdf[folder + '/input/custom_dict/data_dict']
    custom_dict = GenericParameters()
    for key, value in zip(data_dict['Parameter'], data_dict['Value']):
        custom_dict[key] = value
    return custom_dict
def test_from_hdf(self):
    """Store a 2x2x2-repeated fcc Al structure and verify it loads back intact."""
    if sys.version_info[0] < 3:
        return
    abs_filename = os.path.abspath(
        os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../../static/atomistics/test_hdf",
        )
    )
    hdf_obj = FileHDFio(abs_filename)
    pos, cell = generate_fcc_lattice()
    basis_store = Atoms(symbols='Al', positions=pos, cell=cell)
    basis_store.set_repeat([2, 2, 2])
    basis_store.to_hdf(hdf_obj, "simple_structure")
    basis = Atoms().from_hdf(hdf_obj, group_name="simple_structure")
    # 2x2x2 repetition of a single-atom fcc cell yields 8 atoms.
    self.assertEqual(len(basis), 8)
    self.assertEqual(basis.get_majority_species()[1], "Al")
    # fcc Al is space group 225 (Fm-3m).
    self.assertEqual(basis.get_spacegroup()['Number'], 225)
def test_to_hdf(self):
    """Write a repeated fcc Al structure to HDF5 and check positions and equality on reload."""
    if sys.version_info[0] < 3:
        return
    abs_filename = os.path.abspath(
        os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../../static/atomistics/test_hdf",
        )
    )
    hdf_obj = FileHDFio(abs_filename)
    pos, cell = generate_fcc_lattice()
    basis = Atoms(symbols='Al', positions=pos, cell=cell)
    basis.set_repeat([2, 2, 2])
    basis.to_hdf(hdf_obj, "test_structure")
    # Raw positions on disk match the in-memory structure.
    self.assertTrue(
        np.array_equal(hdf_obj["test_structure/positions"], basis.positions)
    )
    # Full round trip reproduces an equal structure.
    basis_new = Atoms().from_hdf(hdf_obj, "test_structure")
    self.assertEqual(basis, basis_new)
def get_custom_dict():
    """Return the job's custom dictionary as GenericParameters, or None if the HDF5 file is absent."""
    cwd = Path(".").cwd()
    folder = cwd.parts[-1]
    hdf_file = str(cwd.parents[1] / folder) + ".h5"
    if Path(hdf_file).exists():
        hdf = FileHDFio(hdf_file)
        table_path = folder + "/input/custom_dict/data_dict"
        custom_dict = GenericParameters()
        parameters = hdf[table_path]["Parameter"]
        values = hdf[table_path]["Value"]
        for k, v in zip(parameters, values):
            custom_dict[k] = v
        return custom_dict
    print(hdf_file, "not found")
    return None
def test_from_hdf(self):
    """Load the same data via the old and new HDF5 layouts and compare the results."""
    abs_filename = os.path.abspath(
        os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "../../static/dft/es_hdf.h5",
        )
    )
    hdf_obj = FileHDFio(abs_filename)
    es_obj_old = ElectronicStructure()
    es_obj_old.from_hdf_old(hdf_obj, "es_old")
    es_obj_new = ElectronicStructure()
    es_obj_new.from_hdf(hdf=hdf_obj, group_name="es_new")
    # Scalar properties must agree across both layouts.
    for attr in ("efermi", "is_metal", "vbm", "cbm"):
        self.assertEqual(getattr(es_obj_old, attr), getattr(es_obj_new, attr))
    self.assertTrue(
        np.array_equal(es_obj_new.grand_dos_matrix, es_obj_old.grand_dos_matrix)
    )
def create_table(self, enforce_update=False, level=3, file=None, job_status_list=None):
    """Build or incrementally update the table DataFrame (self._df).

    Inspects the project's jobs, applies the user/system functions registered
    on ``self.add``, and appends new rows (new job ids) and new columns
    (newly registered functions) to ``self._df``.

    Args:
        enforce_update (bool): recompute even when nothing appears to have changed.
        level (int): recursion depth passed through to ``_iterate_over_job_lst``.
        file (FileHDFio/None): existing table HDF5 handle; opened from the
            project path when None and a stored table file exists.
        job_status_list (list/None): job states to include; defaults to ["finished"].
    """
    skip_table_update = False
    filter_funct = self.filter_function
    if job_status_list is None:
        job_status_list = ["finished"]
    if self._is_file():
        # Incremental path: only consider jobs/functions not already stored.
        if file is None:
            file = FileHDFio(
                file_name=self._project.path + self.name + ".h5", h5_path="/"
            )
        temp_user_function_dict, temp_system_function_dict = self._get_data_from_hdf5(
            hdf=file
        )
        job_stored_ids = self._get_job_ids()
        job_update_lst = [
            self._project.inspect(job_id)
            for job_id in self._project.get_job_ids()
            if job_id not in job_stored_ids
        ]
        job_update_lst = [
            job
            for job in job_update_lst
            if job.status in job_status_list and filter_funct(job)
        ]
        # User functions added since the table was last stored.
        keys_update_user_lst = [
            key
            for key in self.add._user_function_dict.keys()
            if key not in temp_user_function_dict.keys()
        ]
        # System functions enabled now but not active in the stored table.
        keys_update_system_lst = [
            k
            for k, v in self.add._system_function_dict.items()
            if v and not temp_system_function_dict[k]
        ]
        if (
            len(job_update_lst) == 0
            # BUG FIX: was `keys_update_system_lst == 0` — a list is never
            # equal to 0, so the skip branch was unreachable.
            and len(keys_update_user_lst) == 0
            and len(keys_update_system_lst) == 0
            and not enforce_update
        ):
            skip_table_update = True
    else:
        # Fresh table: process every matching job.
        job_update_lst = [
            self._project.inspect(job_id)
            for job_id in self._project.get_job_ids()
        ]
        job_update_lst = [
            job
            for job in job_update_lst
            if job.status in job_status_list and filter_funct(job)
        ]
        keys_update_user_lst, keys_update_system_lst = [], []
    # New rows: run all registered functions over the new jobs.
    if not skip_table_update and len(job_update_lst) != 0:
        df_new_ids = self._iterate_over_job_lst(
            job_lst=job_update_lst, function_lst=self.add._function_lst, level=level
        )
    else:
        df_new_ids = pandas.DataFrame({})
    # New columns: run only the newly added functions over the stored jobs.
    if not skip_table_update and (
        len(keys_update_user_lst) != 0 or len(keys_update_system_lst) != 0
    ):
        job_update_lst = [
            self._project.inspect(job_id) for job_id in self._get_job_ids()
        ]
        job_update_lst = [
            job
            for job in job_update_lst
            if job is not None
            and job.status in job_status_list
            and filter_funct(job)
        ]
        function_lst = [
            v
            for k, v in self.add._user_function_dict.items()
            # BUG FIX: was `k in keys_update_system_lst` — user keys can never
            # appear in the system-key list, so new user functions were
            # silently dropped from the column update.
            if k in keys_update_user_lst
        ] + [
            funct
            for funct in self.add._system_function_lst
            if funct.__name__ in keys_update_system_lst
        ]
        df_new_keys = self._iterate_over_job_lst(
            job_lst=job_update_lst, function_lst=function_lst, level=level
        )
    else:
        df_new_keys = pandas.DataFrame({})
    # Merge: new columns attach side-by-side; new rows append below.
    if len(self._df) > 0 and len(df_new_keys) > 0:
        self._df = pandas.concat(
            [self._df, df_new_keys], axis=1, sort=False
        ).reset_index(drop=True)
    if len(self._df) > 0 and len(df_new_ids) > 0:
        self._df = pandas.concat([self._df, df_new_ids], sort=False).reset_index(
            drop=True
        )
    elif len(df_new_ids) > 0:
        self._df = df_new_ids
def from_hdf(self):
    """Restore the table's registered functions from the project HDF5 file."""
    hdf_handle = FileHDFio(
        file_name=self._project.path + self.name + ".h5",
        h5_path="/",
    )
    self.add._from_hdf(hdf_handle)
def from_hdf(self):
    """Reload the stored add-function configuration from this table's HDF5 file."""
    table_file = self._project.path + self.name + '.h5'
    self.add._from_hdf(FileHDFio(file_name=table_file, h5_path='/'))