def _hdf_extract(self,filename,key=None):
    """Populate coordinate data on ``self`` from an HDF5 file.

    Handles two on-disk layouts:

    * current layout: ``key`` holds ``coord_centered`` / ``coord_staggered``
      groups plus a ``cart_mode`` attribute;
    * legacy layout (detected via ``self._legacy_mode``): the *parent* of
      ``key`` holds a ``CoordDF`` (or ``coordDF``) group, from which the
      staggered coordinates are reconstructed.

    Parameters
    ----------
    filename : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 key of the coordinate group, by default the class name.

    Raises
    ------
    ValueError
        In legacy mode, when neither ``CoordDF`` nor ``coordDF`` is present
        under the parent key.
    """
    if key is None:
        key = self.__class__.__name__
    if self._legacy_mode(filename,key):
        key_split = key.split("/")
        # NOTE(review): os.path.join uses the OS path separator, so on
        # Windows this yields '\'-separated keys, which is not a valid
        # HDF5 path; it also raises TypeError when `key` contains no '/'.
        # '/'.join(key_split[:-1]) would be the portable form — confirm.
        parent_key = os.path.join(*key_split[:-1])
        hdf_obj = cd.hdfHandler(filename,mode='r',key=parent_key)
        msg = "Legacy mode used for coorddata extraction"
        if 'CoordDF' in hdf_obj.keys():
            self.coord_centered = cd.coordstruct.from_hdf(filename,key=parent_key+"/CoordDF")
            # presumably 1 selects the Cartesian domain case — TODO confirm
            self._domain_handler = DomainHandler(1)
            # legacy files store only centered coords; rebuild staggered ones
            self.coord_staggered = self._create_staggered_legacy(self.coord_centered)
        elif 'coordDF' in hdf_obj.keys():
            # older files use a lowercase group name
            self.coord_centered = cd.coordstruct.from_hdf(filename,key=parent_key+"/coordDF")
            self._domain_handler = DomainHandler(1)
            self.coord_staggered = self._create_staggered_legacy(self.coord_centered)
        else:
            msg = ("Either the wrong part of the HDF structure"
                   " has been accessed or legacy mode not handled."
                   " For information the following keys were "
                   f"available {list(hdf_obj.keys())}.")
            raise ValueError(msg)
        # warn that the deprecated layout was read
        warnings.warn(msg)
    else:
        hdf_obj = cd.hdfHandler(filename,mode='r',key=key)
        hdf_obj.check_type_id(self.__class__)
        # cart_mode attribute distinguishes Cartesian (1) from the
        # alternative (presumably cylindrical, 2) domain handling
        iCase = 1 if hdf_obj.attrs['cart_mode'] else 2
        self._domain_handler = DomainHandler(iCase)
        self.coord_centered = cd.coordstruct.from_hdf(filename,key=key+"/coord_centered")
        self.coord_staggered = cd.coordstruct.from_hdf(filename,key=key+"/coord_staggered")
def _extract_moving_wall(self, file_name, key=None):
    """Read and return a copy of the "moving_wall" dataset.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default 'CHAPSim_meta'.
    """
    group_key = 'CHAPSim_meta' if key is None else key
    reader = cd.hdfHandler(file_name, 'r', key=group_key)
    return reader["moving_wall"][:]
def save_hdf(self, file_name, write_mode, key=None):
    """Save this instance, plus the moving-wall data, to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode passed to the parent saver (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default 'CHAPSim_meta'.
    """
    if key is None:
        key = 'CHAPSim_meta'
    # parent class writes its own contents first
    super().save_hdf(file_name, write_mode, key)
    writer = cd.hdfHandler(file_name, 'a', key=key)
    writer.create_dataset("moving_wall", data=self.__moving_wall)
def save_hdf(self, file_name, write_mode, key=None):
    """Save this instance, plus the 'plane' attribute, to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode passed to the parent saver (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    # FIX: `key` previously had no default even though the body handles
    # None, unlike every sibling save_hdf; a default of None is
    # backward-compatible and consistent with the rest of the file.
    if key is None:
        key = self.__class__.__name__
    super().save_hdf(file_name, write_mode, key=key)
    hdf_obj = cd.hdfHandler(file_name, 'a', key)
    # stored as bytes; the paired _hdf_extract decodes with utf-8
    hdf_obj.attrs["plane"] = self._plane.encode('utf-8')
def _hdf_extract(self, file_name, key=None):
    """Load this instance, plus the 'plane' attribute, from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    super()._hdf_extract(file_name, key)
    # BUG FIX: previously opened with key=None (file root), but the paired
    # save_hdf writes the 'plane' attribute on the group at `key`, so the
    # attribute could not be found there.
    hdf_obj = cd.hdfHandler(file_name, 'r', key=key)
    self._plane = hdf_obj.attrs['plane'].decode('utf-8')
def _hdf_extract(self, file_name, key=None):
    """Load the POD coefficients from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default the class name.
    """
    group_key = self.__class__.__name__ if key is None else key
    reader = cd.hdfHandler(file_name, 'r', key=group_key)
    reader.check_type_id(self.__class__)
    self._POD_coeffs = reader["POD_coeffs"][:]
def save_hdf(self, file_name, write_mode, key=None):
    """Save the fluctuation data and its metadata to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode for the handler (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    writer = cd.hdfHandler(file_name, write_mode, key=key)
    writer.set_type_id(self.__class__)
    # sub-objects are appended under the same group
    self._meta_data.save_hdf(file_name, 'a', f"{key}/meta_data")
    self.fluctDF.to_hdf(file_name, key=f"{key}/fluctDF", mode='a')
def _hdf_extract(self, file_name, key=None):
    """Load instantaneous flow data from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default 'CHAPSim_Inst'.
    """
    if key is None:
        key = 'CHAPSim_Inst'
    reader = cd.hdfHandler(file_name, 'r', key=key)
    reader.check_type_id(self.__class__)
    # metadata must be read first: the coordinate data passed below
    # presumably comes from it — verify against _coorddata definition
    self._meta_data = self._module._meta_class.from_hdf(file_name, f"{key}/meta_data")
    self.InstDF = cd.flowstruct3D.from_hdf(file_name,
                                           coorddata=self._coorddata,
                                           key=f"{key}/InstDF")
def to_hdf(self, filename, mode, key=None):
    """Write the centered and staggered coordinates to an HDF5 file.

    Parameters
    ----------
    filename : str
        Path to the HDF5 file.
    mode : str
        Write mode for the coordinate structures (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    self.coord_centered.to_hdf(filename, key=key + "/coord_centered", mode=mode)
    self.coord_staggered.to_hdf(filename, key=key + "/coord_staggered", mode=mode)
    # BUG FIX: the handler was opened with mode='r' and then written to;
    # attribute assignment requires a writable handle ('a' appends without
    # clobbering the coordinate groups just written above).
    hdf_obj = cd.hdfHandler(filename, mode='a', key=key)
    # cart_mode is True for Cartesian domains, False for cylindrical
    cart_mode = not self._domain_handler.is_cylind
    hdf_obj.attrs['cart_mode'] = cart_mode
def save_hdf(self, file_name, write_mode, key=None):
    """Save the POD eigenvalues, modes, and averaged data to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode for the handler (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    writer = cd.hdfHandler(file_name, write_mode, key=key)
    writer.set_type_id(self.__class__)
    writer.create_dataset("eig_values", data=self._eig_values)
    # sub-objects are appended under the same group
    self.POD_modesDF.to_hdf(file_name, key=f"{key}/POD_modesDF", mode='a')
    self.avg_data.save_hdf(file_name, 'a', f"{key}/avg_data")
def save_hdf(self, file_name, write_mode, key=None):
    """Save the POD coefficients and the underlying POD object.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode for the handler (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    writer = cd.hdfHandler(file_name, write_mode, key=key)
    writer.set_type_id(self.__class__)
    writer.create_dataset("POD_coeffs", data=np.array(self.POD_coeffs))
    # the POD object is stored under a sub-key named after its own class
    pod_key = f"{key}/{self._POD.__class__.__name__}"
    self._POD.save_hdf(file_name, 'a', pod_key)
def _hdf_extract(self, file_name, key=None):
    """Load the metadata and coordinate data from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default 'CHAPSim_meta'.
    """
    if key is None:
        key = 'CHAPSim_meta'
    reader = cd.hdfHandler(file_name, mode='r', key=key)
    reader.check_type_id(self.__class__)
    self.metaDF = cd.metastruct.from_hdf(file_name, key=f"{key}/metaDF")
    self._coorddata = coorddata.from_hdf(file_name, key=f"{key}/coorddata")
def save_hdf(self, file_name, write_mode, key=None):
    """Save the metadata and coordinate data to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode for the handler (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    writer = cd.hdfHandler(file_name, write_mode, key=key)
    writer.set_type_id(self.__class__)
    # sub-objects are appended under the same group
    self.metaDF.to_hdf(file_name, key=f"{key}/metaDF", mode='a')
    self._coorddata.to_hdf(file_name, key=f"{key}/coorddata", mode='a')
def _hdf_extract(self, file_name, key=None):
    """Load quadrant analysis results from an HDF5 file.

    Falls back to the legacy group name 'CHAPSim_Quad_Anal' (with a
    warning) when the class-named key is absent.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    try:
        reader = cd.hdfHandler(file_name, 'r', key=key)
    except KeyError:
        warnings.warn(f"Using legacy default key for class {key}")
        key = 'CHAPSim_Quad_Anal'
        reader = cd.hdfHandler(file_name, 'r', key=key)
    reader.check_type_id(self.__class__)
    self._meta_data = self._module.CHAPSim_meta.from_hdf(
        file_name, f"{key}/meta_data")
    self._avg_data = self._module._avg_io_class.from_hdf(
        file_name, f"{key}/avg_data")
    self.QuadAnalDF = cd.datastruct.from_hdf(
        file_name, key=f"{key}/QuadAnalDF")
def _hdf_extract(self, file_name, key=None):
    """Load the POD eigenvalues, modes, and averaged data from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default "POD".
    """
    if key is None:
        key = "POD"
    hdf_obj = cd.hdfHandler(file_name, 'r', key=key)
    hdf_obj.check_type_id(self.__class__)
    # BUG FIX: "/eig_values" is an absolute HDF5 path resolved from the
    # file root; save_hdf writes "eig_values" inside the group at `key`,
    # so the relative name must be used here.
    self._eig_values = hdf_obj["eig_values"][:]
    self.POD_modesDF = cd.datastruct.from_hdf(file_name, key=key + '/POD_modesDF')
    self.avg_data = self._module._avg_class.from_hdf(file_name, key=key + "/avg_data")
    # metadata is shared with the averaged data rather than stored twice
    self._meta_data = self.avg_data._meta_data
def save_hdf(self, file_name, write_mode, key=None):
    """Save the autocovariance data and supporting objects to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    write_mode : str
        Write mode for the handler (e.g. 'a' or 'w').
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    hdf_obj = cd.hdfHandler(file_name, write_mode, key=key)
    hdf_obj.set_type_id(self.__class__)
    # FIX: np.string_ was removed in NumPy 2.0; np.bytes_ is the
    # long-standing equivalent alias and produces identical bytes
    # (decoded by the paired _hdf_extract via np.char.decode).
    hdf_obj.attrs['comp'] = np.array([np.bytes_(x) for x in self.comp])
    self._meta_data.save_hdf(file_name, 'a', key + '/meta_data')
    self._avg_data.save_hdf(file_name, 'a', key + '/avg_data')
    self.autocorrDF.to_hdf(file_name, key=key + '/autocorrDF', mode='a')
def _legacy_mode(self,filename, key):
    """Return True when the file at ``key`` uses the legacy coordinate layout.

    Legacy means either ``key`` itself is absent from its parent group, or
    the group lacks the ``coord_centered`` / ``coord_staggered`` sub-groups
    that the current layout writes.

    Parameters
    ----------
    filename : str
        Path to the HDF5 file.
    key : str
        HDF5 key of the coordinate group.
    """
    key_split = key.split("/")
    # NOTE(review): os.path.join uses the OS path separator, so on Windows
    # this yields '\'-separated keys, which is not a valid HDF5 path; it
    # also raises TypeError when `key` contains no '/'.
    # '/'.join(key_split[:-1]) would be the portable form — confirm.
    parent_key = os.path.join(*key_split[:-1])
    end_key = key_split[-1]
    hdf_obj = cd.hdfHandler(filename,mode='r',key=parent_key)
    # sub-groups that only the current (non-legacy) layout contains
    lower_keys = ["coord_centered","coord_staggered"]
    if not hdf_obj.check_key(end_key):
        return True
    lower_keys_present = all([lkey in hdf_obj[end_key].keys() for lkey in lower_keys])
    if not lower_keys_present:
        return True
    return False
def _hdf_extract(self, file_name, key=None):
    """Load the autocovariance data and supporting objects from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default 'CHAPSim_autocov_tg'.
    """
    if key is None:
        key = 'CHAPSim_autocov_tg'
    reader = cd.hdfHandler(file_name, 'r', key=key)
    reader.check_type_id(self.__class__)
    # component labels were stored as bytes; decode back to str
    self.comp = tuple(np.char.decode(reader.attrs["comp"][:]))
    self.autocorrDF = cd.datastruct.from_hdf(file_name, key=f"{key}/autocorrDF")
    self._meta_data = self._module._meta_class.from_hdf(file_name, f"{key}/meta_data")
    self._avg_data = self._module._avg_tg_base_class.from_hdf(file_name, f"{key}/avg_data")
def save_hdf(self, file_name, mode='a', key=None):
    """Save the joint-PDF arrays and supporting objects to an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    mode : str, optional
        Write mode for the handler, by default 'a'.
    key : str, optional
        HDF5 group key, by default the class name.
    """
    if key is None:
        key = self.__class__.__name__
    writer = cd.hdfHandler(file_name, mode, key=key)
    writer.set_type_id(self.__class__)
    # stored as bytes; the paired _hdf_extract decodes with utf-8
    writer.attrs["y_mode"] = self._y_mode.encode('utf-8')
    writer.create_dataset("x_loc_norm", data=self._x_loc_norm)
    # the three datastructs share a save pattern under the same group
    for attr in ('pdf_arrayDF', 'u_arrayDF', 'v_arrayDF'):
        getattr(self, attr).to_hdf(file_name, key=f"{key}/{attr}", mode='a')
    self._meta_data.save_hdf(file_name, 'a', f"{key}/meta_data")
    self.avg_data.save_hdf(file_name, 'a', f"{key}/avg_data")
def _hdf_extract(self, file_name, key=None):
    """Load the joint-PDF arrays and supporting objects from an HDF5 file.

    Parameters
    ----------
    file_name : str
        Path to the HDF5 file.
    key : str, optional
        HDF5 group key, by default 'CHAPSim_joint_PDF_io'.
    """
    if key is None:
        key = 'CHAPSim_joint_PDF_io'
    reader = cd.hdfHandler(file_name, 'r', key=key)
    reader.check_type_id(self.__class__)
    self._y_mode = reader.attrs['y_mode'].decode('utf-8')
    self._x_loc_norm = reader['x_loc_norm'][:]
    self._meta_data = self._module._meta_class.from_hdf(file_name, f"{key}/meta_data")
    self.avg_data = self._module.CHAPSim_AVG_io.from_hdf(file_name, f"{key}/avg_data")
    # the three datastructs share a load pattern under the same group
    for attr in ('pdf_arrayDF', 'u_arrayDF', 'v_arrayDF'):
        setattr(self, attr, cd.datastruct.from_hdf(file_name, key=f"{key}/{attr}"))
def save_hdf(self, file_name, write_mode, key=None):
    """Save the instance of the class to an hdf5 file.

    Parameters
    ----------
    file_name : str
        File path to existing hdf5 file.
    write_mode : str
        The write mode, for example append "a" or "w"; see the
        documentation for h5py.File.
    key : str, optional
        Path-like hdf5 key to access the data within the file,
        by default None (class name).
    """
    if key is None:
        key = self.__class__.__name__
    writer = cd.hdfHandler(file_name, write_mode, key=key)
    writer.set_type_id(self.__class__)
    # sub-objects are appended under the same group
    self._meta_data.save_hdf(file_name, 'a', key=f"{key}/meta_data")
    self.InstDF.to_hdf(file_name, key=f"{key}/InstDF", mode='a')