def __init__(self, session_name, cache_override=False, cache_location=".", verbose=False):
    """Load an OctMI session and attach an optional HDF5 cache store.

    Parameters
    ----------
    session_name : str
        Name of the OctMI session to read.
    cache_override : bool
        If True, any existing cache file is truncated (mode "w");
        otherwise it is opened for read/write (mode "r+").
    cache_location : str
        Directory under which the "cache" sub-directory lives.
    verbose : bool
        If True, print information while loading.
    """
    self.session_name = session_name
    # read_OctMI_session is a project-level helper; it returns a dict of
    # logged variables, with the time base stored under key "t".
    self.variables = read_OctMI_session(session_name, verbose=verbose)
    self.time = self.variables.pop("t")
    cache_dir = os.path.join(os.path.realpath(cache_location), "cache")
    self.cachestorename = os.path.join(cache_dir, os.path.basename(self.session_name)) + ".hdf5"
    self.cachemode = "w" if cache_override else "r+"
    try:
        self.cachestore = h5py.File(self.cachestorename, self.cachemode)
    except IOError:
        # No cache file present (and mode "r+" cannot create one);
        # the session still works, just without a cache.
        self.has_cachestore = False
    else:
        if verbose:
            cprint.yellow("*** Cache store found at " + self.cachestorename)
        self.has_cachestore = True
def __init__(
    self, session_name, cache_override=False, cache_location=".", verbose=True
):
    """Open a previously saved pymanip session stored in an HDF5 file.

    Loads the time dataset, the logged variables group, optional session
    parameters (file attributes) and optional acquisition datasets, then
    attaches an HDF5 cache store next to the session file.

    Raises
    ------
    RuntimeError
        If the HDF5 file does not contain a "time" dataset (i.e. it is
        not a pymanip session file).
    """
    super(SavedSession, self).__init__(session_name)
    self.store = h5py.File(self.storename, "r")
    try:
        self.dset_time = self.store["time"]
    except KeyError:
        print("The file '" + self.storename + "' is not a pymanip session file.")
        raise RuntimeError("Wrong hdf5 data")
    self.grp_variables = self.store["variables"]
    self.verbose = verbose
    try:
        self.parameters = self.store.attrs
    except Exception:
        self.parameters_defined = False
    else:
        self.parameters_defined = True
    try:
        self.grp_datasets = self.store["datasets"]
    except Exception:
        # Older session files may have no "datasets" group at all.
        self.grp_datasets_defined = False
    else:
        self.grp_datasets_defined = True
    self.opened = True
    if verbose:
        print("Loading saved session from file", self.storename)
    total_size = self.dset_time.len()
    if total_size > 0:
        first_t = self.dset_time[0]
        last_t = self.dset_time[total_size - 1]
        start_string = time.strftime(dateformat, time.localtime(first_t))
        end_string = time.strftime(dateformat, time.localtime(last_t))
        if verbose:
            cprint.blue("*** Start date: " + start_string)
            cprint.blue("*** End date: " + end_string)
    elif not self.grp_datasets_defined:
        # Neither logged variables nor acquisition datasets: warn the user.
        if verbose:
            cprint.red("No logged variables")
    if self.grp_datasets_defined:
        timestamp_string = time.strftime(
            dateformat, time.localtime(self.grp_datasets.attrs["timestamp"])
        )
        if verbose:
            cprint.blue("*** Acquisition timestamp " + timestamp_string)
    self.cachestorename = os.path.join(
        os.path.realpath(cache_location), "cache", os.path.basename(self.storename)
    )
    self.cachemode = "w" if cache_override else "r+"
    try:
        self.cachestore = h5py.File(self.cachestorename, self.cachemode)
    except IOError:
        # Missing cache file is not an error; caching is simply disabled.
        self.has_cachestore = False
    else:
        if verbose:
            cprint.yellow("*** Cache store found at " + self.cachestorename)
        self.has_cachestore = True
def cache(self, name, dict_caller=None):
    """Save a variable (or an iterable of variable names) from the caller's
    local scope into the HDF5 cache store.

    Parameters
    ----------
    name : str or iterable of str
        Variable name(s) to save. Non-string input is treated as an
        iterable of names and processed recursively.
    dict_caller : dict, optional
        Mapping of names to values. Defaults to the caller's f_locals,
        obtained via inspect.stack().

    The cache store (and its parent directory) is created on first use.
    Scalars are stored as length-1 resizable datasets; sequences keep
    their length and existing datasets are resized to match.
    """
    if dict_caller is None:
        stack = inspect.stack()
        try:
            dict_caller = stack[1][0].f_locals
        finally:
            # Break the reference cycle created by holding a frame.
            del stack
    if not isinstance(name, str):
        # An iterable of variable names: cache each one in turn.
        for var in name:
            self.cache(var, dict_caller)
        return
    if not self.has_cachestore:
        try:
            os.mkdir(os.path.dirname(self.cachestorename))
        except OSError:
            # Directory most likely exists already; h5py.File will
            # report any real problem below.
            pass
        try:
            self.cachestore = h5py.File(self.cachestorename, "w")
            self.has_cachestore = True
            cprint.yellow("*** Cache store created at " + self.cachestorename)
        except IOError as ioe:
            self.has_cachestore = False
            cprint.red("Cannot create cache store")
            # Fix: Python 3 exceptions have no .message attribute;
            # str(ioe) gives the equivalent human-readable text.
            cprint.red(str(ioe))
    if self.has_cachestore:
        cprint.yellow("Saving " + name + " in cache")
        try:
            new_length = len(dict_caller[name])
        except TypeError:
            # Scalar value: store it as a length-1 dataset.
            new_length = 1
        if name in self.cachestore.keys():
            # Dataset exists: resize if needed, then overwrite in place.
            if len(self.cachestore[name]) != new_length:
                self.cachestore[name].resize((new_length,))
            self.cachestore[name][:] = dict_caller[name]
        else:
            if new_length > 1:
                self.cachestore.create_dataset(
                    name, chunks=True, maxshape=(None,), data=dict_caller[name]
                )
            else:
                self.cachestore.create_dataset(
                    name, chunks=True, maxshape=(None,), shape=(new_length,)
                )
                self.cachestore[name][:] = dict_caller[name]
def cachedvalue(self, varname):
    """Retrieve a variable previously saved in the cache store.

    Parameters
    ----------
    varname : str
        Name of the cached variable.

    Returns
    -------
    The stored value for a plain dataset, or a numpy array assembled
    from items saved under sub-keys "0", "1", ... for a cached list.

    Raises
    ------
    KeyError
        If no cache store is attached.
    """
    if self.has_cachestore:
        if self.verbose:
            cprint.yellow("Retrieving " + varname + " from cache")
        content = self.cachestore[varname]
        # Datasets expose a .shape attribute, groups do not. The former
        # check hasattr(content, "value") broke with h5py >= 3.0, where
        # Dataset.value was removed, sending every dataset down the
        # group branch.
        if hasattr(content, "shape"):
            return content[()]
        else:
            # Group written by cache(): items stored under "0", "1", ...
            result = list()
            i = 0
            while True:
                try:
                    result.append(content[str(i)][()])
                    i = i + 1
                except KeyError:
                    # No more items.
                    break
            return np.array(result)
    else:
        raise KeyError
def cachedvalue(self, varname):
    """Return the cached value of *varname*.

    Parameters
    ----------
    varname : str
        Name of the cached variable.

    Returns
    -------
    The stored dataset value, or None if no cache store is attached.
    """
    if self.has_cachestore:
        # Typo fixed in log message ("Retriving" -> "Retrieving").
        cprint.yellow("Retrieving " + varname + " from cache")
        # Fix: Dataset.value was removed in h5py 3.0; indexing with an
        # empty tuple is the supported equivalent to read the whole dataset.
        return self.cachestore[varname][()]
    else:
        return None
def cache(self, name, dict_caller=None):
    """Save a variable (or an iterable of variable names) from the caller's
    local scope into the HDF5 cache store.

    Parameters
    ----------
    name : str or iterable of str
        Variable name(s) to save. Non-string input is treated as an
        iterable of names and processed recursively.
    dict_caller : dict, optional
        Mapping of names to values. Defaults to the caller's f_locals,
        obtained via inspect.stack().

    Lists of objects are stored as an HDF5 group named *name*, with one
    dataset per item under keys "0", "1", ... (gzip-compressed unless the
    item is a plain int/float). Other values are stored as resizable
    datasets, scalars as length-1 datasets.
    """
    if dict_caller is None:
        stack = inspect.stack()
        try:
            dict_caller = stack[1][0].f_locals
        finally:
            # Break the reference cycle created by holding a frame.
            del stack
    if not isinstance(name, str):
        # An iterable of variable names: cache each one in turn.
        for var in name:
            self.cache(var, dict_caller)
        return
    if not self.has_cachestore:
        try:
            os.mkdir(os.path.dirname(self.cachestorename))
        except OSError:
            # Directory most likely exists already; h5py.File will
            # report any real problem below.
            pass
        try:
            self.cachestore = h5py.File(self.cachestorename, "w")
            self.has_cachestore = True
            cprint.yellow("*** Cache store created at " + self.cachestorename)
        except IOError as ioe:
            self.has_cachestore = False
            cprint.red("Cannot create cache store")
            # Fix: Python 3 exceptions have no .message attribute;
            # str(ioe) gives the equivalent human-readable text.
            cprint.red(str(ioe))
    if self.has_cachestore:
        cprint.yellow("Saving " + name + " in cache")
        if isinstance(dict_caller[name], list):
            # Saving a list of objects: create a group named *name*
            # and store one dataset per item. If the group already
            # exists, delete it and recreate it from scratch.
            try:
                del self.cachestore[name]
            except KeyError:
                pass
            grp = self.cachestore.create_group(name)
            for itemnum, item in enumerate(dict_caller[name]):
                if not isinstance(item, (int, float)):
                    grp.create_dataset(
                        str(itemnum), chunks=True, data=item, compression="gzip"
                    )
                else:
                    grp.create_dataset(str(itemnum), data=item)
        else:
            try:
                new_length = len(dict_caller[name])
            except TypeError:
                # Scalar value: store it as a length-1 dataset.
                new_length = 1
            if name in self.cachestore.keys():
                # Dataset exists: resize if needed, then overwrite in place.
                if len(self.cachestore[name]) != new_length:
                    self.cachestore[name].resize((new_length,))
                self.cachestore[name][:] = dict_caller[name]
            else:
                if new_length > 1:
                    self.cachestore.create_dataset(
                        name, chunks=True, maxshape=(None,), data=dict_caller[name]
                    )
                else:
                    self.cachestore.create_dataset(
                        name, chunks=True, maxshape=(None,), shape=(new_length,)
                    )
                    self.cachestore[name][:] = dict_caller[name]