def save_dataset(self, data_name, dict_caller=None):
    """Store the variable called *data_name* into the "datasets" group.

    :param data_name: name of the variable to save
    :param dict_caller: namespace in which the value is looked up;
        defaults to the caller's local variables.
    :raises NameError: if the dataset already exists and
        ``allow_override_datasets`` is not set.
    """
    if dict_caller is None:
        # Fetch the caller's locals; drop the stack record right away
        # to break reference cycles (as recommended by the inspect docs).
        stack = inspect.stack()
        try:
            dict_caller = stack[1][0].f_locals
        finally:
            del stack
    if not self.grp_datasets_defined:
        # Create the "datasets" group lazily on first use.
        self.grp_datasets = self.store.create_group("datasets")
        self.grp_datasets_defined = True
    exists = data_name in self.grp_datasets.keys()
    if not exists:
        # New dataset: stamp the group and create a resizable dataset
        # (chunked, unlimited max shape).
        self.grp_datasets.attrs["timestamp"] = time.time()
        self.grp_datasets.create_dataset(
            data_name, chunks=True, maxshape=(None,), data=dict_caller[data_name]
        )
        return
    if self.allow_override_datasets:
        # Override in place, resizing first if the length changed.
        new_length = len(dict_caller[data_name])
        dset = self.grp_datasets[data_name]
        if len(dset) != new_length:
            dset.resize((new_length,))
        dset[:] = dict_caller[data_name]
        self.grp_datasets.attrs["timestamp"] = time.time()
        cprint.red("Warning: overriding existing dataset")
        return
    raise NameError(
        "Dataset is already defined. Use allow_override_datasets to allow override of existing saved datasets."
    )
def log_addline(self, timestamp=None, dict_caller=None):
    """Append one sample (timestamp + every logged variable) to the store.

    The "time" dataset and every dataset of the "variables" group are
    grown by one element, and the same row is mirrored to the ASCII
    ``.dat`` file.

    :param timestamp: time of the sample; defaults to ``time.time()``
    :param dict_caller: namespace in which variable values are looked
        up; defaults to the caller's local variables.

    BUG FIX: the arguments are now tested with ``is None`` instead of
    truthiness, so an explicitly-passed empty dict or a 0.0 timestamp
    is honored (consistent with ``save_dataset`` and ``cache``).
    """
    if dict_caller is None:
        stack = inspect.stack()
        try:
            dict_caller = stack[1][0].f_locals
        finally:
            del stack
    newsize = self.dset_time.len() + 1
    self.dset_time.resize((newsize,))
    if timestamp is None:
        timestamp = time.time()
    self.dset_time[newsize - 1] = timestamp
    self.datfile.write("%f" % self.dset_time[newsize - 1])
    for varname in self.grp_variables.keys():
        d = self.grp_variables[varname]
        d.resize((newsize,))
        try:
            d[newsize - 1] = dict_caller[varname]
            self.datfile.write(" %f" % dict_caller[varname])
        except Exception:
            # Best-effort: warn and store 0.0 so the datasets stay the
            # same length. NOTE(review): the .dat column is skipped in
            # this case, which can misalign the ASCII file.
            cprint.red("Variable is not defined: " + varname)
            d[newsize - 1] = 0.0
    self.datfile.write("\n")
    self.datfile.flush()
    self.store.flush()
def __init__(
    self, session_name, cache_override=False, cache_location=".", verbose=True
):
    """Open a previously saved pymanip session (read-only).

    :param session_name: base name of the session (used by the parent
        class to build ``self.storename``)
    :param cache_override: if True, any existing cache file is
        overwritten ("w" mode) instead of reused ("r+")
    :param cache_location: directory under which the "cache"
        sub-directory is looked up
    :param verbose: print summary information while loading
    :raises RuntimeError: if the HDF5 file has no "time" dataset,
        i.e. it is not a pymanip session file.
    """
    super(SavedSession, self).__init__(session_name)
    # Open the main HDF5 store read-only.
    self.store = h5py.File(self.storename, "r")
    try:
        self.dset_time = self.store["time"]
    except KeyError:
        print("The file '" + self.storename + "' is not a pymanip session file.")
        raise RuntimeError("Wrong hdf5 data")
    self.grp_variables = self.store["variables"]
    self.verbose = verbose
    try:
        # Session parameters are stored as HDF5 attributes on the root.
        self.parameters = self.store.attrs
        self.parameters_defined = True
    except Exception:
        self.parameters_defined = False
        pass
    try:
        # The "datasets" group is optional (only present if datasets
        # were saved during acquisition).
        self.grp_datasets = self.store["datasets"]
        self.grp_datasets_defined = True
    except Exception:
        self.grp_datasets_defined = False
        pass
    self.opened = True
    if verbose:
        print("Loading saved session from file", self.storename)
    total_size = self.dset_time.len()
    if total_size > 0:
        # Summarize the logged time range.
        start_t = self.dset_time[0]
        end_t = self.dset_time[total_size - 1]
        start_string = time.strftime(dateformat, time.localtime(start_t))
        end_string = time.strftime(dateformat, time.localtime(end_t))
        if verbose:
            cprint.blue("*** Start date: " + start_string)
            cprint.blue("*** End date: " + end_string)
    elif not self.grp_datasets_defined:
        # No time samples and no datasets: nothing was logged at all.
        if verbose:
            cprint.red("No logged variables")
    if self.grp_datasets_defined:
        timestamp_string = time.strftime(
            dateformat, time.localtime(self.grp_datasets.attrs["timestamp"])
        )
        if verbose:
            cprint.blue("*** Acquisition timestamp " + timestamp_string)
    # The cache store lives in a "cache" sub-directory of cache_location,
    # under the same base filename as the session store.
    self.cachestorename = os.path.join(
        os.path.realpath(cache_location), "cache", os.path.basename(self.storename)
    )
    if cache_override:
        self.cachemode = "w"
    else:
        self.cachemode = "r+"
    try:
        self.cachestore = h5py.File(self.cachestorename, self.cachemode)
        if verbose:
            cprint.yellow("*** Cache store found at " + self.cachestorename)
        self.has_cachestore = True
    except IOError:
        # A missing cache file is not an error: cache() creates it
        # on demand.
        self.has_cachestore = False
        pass
def log(self, varname):
    """Return the logged values of *varname*.

    "Time", "time" and "t" all return the time vector; "?" prints the
    list of logged variables; an unknown name prints a warning and
    returns None.
    """
    time_aliases = ("Time", "time", "t")
    if varname in time_aliases:
        return self.time
    if varname == "?":
        print(self.log_variable_list())
        return
    if varname in self.variables:
        return self.variables[varname]
    cprint.red("Variable is not defined: " + varname)
def log(self, varname):
    """Return the saved values of *varname* from the opened session.

    "Time", "time" and "t" all return the time dataset; "?" prints the
    list of saved variables; an unknown name prints a warning and
    returns None. A warning is also printed if the session is closed.
    """
    if not self.opened:
        cprint.red("Session is not opened")
        return
    if varname in ("Time", "time", "t"):
        # Read the whole time dataset into memory.
        return self.dset_time[()]
    if varname == "?":
        print("List of saved variables:")
        for name in self.grp_variables.keys():
            print(name)
        return
    if varname in self.grp_variables.keys():
        return self.grp_variables[varname][()]
    cprint.red("Variable is not defined: " + varname)
def cache(self, name, dict_caller=None):
    """Save one or several variables into the cache HDF5 store.

    :param name: variable name, or iterable of variable names
    :param dict_caller: namespace in which the values are looked up;
        defaults to the caller's local variables.
    """
    if dict_caller is None:
        stack = inspect.stack()
        try:
            dict_caller = stack[1][0].f_locals
        finally:
            del stack
    if not isinstance(name, str):
        # Iterable of names: cache each one from the same namespace.
        for var in name:
            self.cache(var, dict_caller)
        return
    if not self.has_cachestore:
        # Lazily create the cache directory and store on first use.
        try:
            os.makedirs(os.path.dirname(self.cachestorename), exist_ok=True)
        except OSError:
            pass
        try:
            self.cachestore = h5py.File(self.cachestorename, "w")
            self.has_cachestore = True
            cprint.yellow("*** Cache store created at " + self.cachestorename)
        except IOError as ioe:
            self.has_cachestore = False
            cprint.red("Cannot create cache store")
            # BUG FIX: exceptions have no .message attribute in
            # Python 3; str(ioe) gives the readable error text.
            cprint.red(str(ioe))
    if self.has_cachestore:
        cprint.yellow("Saving " + name + " in cache")
        try:
            new_length = len(dict_caller[name])
        except TypeError:
            # Scalar value: store it as a length-1 dataset.
            new_length = 1
        if name in self.cachestore.keys():
            # Existing dataset: resize if needed and overwrite in place.
            if len(self.cachestore[name]) != new_length:
                self.cachestore[name].resize((new_length,))
            self.cachestore[name][:] = dict_caller[name]
        else:
            if new_length > 1:
                self.cachestore.create_dataset(
                    name, chunks=True, maxshape=(None,), data=dict_caller[name]
                )
            else:
                self.cachestore.create_dataset(
                    name, chunks=True, maxshape=(None,), shape=(new_length,)
                )
                self.cachestore[name][:] = dict_caller[name]
def read_OctMI_session(sessionName, verbose=True, veryVerbose=False):
    """Load a saved Octave MI session and return its variables.

    :param sessionName: base name of the session; the file read is
        ``sessionName + "_MIstate.octave"``
    :param verbose: print start/end dates of the session
    :param veryVerbose: passed through to ``read_octave_binary``
    :return: dict mapping variable names (plus "startTime" and,
        when present, "t") to their saved arrays.
    """
    filename = sessionName + "_MIstate.octave"
    if verbose:
        print("Loading saved MI session from file `" + filename + "'")
    MI_session = read_octave_binary(filename, veryVerbose)["MI_session"]
    Variables = dict()
    Variables["startTime"] = MI_session["startTime"]
    for varname in MI_session["Variables"]:
        # Saved arrays are stored under "<name>_array" keys.
        keyname = varname + "_array"
        Variables[varname] = MI_session[keyname]
    time_fmt = "%A %e %B %Y - %H:%M:%S"
    # BUG FIX: Nelem was left undefined when no "t_array" is present,
    # raising NameError below instead of reaching the startTime branch.
    Nelem = 0
    if "t_array" in MI_session:
        Variables["t"] = MI_session["t_array"]
        try:
            Nelem = len(Variables["t"])
        except Exception:
            # "t" is a scalar (single sample).
            Nelem = 1
    if Nelem > 1:
        lt_start = localtime(Variables["t"][0])
        lt_end = localtime(Variables["t"][Nelem - 1])
    elif Nelem == 1:
        lt_start = localtime(Variables["t"])
        lt_end = lt_start
    else:
        # No time vector at all: fall back to the session start time.
        lt_start = localtime(Variables["startTime"])
        lt_end = lt_start
    if verbose:
        string = strftime(time_fmt, lt_start)
        cprint.blue("** Start date: " + string)
        if Nelem > 0:
            string = strftime(time_fmt, lt_end)
            cprint.blue("** End date: " + string)
        else:
            cprint.red("No logged variables")
    return Variables
def read_analog(
    scope_name,
    channelList="0",
    volt_range=10.0,
    samples_per_chan=100,
    sample_rate=1000.0,
    coupling_type="DC",
):
    """This function reads signal from a digital oscilloscope.

    :param scope_name: name of the NI-Scope device (e.g. 'Dev3')
    :type scope_name: str
    :param channelList: comma-separated string of channel number (e.g. "0")
    :type channelList: str
    :param volt_range: voltage range (scalar, or one value per channel)
    :type volt_range: float, or list
    :param samples_per_chan: number of samples to read per channel
    :type samples_per_chan: int
    :param sample_rate: for 5922, 60e6/n with n between 4 and 1200
    :type sample_rate: float
    :param coupling_type: 'DC', 'AC', 'GND'
    :type coupling_type: str
    :return: tuple of one 1-d array per channel
    """
    # Make sure scalars have the correct type, as it will otherwise
    # fail to convert to the corresponding Vi types.
    samples_per_chan = int(samples_per_chan)
    sample_rate = float(sample_rate)
    channelList = str(channelList)
    numChannels = len(channelList.split(","))
    if isinstance(volt_range, Iterable):
        volt_range = [float(v) for v in volt_range]
    else:
        volt_range = float(volt_range)
        if numChannels > 1:
            # Replicate the scalar range for every channel.
            volt_range = [volt_range] * numChannels
    scope = Scope(scope_name)
    print("Scope:", scope_name)
    scope.ConfigureHorizontalTiming(sampleRate=sample_rate, numPts=samples_per_chan)
    scope.NumRecords = 1
    if numChannels == 1:
        scope.ConfigureVertical(channelList=channelList, voltageRange=volt_range)
    else:
        for chan, v in zip(channelList.split(","), volt_range):
            print("Scope: setting chan {:s} voltage range to {:}.".format(chan, v))
            scope.ConfigureVertical(channelList=chan, voltageRange=v)
            actualRange = scope.ActualVoltageRange(chan)
            # BUG FIX: actualRange was compared against the whole
            # volt_range list instead of this channel's value v.
            if actualRange != v:
                print("Scope: actual range for chan {:s} is {:}".format(
                    chan, actualRange))
    scope.ConfigureTrigger("Immediate")
    sampling = scope.ActualSamplingRate
    length = scope.ActualRecordLength
    print_horodateur(length, sampling)
    # The device may round the requested timing parameters; warn when
    # the actual values differ from what was asked for.
    if sampling != sample_rate:
        cprint.red(
            "Warning: sampling frequency changed to {:} Hz.".format(sampling))
    if length != samples_per_chan:
        cprint.red(
            "Warning: record length changed to {:d} points.".format(length))
    if numChannels == 1:
        vRange = scope.ActualVoltageRange(channelList)
        if vRange != volt_range:
            cprint.red(
                "Warning: actual voltage range is {:} V.".format(vRange))
    else:
        for chan, v in zip(channelList.split(","), volt_range):
            vv = scope.ActualVoltageRange(chan)
            if vv != v:
                cprint.red(
                    "Warning: actual range for channel {:s} is {:} V.".format(
                        chan, vv))
    scope.InitiateAcquisition()
    duration = samples_per_chan / sampling
    MI.sleep(duration)
    data = scope.Fetch(channelList, timeout=duration)
    (l, c) = data.shape
    print("Scope: {:d} samples read.".format(l))
    scope.close()
    return tuple(data[:, i] for i in range(numChannels))
def read_analog(
    resource_names,
    terminal_config,
    volt_min=None,
    volt_max=None,
    samples_per_chan=1,
    sample_rate=1,
    coupling_types="DC",
    output_filename=None,
    verbose=True,
):
    """This function reads signal from analog input.

    :param resource_names: names from MAX (Dev1/ai0)
    :type resource_names: str, or list
    :param terminal_config: "Diff", "RSE", "NRSE"
    :type terminal_config: str, or list
    :param volt_min: minimum voltage
    :type volt_min: float, or list, optional
    :param volt_max: maximum voltage
    :type volt_max: float, or list, optional
    :param samples_per_chan: Number of samples to be read per channel
    :type samples_per_chan: int
    :param sample_rate: Clock frequency
    :type sample_rate: float
    :param coupling_types: Coupling of the channels ("DC", "AC", "GND")
    :type coupling_types: str, or list
    :param output_filename: If not None, file to write the acquired data
    :type output_filename: str, optional
    :param verbose: Verbosity level. Defaults to True (unlike in Fluidlab)
    :type verbose: bool, optional

    If the channel range is not specified, a 5.0 seconds samples will first be
    acquired to determine appropriate channel range (autoset feature).
    """
    # The type checks below are not very Pythonic, but they are needed
    # because PyDAQmx is a gateway to the C library, which has implicit
    # type requirements.
    # Ensure that samples_per_chan is integer
    if not isinstance(samples_per_chan, int):
        samples_per_chan = int(samples_per_chan)
    # Ensure resource_names is str or list of str
    if isinstance(resource_names, str):
        num_channels = 1
        resource_names = str(resource_names)
    else:
        num_channels = len(resource_names)
        resource_names = [str(r) for r in resource_names]
    # If no range is provided, take a 5s sample (50000 points at 10 kHz)
    # and size the range at 1.25x the observed peak amplitude.
    if volt_min is None or volt_max is None:
        print("Sampling 5s data to determine channel range")
        if num_channels == 1:
            volt_min = -10.0
            volt_max = 10.0
        else:
            volt_min = [-10.0] * num_channels
            volt_max = [10.0] * num_channels
        data = daqmx.read_analog(
            resource_names,
            terminal_config,
            volt_min,
            volt_max,
            samples_per_chan=50000,
            sample_rate=10e3,
            coupling_types=coupling_types,
            verbose=False,
        )
        if num_channels == 1:
            volt_range = np.max(np.abs(data)) * 1.25
            volt_min = -volt_range
            volt_max = volt_range
        else:
            for chan in range(num_channels):
                volt_range = np.max(np.abs(data[chan])) * 1.25
                volt_min[chan] = -volt_range
                volt_max[chan] = volt_range
                print("Channel", chan, "min max:", np.min(data[chan]),
                      np.max(data[chan]))
    # Run fluidlab daqmx.read_analog with verbose=True by default
    data = daqmx.read_analog(
        resource_names,
        terminal_config,
        volt_min,
        volt_max,
        samples_per_chan,
        sample_rate,
        coupling_types,
        output_filename,
        verbose,
    )
    # If verbose, check that voltage range has not been reached and issue
    # a warning otherwise.
    if verbose:
        if num_channels == 1:
            channel_range = np.max([np.abs(volt_min), np.abs(volt_max)])
            if np.max(np.abs(data)) >= channel_range:
                cprint.red("WARNING: channel range too small!")
        else:
            for chan in range(num_channels):
                try:
                    channel_range = np.max(
                        [np.abs(volt_min[chan]), np.abs(volt_max[chan])])
                except TypeError:
                    # volt_min/volt_max were given as scalars for all
                    # channels.
                    channel_range = np.max(
                        [np.abs(volt_min), np.abs(volt_max)])
                if np.max(np.abs(data[chan])) >= channel_range:
                    cprint.red(
                        "WARNING: channel range is too small for channel "
                        + resource_names[chan])
    return data
def cache(self, name, dict_caller=None):
    """Save one or several variables into the cache HDF5 store.

    Lists of objects are stored as a group named *name* with one
    dataset per item; everything else is stored as a single dataset.

    :param name: variable name, or iterable of variable names
    :param dict_caller: namespace in which the values are looked up;
        defaults to the caller's local variables.
    """
    if dict_caller is None:
        stack = inspect.stack()
        try:
            dict_caller = stack[1][0].f_locals
        finally:
            del stack
    if not isinstance(name, str):
        # Iterable of names: cache each one from the same namespace.
        for var in name:
            self.cache(var, dict_caller)
        return
    if not self.has_cachestore:
        # Lazily create the cache directory and store on first use.
        try:
            os.makedirs(os.path.dirname(self.cachestorename), exist_ok=True)
        except OSError:
            pass
        try:
            self.cachestore = h5py.File(self.cachestorename, "w")
            self.has_cachestore = True
            cprint.yellow("*** Cache store created at " + self.cachestorename)
        except IOError as ioe:
            self.has_cachestore = False
            cprint.red("Cannot create cache store")
            # BUG FIX: exceptions have no .message attribute in
            # Python 3; str(ioe) gives the readable error text.
            cprint.red(str(ioe))
    if self.has_cachestore:
        cprint.yellow("Saving " + name + " in cache")
        if isinstance(dict_caller[name], list):
            # Saving a list of objects: create a group named "name" and
            # store one dataset per item. If the group already exists,
            # delete it and recreate it from scratch.
            try:
                del self.cachestore[name]
            except KeyError:
                pass
            grp = self.cachestore.create_group(name)
            for itemnum, item in enumerate(dict_caller[name]):
                if not isinstance(item, (int, float)):
                    # Array-like items are compressed.
                    grp.create_dataset(
                        str(itemnum), chunks=True, data=item, compression="gzip"
                    )
                else:
                    grp.create_dataset(str(itemnum), data=item)
        else:
            try:
                new_length = len(dict_caller[name])
            except TypeError:
                # Scalar value: store it as a length-1 dataset.
                new_length = 1
            if name in self.cachestore.keys():
                # Existing dataset: resize if needed and overwrite.
                if len(self.cachestore[name]) != new_length:
                    self.cachestore[name].resize((new_length,))
                self.cachestore[name][:] = dict_caller[name]
            else:
                if new_length > 1:
                    self.cachestore.create_dataset(
                        name, chunks=True, maxshape=(None,), data=dict_caller[name]
                    )
                else:
                    self.cachestore.create_dataset(
                        name, chunks=True, maxshape=(None,), shape=(new_length,)
                    )
                    self.cachestore[name][:] = dict_caller[name]
def log_plot(self, fignum, varlist, maxvalues=1000, plotfunc=plt.plot):
    """Plot logged variables in an interactive matplotlib figure.

    Points acquired before the current session opening are drawn with
    hollow markers, newer points with filled markers.

    :param fignum: matplotlib figure number
    :param varlist: a single variable name, or an iterable of names
    :param maxvalues: only the last *maxvalues* points are displayed
    :param plotfunc: plotting function used for multi-variable plots

    BUG FIX: the truncation offset now uses *maxvalues* instead of a
    hard-coded 1000.
    """
    if self.opened:
        plt.figure(fignum)
        plt.clf()
        plt.ion()
        plt.show()
        t = self.log("t")
        if len(t) > maxvalues:
            debut = len(t) - maxvalues
            fin = len(t)
        else:
            debut = 0
            fin = len(t)
        if t[debut] > self.session_opening_time:
            # all points are new
            olddebut = None
            oldfin = None
            newdebut = debut
            newfin = fin
        elif t[-1] > self.session_opening_time:
            # some points are new
            bb = t > self.session_opening_time
            olddebut = debut
            oldfin = np.min(bb.argmax())
            newdebut = oldfin
            newfin = fin
        else:
            # all points are old
            olddebut = debut
            oldfin = fin
            newdebut = None
            newfin = None
        # Pick a readable time unit for the x-axis.
        if t[-1] - t[0] < 3600:
            t = (t - t[0]) / 60.0
            xlab = "t [min]"
        else:
            t = (t - t[0]) / 3600.0
            xlab = "t [h]"
        if isinstance(varlist, str):
            # NOTE(review): the single-variable branch uses plt.plot
            # directly, ignoring plotfunc — kept as-is.
            lab = varlist
            col = (0, 0, 1)
            if newdebut is not None:
                plt.plot(
                    t[newdebut:newfin],
                    self.log(varlist)[newdebut:newfin],
                    "o-",
                    color=col,
                    mec=col,
                    mfc=col,
                    label=lab,
                )
                lab = None
            if olddebut is not None:
                plt.plot(
                    t[olddebut:oldfin],
                    self.log(varlist)[olddebut:oldfin],
                    "o-",
                    mfc="none",
                    mec=col,
                    color=col,
                    label=lab,
                )
        else:
            for var, coul in zip(varlist, ColorGenerator()):
                lab = var
                if newdebut is not None:
                    plotfunc(
                        t[newdebut:newfin],
                        self.log(var)[newdebut:newfin],
                        "o-",
                        mfc=coul,
                        mec=coul,
                        color=coul,
                        label=lab,
                    )
                    lab = None
                if olddebut is not None:
                    plotfunc(
                        t[olddebut:oldfin],
                        self.log(var)[olddebut:oldfin],
                        "o-",
                        mfc="none",
                        mec=coul,
                        color=coul,
                        label=lab,
                    )
        plt.xlabel(xlab)
        plt.legend(loc="upper left")
        plt.draw()
        with warnings.catch_warnings():
            # plt.pause emits a deprecation warning on some backends.
            warnings.simplefilter("ignore")
            plt.pause(0.0001)
    else:
        cprint.red("Session is not opened")