def _prepare_measurement_file(self):
    """Create the output h5 file and datasets for the IV scan.

    The bias mode decides which quantity is the coordinate axis and
    which is measured: voltage bias -> current is recorded, current
    bias -> voltage is recorded.  Dataset layout then depends on the
    measurement dimensionality (_measure_IV_1D/2D/3D).
    """
    self._data = hdf.Data(name=self._scan_name)
    if self._voltage_bias:
        bias_name = 'Voltage'
        bias_unit = 'V'
        measurement_name = 'Current'
        measurement_unit = 'A'
    if self._current_bias:
        bias_name = 'Current'
        bias_unit = 'A'
        measurement_name = 'Voltage'
        measurement_unit = 'V'
    # bias coordinate: forward sweep followed by backward sweep
    self._data_bias = self._data.add_coordinate(bias_name, unit=bias_unit)
    bias_vec = np.append(self._vec_fw, self._vec_bw)
    self._data_bias.add(bias_vec)
    if self._measure_IV_1D:
        if self._sweeps == 1:
            # BUGFIX: original referenced self._hdf_bias, which this method
            # never creates; the bias coordinate created above is
            # self._data_bias.  NOTE(review): confirm no other code assigns
            # self._hdf_bias before this runs.
            self._data_measure = self._data.add_value_vector(
                measurement_name, x=self._data_bias, unit=measurement_unit)
        else:
            self._data_sweep = self._data.add_coordinate('sweep')
            # BUGFIX: self._sweeps is an integer (compared to 1 above);
            # iterating it directly raised TypeError.  Enumerate the sweeps.
            self._data_sweep.add(list(range(self._sweeps)))
            self._data_measure = self._data.add_value_matrix(
                measurement_name, x=self._data_sweep, y=self._data_bias,
                unit=measurement_unit)
    if self._measure_IV_2D:
        # BUGFIX: 'self._dat' was a typo for 'self._data'
        self._data_x = self._data.add_coordinate(self.x_coordname,
                                                 unit=self.x_unit)
        self._data_x.add(self.x_vec)
        # BUGFIX: x was 'self._x', which is never assigned; use the
        # coordinate created above (parallel to the 3D branch below)
        self._data_measure = self._data.add_value_matrix(
            measurement_name, x=self._data_x, y=self._data_bias,
            unit=measurement_unit)
    if self._measure_IV_3D:
        self._data_x = self._data.add_coordinate(self.x_coordname,
                                                 unit=self.x_unit)
        self._data_x.add(self.x_vec)
        self._data_y = self._data.add_coordinate(self.y_coordname,
                                                 unit=self.y_unit)
        self._data_y.add(self.y_vec)
        self._data_measure = self._data.add_value_box(
            measurement_name, x=self._data_x, y=self._data_y,
            z=self._data_bias, unit=measurement_unit)
    if self.comment:
        self._data.add_comment(self.comment)
def save_errorbar_plot(fname,fvalues,ferrs,x_url,fit_url=None,entryname_coordinate='param',entryname_vector='error_plot',folder='analysis'):
    '''
    Creates an errorbar plot in the h5 file and a respective view, if
    possible overlayed with fit data.

    Sample use:
    from qkit.analysis import dat_reader as dr
    from qkit.analysis import data_optimizer as do
    all_dat, fn, urls = dr.load_data('data/sample_data_file.h5',entries=['delay','pulse','amp','pha'])
    times, amp, ampa, pha, phaa = all_dat
    t, dmean, stdm = do.optimize(np.array([times,amp,pha]),1,2,show_complex_plot=True)
    en = 'vac_Rabi_fit_do'
    dr.fit_data(fn,fit_function=dr.DAMPED_EXP,entryname=en,opt=True)
    dr.save_errorbar_plot(fn,dmean,stdm,urls[0],fit_url='/entry/analysis0/'+en)

    inputs:
    - fname: file name of the h5 file
    - fvalues: data values
    - ferrs: errors of the data values
    - x_url: url of existing data coordinate axis
    - fit_url: (str) (optional, default: None) url of existing fit data,
      e.g. u'/entry/analysis0/fit'
    - entryname_coordinate: (str) (optional, default: 'param') name of
      parameter coordinate
    - entryname_vector: (str) (optional, default: 'error_plot') name of
      fit data vector
    - folder: (str) (optional, default: 'analysis') folder name for
      storage of analysis data

    outputs: bool
    - returns True in case job was successful, False if an error occurred
    '''
    try:
        hf = hdf_lib.Data(path=fname)

        # create data vector for errorplot with x axis information from x_url
        ds_x = hf.get_dataset(x_url)
        hdf_y = hf.add_value_vector(entryname_vector, folder=folder, x=ds_x)
        hdf_y.append(np.array(fvalues))

        # write errors
        hdf_error = hf.add_value_vector(entryname_vector+'_error', folder=folder)
        hdf_error.append(np.array(ferrs))

        # joint view including fvalues and errors ferrs
        joint_error_view = hf.add_view('err_view', x=ds_x, y=hdf_y,
                                       error=hdf_error)

        if fit_url is not None:
            # create joint view with fit data if existing
            joint_error_view_fit = hf.add_view('err_view_fit', x=ds_x,
                                               y=hdf_y,
                                               error=hdf_error)  # errorplot
            joint_error_view_fit.add(
                x=hf.get_dataset('/entry/analysis0/param'),
                y=hf.get_dataset(fit_url))  # fit
        hf.close_file()
    except Exception as m:
        # BUGFIX: originally only NameError was caught, so e.g. an IOError
        # while opening the file propagated instead of returning False as
        # documented.  NameError (raised when h5 support is missing) is
        # still covered by the broader handler.
        logging.error('Error while attempting to save error bar data in h5 file: '+str(m))
        return False
    return True
def create_logfile(self): print 'Create new log file for parameter %s.' % self.name self.fname = os.path.join( self.log_path, time.strftime('%m%Y') + '/', self.name.replace(' ', '_') + time.strftime('%d%m%Y%M%S') + '.h5') print self.fname self.hf = hdf_lib.Data(path=self.fname) self.hdf_t = self.hf.add_coordinate('timestamps') self.hdf_v = self.hf.add_value_vector('values', x=self.hdf_t) self.url_timestamps = '/entry/data0/timestamps' self.url_values = '/entry/data0/values' view = self.hf.add_view('data_vs_time', x=self.hdf_t, y=self.hdf_v) #fit
def read_hdf_data(nfile,entries=None, show_output=True): ''' - read hdf data file, store data in 2d numpy array and return - entries (optional): specify entries in h5 file to be read and returned - returns numpy data array - the function is used by the function load_data as part of the general fitter fit_data - currently one can pass a set of keywords (entries) in the form of a string array ['string1',string2',...,'stringn'] with stringi the keywords to be read and returned as numpy array - when no entries are specified, read_hdf_data looks for a frequency axis or a pulse length axis for the numpy array's first axis and also searches amplitude_avg and phase_avg. If not present, regular amplitude and phase data is used - for the near future I have in mind that each data taking script saves a hint in the h5 file which entries to use for quick fitting with the dat_reader ''' try: hf = hdf_lib.Data(path = nfile) except IOError,NameError: print 'Error: No h5 file read.' return
def __init__(self, h5_filepath, comment='', save_pdf=False): self.comment = comment self.save_pdf = save_pdf self.path = h5_filepath filepath = os.path.abspath( self.path) #put filepath to platform standards self.filedir = os.path.dirname( filepath ) #return directory component of the given pathname, here filepath self.image_dir = os.path.join(self.filedir, 'images') try: os.mkdir(self.image_dir) except OSError: logging.warning('Error creating image directory.') pass # open the h5 file and get the hdf_lib object self.hf = hdf_lib.Data(path=self.path) # check for datasets for i, pentry in enumerate(self.hf['/entry'].keys()): key = '/entry/' + pentry for j, centry in enumerate(self.hf[key].keys()): try: self.key = '/entry/' + pentry + "/" + centry self.ds = self.hf[self.key] if self.ds.attrs.get('save_plot', True): self.plt() except Exception as e: print "Exception in qkit/gui/plot/plot.py while plotting" print self.key print e #close hf file self.hf.close() print 'Plots saved in', self.image_dir
def _prepare_measurement_file(self):
    '''
    creates the output .h5-file with distinct dataset structures for each
    measurement type.
    at this point all measurement parameters are known and put in the
    output file
    '''
    self._data_file = hdf.Data(name=self._file_name)

    # attach measurement meta data (uuid, relative file path, instrument
    # list) to the measurement object and store it as JSON in the file
    self._measurement_object.uuid = self._data_file._uuid
    self._measurement_object.hdf_relpath = self._data_file._relpath
    self._measurement_object.instruments = qt.instruments.get_instruments()
    self._measurement_object.save()
    self._mo = self._data_file.add_textlist('measurement')
    self._mo.append(self._measurement_object.get_JSON())

    # write logfile and instrument settings
    self._write_settings_dataset()
    self._log = waf.open_log_file(self._data_file.get_filepath())

    if not self._scan_time:
        # frequency axis shared by the 1D/2D/3D frequency sweeps
        self._data_freq = self._data_file.add_coordinate('frequency', unit='Hz')
        self._data_freq.add(self._freqpoints)

    if self._scan_1D:
        # single trace vs frequency: real/imag plus amplitude/phase vectors
        self._data_real = self._data_file.add_value_vector(
            'real', x=self._data_freq, unit='', save_timestamp=True)
        self._data_imag = self._data_file.add_value_vector(
            'imag', x=self._data_freq, unit='', save_timestamp=True)
        self._data_amp = self._data_file.add_value_vector(
            'amplitude', x=self._data_freq, unit='arb. unit',
            save_timestamp=True)
        self._data_pha = self._data_file.add_value_vector(
            'phase', x=self._data_freq, unit='rad', save_timestamp=True)

    if self._scan_2D:
        # one trace per x value -> matrices over (x, frequency)
        self._data_x = self._data_file.add_coordinate(self.x_coordname,
                                                      unit=self.x_unit)
        self._data_x.add(self.x_vec)
        self._data_amp = self._data_file.add_value_matrix(
            'amplitude', x=self._data_x, y=self._data_freq, unit='arb. unit',
            save_timestamp=True)
        self._data_pha = self._data_file.add_value_matrix(
            'phase', x=self._data_x, y=self._data_freq, unit='rad',
            save_timestamp=True)

        if self.log_function != None:  #use logging
            # one additional value vector per registered log function
            self._log_value = []
            for i in range(len(self.log_function)):
                self._log_value.append(
                    self._data_file.add_value_vector(
                        self.log_name[i],
                        x=self._data_x,
                        unit=self.log_unit[i],
                        dtype=self.log_dtype[i]))

        if self._nop < 10:
            """creates view: plot middle point vs x-parameter, for qubit measurements"""
            self._data_amp_mid = self._data_file.add_value_vector(
                'amplitude_midpoint', unit='arb. unit', x=self._data_x,
                save_timestamp=True)
            self._data_pha_mid = self._data_file.add_value_vector(
                'phase_midpoint', unit='rad', x=self._data_x,
                save_timestamp=True)
        #self._view = self._data_file.add_view("amplitude vs. " + self.x_coordname, x = self._data_x, y = self._data_amp[self._nop/2])

    if self._scan_3D:
        # two swept parameters (x, y) on top of the frequency axis
        self._data_x = self._data_file.add_coordinate(self.x_coordname,
                                                      unit=self.x_unit)
        self._data_x.add(self.x_vec)
        self._data_y = self._data_file.add_coordinate(self.y_coordname,
                                                      unit=self.y_unit)
        self._data_y.add(self.y_vec)
        if self._nop == 0:
            #saving in a 2D matrix instead of a 3D box HR: does not work yet !!! test things before you put them online.
            self._data_amp = self._data_file.add_value_matrix(
                'amplitude', x=self._data_x, y=self._data_y, unit='arb. unit',
                save_timestamp=False)
            self._data_pha = self._data_file.add_value_matrix(
                'phase', x=self._data_x, y=self._data_y, unit='rad',
                save_timestamp=False)
        else:
            # full 3D box over (x, y, frequency)
            self._data_amp = self._data_file.add_value_box(
                'amplitude', x=self._data_x, y=self._data_y,
                z=self._data_freq, unit='arb. unit', save_timestamp=False)
            self._data_pha = self._data_file.add_value_box(
                'phase', x=self._data_x, y=self._data_y, z=self._data_freq,
                unit='rad', save_timestamp=False)

        if self.log_function != None:  #use logging
            self._log_value = []
            for i in range(len(self.log_function)):
                self._log_value.append(
                    self._data_file.add_value_vector(
                        self.log_name[i],
                        x=self._data_x,
                        unit=self.log_unit[i],
                        dtype=self.log_dtype[i]))

    if self._scan_time:
        # time-trace mode: repeated traces at the fixed center frequency
        self._data_freq = self._data_file.add_coordinate('frequency', unit='Hz')
        self._data_freq.add([self.vna.get_centerfreq()])
        # NOTE(review): divides by (self._nop - 1); assumes _nop > 1 in this
        # mode — confirm against the callers
        self._data_time = self._data_file.add_coordinate('time', unit='s')
        self._data_time.add(
            np.arange(0, self._nop, 1) * self.vna.get_sweeptime() /
            (self._nop - 1))
        self._data_x = self._data_file.add_coordinate('trace_number', unit='')
        self._data_x.add(np.arange(0, self.number_of_timetraces, 1))
        self._data_amp = self._data_file.add_value_matrix(
            'amplitude', x=self._data_x, y=self._data_time, unit='lin. mag.',
            save_timestamp=False)
        self._data_pha = self._data_file.add_value_matrix(
            'phase', x=self._data_x, y=self._data_time, unit='rad.',
            save_timestamp=False)

    if self.comment:
        self._data_file.add_comment(self.comment)

    if self.qviewkit_singleInstance and self.open_qviewkit and self._qvk_process:
        self._qvk_process.terminate()  #terminate an old qviewkit instance
def __init__(self, file_path):
    """Open the h5 file at file_path and prepare its datasets.

    Parameters:
        file_path: path to an existing h5 data file
    """
    self._hf = hdf_lib.Data(path=file_path)
    self._prepare_datasets()
@author: rotzinger@kit
"""
import time

# hdf_lib lives in qkit.storage when run inside qkit; fall back to a
# plain local import for stand-alone use
try:
    from qkit.storage import hdf_lib as hl
except ImportError:
    import hdf_lib as hl
## for random data
from numpy.random import rand
from numpy import linspace, arange

# number of points per trace
nop = 101
##
# first create a data object , if path is set to None or is omitted, a new path will be created
h5d = hl.Data(name='VNA_tracedata', path="./test2.h5")

# comment added to the hdf (internal) folder
# options : comment (mandatory)
#         : folder='data' | 'analysis' (optional, default is "data")
h5d.add_comment("New data has been created ....")

# free-text settings entry stored alongside the data
settings = h5d.add_textlist("settings", comment="my settings")
settings.append(u"vnapower = 10dBm")
settings.append(u"fridge attenuation=50db\n data jumps like Van Halen.")

# string array
#add_text()
#options: name (mandatory)
#       : comment = "" (optional)
#       : folder="data" (optional)
# use the append method to add the text
def _prepare_measurement_file(self):
    # Create the output h5 file and the amplitude/phase (and optional raw
    # I/Q time-trace) datasets for the multiplexed readout measurement.
    # Dataset layout depends on self.mode: 1 = 1D, 2 = 2D, 3 = 1D_AWG/2D_AWG.
    if self.dirname == None:
        self.dirname = self.x_coordname
    self.ndev = len(
        readout.get_tone_freq()
    )  #returns array of readout frequencies (=1 for non-multiplexed readout)

    self._hdf = hdf.Data(name=self.dirname)
    self._hdf_x = self._hdf.add_coordinate(self.x_coordname, unit=self.x_unit)
    self._hdf_x.add(self.x_vec)

    # store instrument settings and open the measurement log file
    self._settings = self._hdf.add_textlist('settings')
    settings = waf.get_instrument_settings(self._hdf.get_filepath())
    self._settings.append(settings)
    self._log = waf.open_log_file(self._hdf.get_filepath())

    # one readout frequency per multiplexed tone
    self._hdf_readout_frequencies = self._hdf.add_value_vector(
        self.multiplex_attribute, unit=self.multiplex_unit)
    self._hdf_readout_frequencies.append(readout.get_tone_freq())

    if self.ReadoutTrace:
        # sample-time axis for the raw I/Q time traces
        self._hdf_TimeTraceAxis = self._hdf.add_coordinate(
            'recorded timepoint', unit='s')
        self._hdf_TimeTraceAxis.add(
            np.arange(mspec.get_samples()) / readout.get_adc_clock())

    if self.mode == 1:  #1D
        # one amplitude/phase vector over x per readout tone
        self._hdf_amp = []
        self._hdf_pha = []
        for i in range(self.ndev):
            self._hdf_amp.append(
                self._hdf.add_value_vector('amplitude_%i' % i,
                                           x=self._hdf_x,
                                           unit='V'))
            self._hdf_pha.append(
                self._hdf.add_value_vector('phase_%i' % i,
                                           x=self._hdf_x,
                                           unit='rad'))
        if self.ReadoutTrace:
            self._hdf_I = self._hdf.add_value_matrix(
                'I_TimeTrace',
                x=self._hdf_x,
                y=self._hdf_TimeTraceAxis,
                unit='V',
                save_timestamp=False)
            self._hdf_Q = self._hdf.add_value_matrix(
                'Q_TimeTrace',
                x=self._hdf_x,
                y=self._hdf_TimeTraceAxis,
                unit='V',
                save_timestamp=False)
    elif self.mode == 2:  #2D
        self._hdf_y = self._hdf.add_coordinate(self.y_coordname,
                                               unit=self.y_unit)
        self._hdf_y.add(self.y_vec)
        self._hdf_amp = []
        self._hdf_pha = []
        for i in range(self.ndev):
            self._hdf_amp.append(
                self._hdf.add_value_matrix('amplitude_%i' % i,
                                           x=self._hdf_x,
                                           y=self._hdf_y,
                                           unit='V'))
            self._hdf_pha.append(
                self._hdf.add_value_matrix('phase_%i' % i,
                                           x=self._hdf_x,
                                           y=self._hdf_y,
                                           unit='rad'))
    elif self.mode == 3:  #1D_AWG/2D_AWG
        # NOTE: in this mode the axis order is (y, x), unlike mode 2
        self._hdf_y = self._hdf.add_coordinate(self.y_coordname,
                                               unit=self.y_unit)
        self._hdf_y.add(self.y_vec)
        self._hdf_amp = []
        self._hdf_pha = []
        for i in range(self.ndev):
            self._hdf_amp.append(
                self._hdf.add_value_matrix('amplitude_%i' % i,
                                           x=self._hdf_y,
                                           y=self._hdf_x,
                                           unit='V'))
            self._hdf_pha.append(
                self._hdf.add_value_matrix('phase_%i' % i,
                                           x=self._hdf_y,
                                           y=self._hdf_x,
                                           unit='rad'))
        if self.ReadoutTrace:
            self._hdf_I = self._hdf.add_value_box(
                'I_TimeTrace',
                x=self._hdf_y,
                y=self._hdf_x,
                z=self._hdf_TimeTraceAxis,
                unit='V',
                save_timestamp=False)
            self._hdf_Q = self._hdf.add_value_box(
                'Q_TimeTrace',
                x=self._hdf_y,
                y=self._hdf_x,
                z=self._hdf_TimeTraceAxis,
                unit='V',
                save_timestamp=False)
        if self.create_averaged_data:
            # extra vectors over x holding averaged data
            # (presumably averaged over the y axis — confirm against the
            # measurement loop)
            self._hdf_amp_avg = []
            self._hdf_pha_avg = []
            for i in range(self.ndev):
                self._hdf_amp_avg.append(
                    self._hdf.add_value_vector('amplitude_avg_%i' % i,
                                               x=self._hdf_x,
                                               unit='V'))
                self._hdf_pha_avg.append(
                    self._hdf.add_value_vector('phase_avg_%i' % i,
                                               x=self._hdf_x,
                                               unit='rad'))
    if self.comment:
        self._hdf.add_comment(self.comment)
    if self.qviewkit_singleInstance and self.open_qviewkit and self._qvk_process:
        self._qvk_process.terminate()  #terminate an old qviewkit instance
    if self.open_qviewkit:
        self._qvk_process = qviewkit.plot(self._hdf.get_filepath(),
                                          datasets=['amplitude', 'phase'])
def _save_fit_data_in_h5_file(fname,x_vec,fvalues,x_url,data_url,fit_type,fit_params,fit_covariance,data_opt=None,entryname_coordinate='param',entryname_vector='fit',folder='analysis'):
    '''
    Appends fitted data to the h5 file in the specified folder. As the fit
    is a fixed length array, a respective parameter axis is created and
    also stored in the h5 file. If data was optimized using the data
    optimizer option, the optimized data set is in addition stored in the
    h5 file. A joint view of data overlayed with the fit is created.

    inputs:
    - fname: file name of the h5 file
    - x_vec: x vector (parameter vector) of constant length, matching the
      dimensions of fit data
    - fvalues: fitted function values
    - x_url: url of existing data coordinate axis
    - fit_type: int of the fit function used
    - fit_params: np array of the resulting fit parameters
    - fit_covariance: estimated covariances of the fit_params as returned
      by curve_fit
    - data_url: url of existing data vector
    - data_opt: (optional, default: None) specifier whether optimized data
      that is to be stored has been generated
    - entryname_coordinate: (str) (optional, default: 'param') name of
      parameter coordinate
    - entryname_vector: (str) (optional, default: 'fit') name of fit data
      vector
    - folder: (str) (optional, default: 'analysis') folder name for
      storage of analysis data

    outputs: bool
    - returns True in case job was successful, False if an error occurred
    '''
    try:
        hf = hdf_lib.Data(path=fname)

        #create coordinate and fit data vector
        hdf_x = hf.add_coordinate(entryname_coordinate, folder=folder)
        hdf_x.add(x_vec)
        hdf_y = hf.add_value_vector(entryname_vector, folder=folder, x=hdf_x)
        hdf_y.append(np.array(fvalues))

        # store fit meta data: function id, fit parameters, covariances
        hdf_type = hf.add_value_vector('dr_fit_type', folder=folder)
        hdf_type.append(np.array([fit_type]))
        hdf_params = hf.add_value_vector('dr_values', folder=folder,
                                         comment=", ".join(PARAMS[fit_type]))
        hdf_params.append(np.array(fit_params))
        hdf_covariance = hf.add_value_vector('dr_covariance', folder=folder)
        hdf_covariance.append(np.array(fit_covariance))

        if data_opt is not None:
            #create optimized data entry
            hdf_data_opt = hf.add_value_vector('data_opt', folder=folder,
                                               x=hf.get_dataset(x_url))
            hdf_data_opt.append(np.array(data_opt))

        #create joint view of data overlayed with the fit
        if data_opt is not None:
            joint_view = hf.add_view(entryname_vector+'_do', x=hdf_x,
                                     y=hdf_y)  #fit
            joint_view.add(x=hf.get_dataset(x_url), y=hdf_data_opt)  #data
        else:
            joint_view = hf.add_view(entryname_vector, x=hdf_x, y=hdf_y)  #fit
            joint_view.add(x=hf.get_dataset(x_url),
                           y=hf.get_dataset(data_url))  #data
        hf.close_file()
    except Exception as m:
        # BUGFIX: originally only NameError was caught, so e.g. an IOError
        # from opening the file or a KeyError from PARAMS propagated instead
        # of returning False as documented; NameError is still covered.
        logging.error('Error while attempting to save fit data in h5 file: '+str(m))
        return False
    return True
def _prepare_measurement_file(self):
    '''
    creates the output .h5-file with distinct dataset structures for each
    measurement type.
    at this point all measurement parameters are known and put in the
    output file
    '''
    print ('filename '+self._file_name)
    self._data_file = hdf.Data(name=self._file_name)

    # attach measurement meta data and store it as JSON in the file
    self._measurement_object.uuid = self._data_file._uuid
    self._measurement_object.hdf_relpath = self._data_file._relpath
    self._measurement_object.instruments = qt.instruments.get_instruments()
    #self._measurement_object.save()
    self._mo = self._data_file.add_textlist('measurement')
    self._mo.append(self._measurement_object.get_JSON())

    # write logfile and instrument settings
    #self._write_settings_dataset()
    #self._log = waf.open_log_file(self._data_file.get_filepath())

    #if not self._scan_time:
    #    self._data_freq = self._data_file.add_coordinate('frequency', unit = 'Hz')
    #    self._data_freq.add(self._freqpoints)

    # per-sweep current and voltage datasets
    self._data_I = []
    self._data_V = []
    #self._data_dVdI = []
    if self._scan_1D:
        if self._bias_mode:  # current bias
            #self._data_freq = self._data_file.add_coordinate('frequency', unit = 'Hz')
            # one V/I vector pair per sweep
            for st in range(self.sweep.get_nos()):
                self._data_V.append(
                    self._data_file.add_value_vector(
                        'V_'+str(st), unit = 'V', save_timestamp = False))
                self._data_I.append(
                    self._data_file.add_value_vector(
                        'I_'+str(st), x = self._data_V[st], unit = 'A',
                        save_timestamp = False))
                #self._data_dVdI.append(self._data_file.add_value_vector('_data_dVdI_'+str(st), x = self._data_V[st], unit = 'V/A', save_timestamp = False))
            # joint IV view overlaying all sweeps
            IV = self._data_file.add_view('IV', x = self._data_V[0],
                                          y = self._data_I[0])
            #dVdI = self._data_file.add_view('dVdI', x = self._data_I[0] , y = self._data_dVdI[0])
            for i in range(1, self.sweep.get_nos()):
                IV.add(x=self._data_V[i],y=self._data_I[i])
                #dVdI.add(x=self._data_I[i],y=self._data_dVdI[i])
    if self._scan_2D:
        self._data_x = self._data_file.add_coordinate(self.x_coordname,
                                                      unit = self.x_unit)
        self._data_x.add(self.x_vec)
        # voltage as matrix over (x, current) per sweep
        for st in range(self.sweep.get_nos()):
            self._data_I.append(
                self._data_file.add_value_vector(
                    'I_'+str(st), unit = 'A', save_timestamp = False))
            self._data_V.append(
                self._data_file.add_value_matrix(
                    'V_'+str(st), x = self._data_x, y = self._data_I[st],
                    unit = 'V', save_timestamp = False))
            #self._data_dVdI.append(self._data_file.add_value_matrix('dVdI_'+str(st), x = self._data_x, y = self._data_I[st], unit = 'V/A', save_timestamp = False))
        if self._Fraunhofer:
            # critical current vs x per sweep, plus an overlay view
            self._data_Ic = []
            for st in range(self.sweep.get_nos()):
                self._data_Ic.append(
                    self._data_file.add_value_vector(
                        'Ic_'+str(st), x = self._data_x, unit = 'A',
                        save_timestamp = False))
            Fraunhofer = self._data_file.add_view('Fraunhofer',
                                                  x=self._data_x,
                                                  y=self._data_Ic[0])
            for i in range(1, self.sweep.get_nos()):
                Fraunhofer.add(x=self._data_x, y=self._data_Ic[i])
        #if self.log_function != None: #use logging
        #    self._log_value = []
        #    for i in range(len(self.log_function)):
        #        self._log_value.append(self._data_file.add_value_vector(self.log_name[i], x = self._data_x, unit = self.log_unit[i], dtype=self.log_dtype[i]))
    if self._scan_3D:
        self._data_x = self._data_file.add_coordinate(self.x_coordname,
                                                      unit = self.x_unit)
        self._data_x.add(self.x_vec)
        self._data_y = self._data_file.add_coordinate(self.y_coordname,
                                                      unit = self.y_unit)
        self._data_y.add(self.y_vec)
        if self._nop == 0:
            #saving in a 2D matrix instead of a 3D box HR: does not work yet !!! test things before you put them online.
            self._data_amp = self._data_file.add_value_matrix(
                'amplitude', x = self._data_x, y = self._data_y,
                unit = 'arb. unit', save_timestamp = False)
            self._data_pha = self._data_file.add_value_matrix(
                'phase', x = self._data_x, y = self._data_y, unit = 'rad',
                save_timestamp = False)
        else:
            # NOTE(review): self._data_freq is referenced here but its
            # creation above is commented out — this branch would raise
            # AttributeError/NameError unless _data_freq is set elsewhere;
            # confirm before enabling 3D scans.
            self._data_amp = self._data_file.add_value_box(
                'amplitude', x = self._data_x, y = self._data_y,
                z = self._data_freq, unit = 'arb. unit',
                save_timestamp = False)
            self._data_pha = self._data_file.add_value_box(
                'phase', x = self._data_x, y = self._data_y,
                z = self._data_freq, unit = 'rad', save_timestamp = False)

        if self.log_function != None:  #use logging
            # one extra value vector per registered log function
            self._log_value = []
            for i in range(len(self.log_function)):
                self._log_value.append(
                    self._data_file.add_value_vector(
                        self.log_name[i], x = self._data_x,
                        unit = self.log_unit[i], dtype=self.log_dtype[i]))

    if self.comment:
        self._data_file.add_comment(self.comment)

    if self.qviewkit_singleInstance and self.open_qviewkit and self._qvk_process:
        self._qvk_process.terminate()  #terminate an old qviewkit instance