def save_last_histogram(self, name):
    # Write the most recent histogram (self.x, self.y) to a fresh HDF5 container.
    dat = h5.HDF5Data(name='qutau_counter_' + name)
    print dat.filepath()
    dat.create_dataset('x', data=self.x)
    dat.create_dataset('y', data=self.y)
    dat.close()
def save(self):
    name = self.get_name()
    if self._setup_controller is not None:
        name += '_' + self._setup_controller.get_keyword()

    dat = h5.HDF5Data(name=name)
    dat.create_dataset('x', data=self._x)
    dat.create_dataset('y', data=self._y)
    dat.create_dataset('countrate', data=self._data['countrates'])
    m2.save_instrument_settings_file(dat)
    self._last_filepath = dat.filepath()
    dat.close()
def __init__(self, name, save=True):
    self.name = name
    self.params = MeasurementParameters()

    if save:
        self.dataset_idx = 0
        self.h5data = h5.HDF5Data(name=self.mprefix + '_' + self.name)
        self.h5datapath = self.h5data.filepath()
        self.h5base = '/' + self.name + '/'
        self.h5basegroup = self.h5data.create_group(self.name)
        self.datafolder = self.h5data.folder()

    self.keystroke_monitors = {}
    self.params['measurement_type'] = self.mprefix
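# Hedged usage sketch for the constructor above, assuming it belongs to the
# Measurement base class in the m2 module (the subclass and prefix names here
# are made up): mprefix tags the HDF5 file name, and all data for one
# measurement lives under the '/<name>/' base group created in __init__.
class DummyMeasurement(m2.Measurement):
    mprefix = 'dummy'

m = DummyMeasurement('test')   # creates a timestamped HDF5 container for 'dummy_test'
print m.h5datapath             # full path of the container
print m.h5base                 # '/test/'
m.h5data.close()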
def save_current_settings(self, parent=None, name=''):
    # If no parent container is given, create a standalone one and flush it at
    # the end. (In the original, 'flush' was only set inside the if-branch and
    # raised a NameError when a parent was passed in.)
    flush = parent is None
    if parent is None:
        parent = h5.HDF5Data(name='OKOTech_DM' + '_' + name)

    instrument_grp = parent.create_group('instrument_settings')
    insgroup = instrument_grp.create_group(self._name)
    parlist = dict_to_ordered_tuples(self.get_parameters())
    for (param, popts) in parlist:
        try:
            # query the hardware only for parameters tagged 'remote'
            insgroup.attrs[param] = self.get(param, query=True) \
                if 'remote' in self.get_options()['tags'] \
                else self.get(param, query=False)
        except (ValueError, TypeError):
            # fall back to a string for values HDF5 attributes cannot hold
            insgroup.attrs[param] = str(self.get(param, query=False))

    if flush:
        parent.flush()
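# Hedged usage sketch for save_current_settings (the instrument handle and all
# names are hypothetical): called without a parent it creates and flushes its
# own file; called with one, the settings land in the caller's file.
dm = qt.instruments['OKOTech_DM']  # assumed instrument proxy name

# standalone: makes its own HDF5 container
dm.save_current_settings(name='after_calibration')

# into an existing container, which the caller keeps ownership of
dat = h5.HDF5Data(name='my_measurement')
dm.save_current_settings(parent=dat)
dat.close()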
def start_scan_debug():
    xs_blobs = np.array([])
    ys_blobs = np.array([])
    folder = r'D:\measuring\data\20180208\194717_scan2d'
    search_range = 2
    z = -3

    ip = testcv.Image_processor()
    ip.config(search_range)
    S = NV_search()

    # only process image files whose names match one of these substrings
    filter_str = ['x_c']

    qt.instruments['GreenAOM'].turn_on()
    for filename in os.listdir(folder):
        if any(s in filename for s in filter_str) and 'search_range' not in filename:
            cropped_image = ip.crop_image(folder, filename)
            x_blobs, y_blobs = ip.find_blobs(folder, filename, cropped_image, search_range, z)
            xs_blobs = np.append(xs_blobs, x_blobs)
            ys_blobs = np.append(ys_blobs, y_blobs)

    blobs_list = np.append([xs_blobs], [ys_blobs], axis=0)
    print 'number of blobs', len(xs_blobs)

    dat = h5.HDF5Data(name='outcome_coordinates')
    dat.create_dataset('z=' + str(z) + ' blobs_coordinates', data=np.transpose(blobs_list))
    m2.save_instrument_settings_file(dat)
    dat.close()
def DataDumperThread(StopQueue, ProcessedDataQueue, MAX_DATA_LEN, data_filepath):
    rawdata_idx = 0
    t_ofl = 0
    t_lastsync = 0
    last_sync_number = 0
    current_dset_length = 0

    head, tail = os.path.split(data_filepath)
    h5data = h5.HDF5Data(name=head + '/PQData_' + tail)
    h5datapath = h5data.filepath()
    print 'Filepath ' + str(h5datapath)
    print 'Dumper Process started'

    while True:
        # The process is stopped by putting an item on the stop queue; it then
        # drains the remaining queue items before exiting.
        if not StopQueue.empty():
            if ProcessedDataQueue.empty():
                break
            print 'Stopping the Dumper... waiting until the queue is empty. Current length ' + str(ProcessedDataQueue.qsize())

        # start a new set of extendable datasets once the current one is full
        if rawdata_idx == 0 or current_dset_length > MAX_DATA_LEN:
            rawdata_idx += 1
            current_dset_length = 0
            print 'Creating a new set of datasets.'
            dset_hhtime = h5data.create_dataset('PQ_time-{}'.format(rawdata_idx), (0,), 'u8', maxshape=(None,))
            dset_hhchannel = h5data.create_dataset('PQ_channel-{}'.format(rawdata_idx), (0,), 'u1', maxshape=(None,))
            dset_hhspecial = h5data.create_dataset('PQ_special-{}'.format(rawdata_idx), (0,), 'u1', maxshape=(None,))
            dset_hhsynctime = h5data.create_dataset('PQ_sync_time-{}'.format(rawdata_idx), (0,), 'u8', maxshape=(None,))
            dset_hhsyncnumber = h5data.create_dataset('PQ_sync_number-{}'.format(rawdata_idx), (0,), 'u4', maxshape=(None,))
            h5data.flush()

        if ProcessedDataQueue.qsize() > 0:
            try:
                hhtime, hhchannel, hhspecial, sync_time, sync_number, \
                    newlength, t_ofl, t_lastsync, last_sync_number = ProcessedDataQueue.get(True, 1)
            except Exception as E:
                print 'exception: timeout during get() in the dumper queue'
                print E.args
                continue  # do not process data when the get() has failed

            if newlength > 0:
                # grow the datasets and append the new block of events
                dset_hhtime.resize((current_dset_length + newlength,))
                dset_hhchannel.resize((current_dset_length + newlength,))
                dset_hhspecial.resize((current_dset_length + newlength,))
                dset_hhsynctime.resize((current_dset_length + newlength,))
                dset_hhsyncnumber.resize((current_dset_length + newlength,))
                dset_hhtime[current_dset_length:] = hhtime
                dset_hhchannel[current_dset_length:] = hhchannel
                dset_hhspecial[current_dset_length:] = hhspecial
                dset_hhsynctime[current_dset_length:] = sync_time
                dset_hhsyncnumber[current_dset_length:] = sync_number
                current_dset_length += newlength
                h5data.flush()

    h5data.close()  # make sure everything is on disk before exiting
    print 'Dumper thread stopped'
    print 'PQ total datasets, events in last dataset, last sync number:', rawdata_idx, current_dset_length, last_sync_number
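# A minimal sketch (names and numbers hypothetical) of how the dumper above is
# wired up with multiprocessing queues: a reader process puts processed event
# blocks on the data queue, and anything put on the stop queue tells the
# dumper to drain the remaining items and exit.
import multiprocessing

stop_q = multiprocessing.Queue()
data_q = multiprocessing.Queue()

dumper = multiprocessing.Process(
    target=DataDumperThread,
    args=(stop_q, data_q, int(50e6), r'D:\measuring\data\mydata.hdf5'))
dumper.start()

# ... the reader puts (hhtime, hhchannel, hhspecial, sync_time, sync_number,
#     newlength, t_ofl, t_lastsync, last_sync_number) tuples on data_q ...

stop_q.put(True)   # signal shutdown; the dumper drains data_q first
dumper.join()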
Example to illustrate the usage of the hdf5_data module.

To check out the resulting file you can use, besides python, the HDFView
program from the HDF Group (hdfgroup.com). About the HDF5 implementation in
python, see h5py.alfven.org.

Latest version: 2012/12/26, Wolfgang Pfaff <wolfgangpfff at gmail dot com>
"""

import hdf5_data as h5
import numpy as np

### Direct access to hdf5 container

# create data; follows the data storage scheme of qtlab
dat = h5.HDF5Data(name='data_number_one')

# this function is a simple wrapper for the h5py method of the same name
print 'create our first dataset'
dset1 = dat.create_dataset('first set', (5, 5), 'i')
dset1[...] = 42
print dset1        # this is the dataset object
print dset1.value  # this is a numpy array

# simpler access (equivalent)
print ''
print 'again...'
print dat['/first set']

# create something in a group, by simple access (there's also a create_group
# method that's a simple wrapper for the h5py method).
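# A possible continuation of the group example above (the group and dataset
# names are made up), assuming HDF5Data forwards item access to h5py as the
# text suggests: assigning an array into a group creates the dataset in place.
grp = dat.create_group('some group')
grp['second set'] = np.arange(10)
print dat['/some group/second set'].value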
def go_to_membrane(where, **kw):
    save_fit = kw.pop('save_fit', False)

    opt_ins = qt.instruments['opt1d_counts']
    mos_ins = qt.instruments['master_of_space']

    x, y = opt_ins.run(dimension='z', scan_length=8, nr_of_points=61, pixel_time=80,
                       return_data=True, gaussian_fit=False)
    Dx = 1.5
    # fit parameters: g_a1, g_A1, g_x01, g_sigma1, g_A2, g_Dx, g_sigma2
    fitargs = (0, np.max(y), x[np.argmax(y)], 0.5, np.max(y) * 0.7, Dx, 0.5)
    gaussian_fit = fit.fit1d(x, y, common.fit_offset_double_gauss, *fitargs,
                             fixed=[5], do_print=False, ret=True)
    if type(gaussian_fit) != dict:
        print 'double gaussian fit failed'
        return
    print gaussian_fit['success']

    fits = gaussian_fit['params_dict']
    if where == 'middle':
        D = Dx / 2. - 0.1
    elif where == 'surface':
        D = 0
    else:
        D = Dx / 2. - 0.1
        print 'unknown target, defaulting to the middle of the membrane'

    if gaussian_fit['success']:
        print 'fit succeeded, going to Z =', fits['x01'] + D
        mos_ins.set_z(fits['x01'] + D)
    else:
        mos_ins.set_z(mos_ins.get_z() + D)

    if save_fit:
        dat = h5.HDF5Data(name='optimize_z_double_gauss_fit')
        print dat.filepath()
        fit.write_to_hdf(gaussian_fit, dat.filepath())
        dat.close()
        qt.msleep(1)

    return gaussian_fit
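# Inferred form of the fit model used above (not confirmed by the source): the
# parameter names suggest an offset plus two Gaussians, with the second one a
# fixed distance Dx above the first, which is why index 5 (Dx) is held fixed
# in the fit1d call.
def offset_double_gauss(x, a1, A1, x01, sigma1, A2, Dx, sigma2):
    return (a1
            + A1 * np.exp(-0.5 * ((x - x01) / sigma1) ** 2)
            + A2 * np.exp(-0.5 * ((x - (x01 + Dx)) / sigma2) ** 2))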
def do_long_scan(bleaching_scan=False, save_fits=False, name=''):
    scan2d_ins = qt.instruments['scan2d']
    save = qt.instruments['setup_controller']
    mos_ins = qt.instruments['master_of_space']
    GreenAOM = qt.instruments['GreenAOM']
    opt1d_ins = qt.instruments['opt1d_counts']

    ystarts = [20, 40, 60, 80]
    delta_y = 20
    xstarts = [-60]
    delta_x = 20
    ystep = delta_y * 10 + 1
    xstep = delta_x * 10 + 1
    bleaching_time = 20
    depths = np.array([0, -2, -3])
    aborted = False

    for ystart in ystarts:
        print 'y start =', ystart
        ystop = ystart + delta_y
        for xstart in xstarts:
            print 'x start =', xstart
            xstop = xstart + delta_x
            scan2d_ins.set_ystart(ystart)
            scan2d_ins.set_xstart(xstart)
            scan2d_ins.set_ystop(ystop)
            scan2d_ins.set_xstop(xstop)
            scan2d_ins.set_ysteps(ystep)
            scan2d_ins.set_xsteps(xstep)

            mos_ins.set_x(xstart)
            qt.msleep(1)
            mos_ins.set_y(ystart)
            qt.msleep(1)
            GreenAOM.set_power(400.e-6)

            qt.msleep(3)
            # coarse z-scan to find the surface; the gaussian-fit run that
            # used to set fitres is disabled, so initialize it explicitly to
            # make the save below skip cleanly instead of raising a NameError
            fitres = None
            opt1d_ins.run(dimension='z', scan_length=15, nr_of_points=151,
                          pixel_time=100, gaussian_fit=False)
            qt.msleep(5)
            z_surface = mos_ins.get_z()

            if save_fits and type(fitres) == dict:
                dat = h5.HDF5Data(name='optimize_z_gauss_fit' + '_y_' + str(ystart) + '_x_' + str(xstart))
                print dat.filepath()
                fit.write_to_hdf(fitres, dat.filepath())
                dat.close()

            for depth in depths:
                print 'depth under surface', depth
                mos_ins.set_z(z_surface + depth)

                if bleaching_scan:
                    save.set_keyword(name + '_bleaching_x=%d,y=%d,z=%.1f+%.1f' % (xstart, ystart, z_surface, depth))
                    scan2d_ins.set_pixel_time(bleaching_time)
                    GreenAOM.turn_on()
                    scan2d_ins.set_is_running(True)
                    while scan2d_ins.get_is_running():
                        if msvcrt.kbhit() and (msvcrt.getch() == 'q'):
                            aborted = True
                            break
                        qt.msleep(1)
                    qt.msleep(3)
                    if aborted:
                        break
                    mos_ins.set_x(xstart)
                    qt.msleep(0.5)
                    mos_ins.set_y(ystart)
                    qt.msleep(0.5)

                save.set_keyword(name + 'x=%d,y=%d,z=%.1f+%.1f' % (xstart, ystart, z_surface, depth))
                scan2d_ins.set_pixel_time(10)
                GreenAOM.set_power(400e-6)
                scan2d_ins.set_is_running(True)
                while scan2d_ins.get_is_running():
                    if msvcrt.kbhit() and (msvcrt.getch() == 'q'):
                        aborted = True
                        break
                    qt.msleep(1)
                qt.msleep(3)
                if aborted:
                    break
                mos_ins.set_x(xstart)
                qt.msleep(0.5)
                mos_ins.set_y(ystart)
                qt.msleep(0.5)

            # set z back so that for the next scan it will be easier to find the surface
            mos_ins.set_z(z_surface)
            if aborted:
                break
            mos_ins.set_x(xstart)
            qt.msleep(0.5)
            mos_ins.set_y(ystart)
            qt.msleep(0.5)
        if aborted:
            break