Example #1
def test_device_meas(station, chip):
    meas = Measurement(station=station)
    device = chip.device1
    meas.register_parameter(device.gate)
    meas.register_parameter(device.drain, setpoints=(device.gate, ))

    with meas.run() as datasaver:
        for set_v in np.linspace(0, 1.5, 10):
            device.gate.set(set_v)
            get_a = device.drain_X.get()
            datasaver.add_result((device.gate, set_v), (device.drain, get_a))
            datasaver.flush_data_to_database()
        assert len(datasaver.dataset.to_pandas_dataframe_dict()
                   ["device1_drain"]) == 10
def do2d_multi(param_slow: _BaseParameter, start_slow: float, stop_slow: float,
               num_points_slow: int, delay_slow: float,
               param_fast: _BaseParameter, start_fast: float, stop_fast: float,
               num_points_fast: int, delay_fast: float,
               lockins: Sequence[SR830],
               devices_no_buffer: Optional[Iterable[Parameter]] = None,
               write_period: float = 1.,
               threading: List[bool] = [True, True, True, True],
               label: Optional[str] = None,
               channels: int = 0,
               attempts_to_get: int = 3,
               delay_fast_increase: float = 0.0
               ) -> Tuple[DataSet, ...]:
    """
    This is a do2d to be used with a collection of SR830 lockins.

    Args:
        param_slow: The QCoDeS parameter to sweep over in the outer loop
        start_slow: Starting point of sweep in outer loop
        stop_slow: End point of sweep in the outer loop
        num_points_slow: Number of points to measure in the outer loop
        delay_slow: Delay after setting parameter in the outer loop
        param_fast: The QCoDeS parameter to sweep over in the inner loop
        start_fast: Starting point of sweep in inner loop
        stop_fast: End point of sweep in the inner loop
        num_points_fast: Number of points to measure in the inner loop
        delay_fast: Delay after setting parameter before measurement is performed
        lockins: Iterable of SR830 lockins
        devices_no_buffer: Iterable of Parameters to be measured alongside the lockins
        write_period: The time after which the data is actually written to the
                      database.
        threading: For each element that is True, write_in_background, buffer_reset,
                   send_trigger, and get_trace will be threaded, respectively
        label: Optional label passed to the lockins' sweep setpoint parameters
        channels: Channels to get from the buffer. "0" gets both channels
        attempts_to_get: Maximum number of attempts to try to get the buffer if it fails
        delay_fast_increase: Amount to increase delay_fast if getting the buffer fails
    """

    
    logger.info('Starting do2d_multi with {} points'.format(num_points_slow * num_points_fast))
    logger.info('write_in_background {}, threading buffer_reset {}, threading send_trigger {}, threading get_trace {}'.format(*threading))
    begin_time = time.perf_counter()

    for lockin in lockins:
        if not isinstance(lockin, SR830):
            raise ValueError('Invalid instrument. Only SR830s are supported')
        lockin.buffer_SR("Trigger")
        lockin.buffer_trig_mode.set('ON')
        assert isinstance(param_fast, Parameter)
        lockin.set_sweep_parameters(param_fast, start_fast, stop_fast, num_points_fast, label=label)

    interval_slow = tqdm(np.linspace(start_slow, stop_slow, num_points_slow), position=0)
    interval_slow.set_description("Slow parameter")
    set_points_fast = lockins[0].sweep_setpoints

    meas = Measurement()
    meas.write_period = write_period
    meas.register_parameter(set_points_fast)
    meas.register_parameter(param_fast)
    meas.register_parameter(param_slow)

    param_fast.post_delay = delay_fast
    param_slow.post_delay = delay_slow

    traces = _datatrace_parameters(lockins, channels)

    for trace in traces:
        assert isinstance(trace.root_instrument, SR830)
        if len(trace.label.split()) < 2:
            trace.label = trace.root_instrument.name + ' ' + trace.label
        meas.register_parameter(trace, setpoints=(param_slow, trace.root_instrument.sweep_setpoints))

    if devices_no_buffer is not None:
        meas_no_buffer = Measurement()
        meas_no_buffer.write_period = write_period
        meas_no_buffer.register_parameter(param_fast)
        meas_no_buffer.register_parameter(param_slow)
        for device in devices_no_buffer:
            meas_no_buffer.register_parameter(device, setpoints=(param_slow, param_fast))

    time_fast_loop = 0.0
    time_set_fast = 0.0
    time_buffer_reset = 0.0
    time_trigger_send = 0.0
    time_get_trace = 0.0

    cm_datasaver = meas.run(write_in_background=threading[0])
    if devices_no_buffer is not None:
        cm_datasaver_no_buffer = meas_no_buffer.run(write_in_background=threading[0])

    with ExitStack() as cmx:
        cmx.enter_context(cm_datasaver)
        datasaver = cm_datasaver.datasaver
        if devices_no_buffer is not None:
            cmx.enter_context(cm_datasaver_no_buffer)
            datasaver_no_buffer = cm_datasaver_no_buffer.datasaver
        for point_slow in interval_slow:
            param_slow.set(point_slow)
            data = []
            data.append((param_slow, param_slow.get()))

            if devices_no_buffer is not None:
                data_no_buffer = []
                data_no_buffer.append((param_slow, param_slow.get()))
            attempts = 0
            while attempts < attempts_to_get:
                try:
                    begin_time_temp_buffer = time.perf_counter()
                    if threading[1]:
                        with concurrent.futures.ThreadPoolExecutor() as executor:
                            for lockin in lockins:
                                executor.submit(lockin.buffer_reset)
                    else:
                        for lockin in lockins:
                            lockin.buffer_reset()
                    time_buffer_reset += time.perf_counter() - begin_time_temp_buffer

                    begin_time_temp_fast_loop = time.perf_counter()
                    interval_fast = tqdm(set_points_fast.get(), position=1, leave=False)
                    interval_fast.set_description("Fast parameter")
                    for point_fast in interval_fast:
                        begin_time_temp_set_fast = time.perf_counter()
                        param_fast.set(point_fast)

                        time_set_fast += time.perf_counter() - begin_time_temp_set_fast
                        begin_time_temp_trigger = time.perf_counter()
                        if threading[2]:
                            with concurrent.futures.ThreadPoolExecutor() as executor:
                                for lockin in lockins:
                                    executor.submit(lockin.send_trigger)
                        else:
                            for lockin in lockins:
                                lockin.send_trigger()

                        time_trigger_send += time.perf_counter() - begin_time_temp_trigger

                        if devices_no_buffer is not None:
                            fast = param_fast.get()
                            data_no_buffer.append((param_fast, fast))
                            for device in devices_no_buffer:
                                device_value = device.get()
                                data_no_buffer.append((device, device_value))
                            datasaver_no_buffer.add_result(*data_no_buffer)

                    time_fast_loop += time.perf_counter() - begin_time_temp_fast_loop

                    begin_time_temp_trace = time.perf_counter()
                    if threading[3]:
                        with concurrent.futures.ThreadPoolExecutor() as executor:
                            data_trace = executor.map(trace_tuble, traces)

                        data += list(data_trace)
                    else:
                        for trace in traces:
                            data.append((trace, trace.get()))
                    time_get_trace += time.perf_counter() - begin_time_temp_trace

                    data.append((set_points_fast, set_points_fast.get()))
                    break
                except Exception as e:
                    logger.info('Failed to get buffer')
                    logger.info(e)
                    print(e)
                    attempts += 1
                    delay_fast += delay_fast_increase
                    param_fast.post_delay = delay_fast  # re-apply so the increased delay takes effect
                    print(attempts)
                    logger.info('next attempt nr. {}'.format(attempts))
                    logger.info('next delay_fast. {}'.format(delay_fast))
                    print(delay_fast)
                    if attempts < attempts_to_get:
                        log_message = 'getting the buffer failed, will try again'
                        print(log_message)
                        logger.info(log_message)
                    else:
                        log_message = 'getting the buffer failed, will go to next slow_point'
                        print(log_message)
                        logger.info(log_message)

            datasaver.add_result(*data)

    message = 'Have finished the measurement in {} seconds'.format(time.perf_counter()-begin_time)
    logger.info(message)
    message2 = 'Time used in buffer reset {}. Time used in send trigger {}. Time used in get trace {}'.format(time_buffer_reset, time_trigger_send, time_get_trace)
    logger.info(message2)
    logger.info('time in the fast loop {}'.format(time_fast_loop))

    if devices_no_buffer is not None:
        return (datasaver.dataset, datasaver_no_buffer.dataset)
    else:
        return (datasaver.dataset,)
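
# Minimal usage sketch for do2d_multi above (not part of the original snippet).
# It assumes a running QCoDeS station with two SR830 driver instances
# ("lockin1", "lockin2") and a voltage source "dac" exposing "slow_gate" and
# "fast_gate" parameters; all of these names are hypothetical placeholders.
datasets = do2d_multi(dac.slow_gate, 0.0, 1.0, 51, 0.1,
                      dac.fast_gate, -0.5, 0.5, 201, 0.01,
                      lockins=[lockin1, lockin2],
                      write_period=1.0,
                      threading=[True, True, True, True])
buffered_dataset = datasets[0]
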
    def double_sweep(self, graph=True):
        dac = self.dac
        dmm = self.dmm
        exp = self.exp
        station = self.station

        meas = Measurement(exp=exp, station=station)
        meas.register_parameter(
            dac.freq)  # register the first independent parameter
        meas.register_parameter(dac.amp)
        meas.register_parameter(dmm.v1, setpoints=(
            dac.freq,
            dac.amp,
        ))  # now register the dependent one

        meas.write_period = 2

        if self.__ax1 is not None and graph:
            self.__ax1.set_xlabel("Frequency of input (Hz)")
            self.__ax1.set_ylabel("Amplitude of input (V)")
            self.__ax1.set_title('One Tone Spectroscopy with double sweep')

            colorbar = False

        #vectors for plotting
        Y = []
        X = []
        C = []
        column = -1
        with meas.run() as datasaver:
            for set_v in np.linspace(self.__amplitude_range[0],
                                     self.__amplitude_range[1],
                                     self.__amplitude_samples):
                if self.__ax1 is not None and graph:
                    C.append([])
                    column += 1
                    Y.append(set_v)
                dac.amp(set_v)
                for set_f in np.linspace(self.__range[0], self.__range[1],
                                         self.__samples):
                    #sets dac.freq to set_f and reads v1 from the dmm
                    dac.freq.set(set_f)
                    get_v = dmm.v1.get()
                    #adds to the datasaver the result
                    datasaver.add_result((dac.freq, set_f), (dac.amp, set_v),
                                         (dmm.v1, get_v))

                    #stays at one frequency so the oscilloscope can take the measurement it needs
                    sleep(self.__freq_time)

                    #plots graph in real time

                    #checks if there is a canvas to plot in
                    if self.__ax1 is not None and graph:
                        #gets data from dataset to plot
                        X.append(set_f)
                        C[column].append(get_v)

                    #plots data with color from array picked by number
                #pcolor doesn't handle incomplete lines/columns well alongside complete ones, so it's done at the end of the main loop
                if self.__ax1 is not None and graph:
                    #removes repeated colorbars
                    if (colorbar):
                        colorbar.remove()
                    graph_colors = self.__ax1.pcolor(X, Y, C)
                    colorbar = self.__fig.colorbar(graph_colors, ax=self.__ax1)
                    colorbar.set_label('Amplitude of output (V)',
                                       rotation=270,
                                       labelpad=15)
                    plt.pause(0.01)
                    X = []
            #changes color of plot for next time
            self.__colorN = (self.__colorN + 1) % 7
            #adds the amplitude to the dataset as well
            datasaver.add_result((dac.amp, dac.amp()))

            self.dataset = datasaver.dataset  # convenient to have for plotting

        print("experiment was run")
    def run(self, graph=True, amp=-1, legend=True):
        dac = self.dac
        dmm = self.dmm
        exp = self.exp
        station = self.station

        #selects between amp set previously or set in the run call
        if (amp == -1):
            dac.amp(self.__amp)
            amp = self.__amp
        else:
            dac.amp(amp)

        meas = Measurement(exp=exp, station=station)
        meas.register_parameter(
            dac.freq)  # register the first independent parameter
        meas.register_parameter(dac.amp)
        meas.register_parameter(
            dmm.v1, setpoints=(dac.freq, ))  # now register the dependent one

        meas.write_period = 2

        #plot labels
        if self.__ax1 is not None and graph:
            self.__ax1.set_xlabel("Frequency")
            self.__ax1.set_ylabel("Amplitude measured")
            title = 'One Tone Spectroscopy sweeping the frequency'
            self.__ax1.set_title(title)
            #sets legend with amplitude for each plot to appear
            first = legend
        #vectors for plotting
        Y = []
        X = []

        with meas.run() as datasaver:
            #np.linspace gives the frequency interval and the number of points to sweep
            for set_v in np.linspace(self.__range[0], self.__range[1],
                                     self.__samples):
                #sets dac.freq to set_v and reads v1 from the dmm
                dac.freq.set(set_v)
                get_v = dmm.v1.get()
                #adds to the datasaver the result
                datasaver.add_result((dac.freq, set_v), (dmm.v1, get_v))

                #stays at one frequency so the oscilloscope can take the measurement it needs
                sleep(self.__freq_time / 10)

                #plots graph in real time

                #checks if there is a canvas to plot in
                if self.__ax1 is not None and graph:
                    #gets data from dataset to plot
                    X.append(set_v)
                    Y.append(get_v)
                    #plots data with color from array picked by number
                    self.__ax1.plot(X, Y, c=self.__colors[self.__colorN])
                    #plots once with a label and the legend for it
                    if (first):
                        self.__ax1.plot(X,
                                        Y,
                                        c=self.__colors[self.__colorN],
                                        label='Amplitude ' + str(self.__amp) +
                                        'V')
                        plt.legend(loc='lower right')
                        first = False

                    plt.pause(0.01)

            #changes color of plot for next time
            self.__colorN = (self.__colorN + 1) % 7
            #adds the amplitude to the dataset as well
            datasaver.add_result((dac.amp, dac.amp()))

            self.dataset = datasaver.dataset  # convenient to have for plotting

        print("experiment was run")
Example #5
def run_measurement(event, 
                    param_set, 
                    param_meas, 
                    spaces, 
                    settle_times, 
                    name, 
                    comment, 
                    meander, 
                    extra_cmd, 
                    extra_cmd_val,
                    wait_first_datapoint,
                    checkstepinterdelay,
                    manualsetpoints):
    # Local reference of THIS thread object
    t = current_thread()
    # Thread is alive by default
    t.alive = True

    # Create measurement object
    meas = Measurement() 
    # Apply name
    meas.name = name

    #Generating setpoints
    if not manualsetpoints:
        if meander:
            setpoints = cartprodmeander(*spaces)
        else:
            setpoints = cartprod(*spaces)
    else:
        setpoints = spaces
    ### Filling station for snapshotting
    fill_station(param_set,param_meas)
    ### Checking and setting safety rates and delays
    if checkstepinterdelay:
        safetyratesdelays(param_set,spaces)    
    
    meas.write_period = 1
       
    #Make array showing changes between setpoints on axes
    changesetpoints = setpoints - np.roll(setpoints, 1, axis=0)

    #Forcing the first setpoint in changesetpoints to 1 to make sure it is always set.
    changesetpoints[0,:] = 1
   
    # Registering set parameters
    param_setstring = ''
    param_setnames = [None]*len(param_set)
    param_setunits = [None]*len(param_set)
    for i,parameter in enumerate(param_set):
        meas.register_parameter(parameter)
        param_setstring += parameter.name + ', '
        param_setnames[i] = parameter.name
        param_setunits[i] = parameter.unit
    
    output = [None]*len(param_meas)
    # Registering readout parameters
    param_measstring = ''
    param_measnames = [None]*len(param_meas)
    param_measunits = [None]*len(param_meas)
    param_measnames_sub = [None]*len(param_meas)
    paramtype = [None]*len(param_meas)
    for i, parameter in enumerate(param_meas):
        meas.register_parameter(parameter, setpoints=(*param_set,))
        output[i]= [parameter, None]
        param_measstring += parameter.name + ', '
        param_measnames[i] = parameter.name
        if isinstance(parameter, qc.instrument.parameter.ParameterWithSetpoints):
            param_measunits[i] = parameter.unit
            param_measnames_sub[i] = ''
            paramtype[i] = 'ParameterWithSetpoints'
        elif isinstance(parameter, qc.instrument.parameter.MultiParameter):
            param_measunits[i] = parameter.units
            param_measnames_sub[i] = parameter.names
            paramtype[i] = 'MultiParameter'
        elif isinstance(parameter, qc.instrument.parameter.Parameter):
            param_measunits[i] = parameter.unit
            paramtype[i] = 'Parameter'

    # Start measurement routine
    with meas.run() as datasaver:  
        global measid
        measid = datasaver.run_id

        # Getting dimensionality of measurement
        ndims = setpoints.shape[1]
        
        # Add comment to metadata in database
        datasaver.dataset.add_metadata('Comment', comment)
        
        # Main loop for setting values
        for i in range(0,setpoints.shape[0]):
            #Check for nonzero axis to apply new setpoints by looking in changesetpoints arrays
            resultlist = [None]*ndims
            if i==0: #On first datapoint change set_params from slow to fast axis
                dimlist = range(0,ndims)
            else: #On all other datapoints change fast axis first
                dimlist = reversed(range(0,ndims))
            for j in dimlist:
                if not np.isclose(changesetpoints[i,j], 0, atol=0): # Only set the set-parameters that need to be changed
                    if i==0 and not t.alive: # Allows killing of the thread in between initialisation of set parameters for the first datapoint.
                        event.set() # Trigger closing of run_dbextractor
                        raise KeyboardInterrupt('User interrupted doNd during initialisation of first setpoint.')
                    param_set[j].set(setpoints[i,j])
                    time.sleep(settle_times[j]) # Apply appropriate settle_time
                resultlist[j] = (param_set[j],setpoints[i,j]) # Make a list of result
            if i==0: # Add additional waiting time for first measurement point before readout and start timers
                time.sleep(wait_first_datapoint)
                # Start various timers
                starttime = datetime.datetime.now() + datetime.timedelta(0,-1)
                lastwrittime = starttime
                lastprinttime = starttime             
            for k, parameter in enumerate(param_meas): # Readout all measurement parameters at this setpoint i
                if extra_cmd is not None: # Optional extra command + value that is run before each measurement parameter is read out.
                    if extra_cmd[k] is not None:
                        if extra_cmd_val[k] is not None:
                            (extra_cmd[k])(extra_cmd_val[k])
                        else:
                            (extra_cmd[k])()
                output[k][1] = parameter.get()
            datasaver.add_result(*resultlist, # Add everything to the database
                                 *output)
            setvals = list(zip(param_setnames,[f"{x:.{6}}" for x in setpoints[i,:]],param_setunits))
            outputparsed = [None]*len(param_meas)
            for k,x in enumerate([row[1] for row in output]):
                if paramtype[k] == 'MultiParameter':
                    valsparsed = [None]*len(x)
                    for l,y in enumerate(x):
                        if isinstance(y, (list,tuple,np.ndarray)):
                            if len(y) > 5:
                                vals = ['{:.6f}'.format(x) for x in y[0:5]]
                                vals.append('.......')
                            else:
                                vals = ['{:.6f}'.format(x) for x in y]
                            newvals = [[vals[i]] for i in range(0,len(vals))]
                            valsparsed[l] = tabulate(newvals,tablefmt='plain') 
                        else:
                            valsparsed[l] = f"{y:.{6}}"
                    outputparsed[k] = tabulate(list(zip(param_measnames_sub[k],valsparsed,param_measunits[k])), tablefmt='plain', colalign=('left','left','left'))
                if paramtype[k] == 'Parameter':
                    outputparsed[k] = tabulate([[f"{x:.{6}}",param_measunits[k]]], tablefmt='plain')
                if paramtype[k] == 'ParameterWithSetpoints':
                    outputparsed[k] = '{Parameter with setpoints, not shown.}'
            measvals = list(zip(param_measnames,outputparsed))

            if not t.alive: # Check if user tried to kill the thread by keyboard interrupt, if so kill it
                event.set() # Trigger closing of run_dbextractor
                qctools.db_extraction.db_extractor(dbloc = qc.dataset.sqlite.database.get_DB_location(),  # Run db_extractor once more
                                   ids=[measid], 
                                   overwrite=True,
                                   newline_slowaxes=True,
                                   no_folders=False,
                                   suppress_output=True,
                                   useopendbconnection = True)
                plot_by_id(measid)
                raise KeyboardInterrupt('User interrupted doNd. All data flushed to database and extracted to *.dat file.')
            #Time estimation
            printinterval = 0.025 # Increase printinterval to save CPU
            now = datetime.datetime.now()
            finish =['','']
            if (now-lastprinttime).total_seconds() > printinterval or i == len(setpoints)-1: # Calculate and print time estimation
                frac_complete = (i+1)/len(setpoints)
                duration_in_sec = (now-starttime).total_seconds()/frac_complete
                elapsed_in_sec = (now-starttime).total_seconds()
                remaining_in_sec = duration_in_sec-elapsed_in_sec
                perc_complete = np.round(100*frac_complete,2)
                clear_output(wait=True)
                if i == len(setpoints)-1:
                    finish[0] = 'Finished: ' + str((now).strftime('%Y-%m-%d'))
                    finish[1] = str((now).strftime('%H:%M:%S'))

                l1 = tabulate([['----------------------' ,'-------------------------------------------------'],
                               ['Starting runid:', str(measid)], # Time estimation now in properly aligned table format
                               ['Name:', name], 
                               ['Comment:', comment],
                               ['Set parameter(s):', tabulate(setvals, tablefmt='plain', colalign=('left','left','left'))],
                               ['Readout parameter(s):', tabulate(measvals, tablefmt='plain', colalign=('left','left'))],
                               ['______________________' ,'_________________________________________________'],
                               ['Setpoint: ' + str(i+1) + ' of ' + str(len(setpoints)), '%.2f' % perc_complete + ' % complete.'],
                               ['Started: ' + starttime.strftime('%Y-%m-%d'), starttime.strftime('%H:%M:%S')],
                               ['ETA: ' + str((datetime.timedelta(seconds=np.round(duration_in_sec))+starttime).strftime('%Y-%m-%d')), str((datetime.timedelta(seconds=np.round(duration_in_sec))+starttime).strftime('%H:%M:%S'))],
                               [finish[0],finish[1]],
                               ['Total duration:', str(datetime.timedelta(seconds=np.round(duration_in_sec)))],
                               ['Elapsed time:', str(datetime.timedelta(seconds=np.round(elapsed_in_sec)))],
                               ['Remaining time:', str(datetime.timedelta(seconds=np.round(remaining_in_sec)))],
                               ], colalign=('right','left'), tablefmt='plain')
                print(l1)
                lastprinttime = now
        event.set() # Trigger closing of run_dbextractor
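
# Minimal sketch (not part of the original snippet) of how run_measurement's
# kill-switch design might be driven: the function runs in a worker Thread,
# "event" lets it signal a db-extractor loop to close, and flipping the
# thread's "alive" flag on Ctrl-C asks the measurement loop to stop cleanly.
# param_set, param_meas, spaces and settle_times are hypothetical placeholders.
from threading import Thread, Event

event = Event()
worker = Thread(target=run_measurement,
                args=(event, param_set, param_meas, spaces, settle_times,
                      'gate_sweep', 'example comment', False,  # name, comment, meander
                      None, None,                              # extra_cmd, extra_cmd_val
                      0.1, True, False))                       # wait_first_datapoint, checkstepinterdelay, manualsetpoints
worker.start()
try:
    while worker.is_alive():
        worker.join(timeout=0.5)
except KeyboardInterrupt:
    worker.alive = False  # read inside run_measurement via current_thread().alive
    worker.join()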
Example #6
def run_zerodim(event, param_meas, name, comment, wait_first_datapoint):
    # Local reference of THIS thread object
    t = current_thread()
    # Thread is alive by default
    t.alive = True

    # Create measurement object
    meas = Measurement() 
    # Apply name
    meas.name = name

    ### Filling station for snapshotting
    fill_station_zerodim(param_meas)
    
    meas.write_period = 0.5
    output = [] 
    # Registering readout parameters
    param_measstring = ''
    for parameter in param_meas:
        meas.register_parameter(parameter)
        output.append([parameter, None])   
        param_measstring += parameter.name + ', '
    
    # Start measurement routine
    with meas.run() as datasaver:  
        global measid
        measid = datasaver.run_id

        # Start various timers
        starttime = datetime.datetime.now()
        l1 = tabulate([['----------------------' ,'-------------------------------------------------'],
                       ['Running 0-dimensional measurement,', 'time estimation not available.'], # Time estimation now in properly aligned table format
                       ['Starting runid:', str(measid)], # Time estimation now in properly aligned table format
                       ['Name:', name], 
                       ['Comment:', comment],
                       ['Readout parameter(s):', str(param_measstring)],
                       ['______________________' ,'_________________________________________________'],
                       ['Started: ' + starttime.strftime('%Y-%m-%d'), starttime.strftime('%H:%M:%S')],
                       ], colalign=('right','left'), tablefmt='plain')
        print(l1)

        # Wait before reading out the single datapoint
        time.sleep(wait_first_datapoint)
        for k, parameter in enumerate(param_meas): # Read out all measurement parameters
            output[k][1] = parameter.get()
        datasaver.add_result(*output)
        datasaver.dataset.add_metadata('Comment', comment) # Add comment to metadata in database
        now = datetime.datetime.now()
        elapsed_in_sec = (now-starttime).total_seconds()
        clear_output(wait=True)
        l1 = tabulate([['---------------------------------' ,'-------------------------------------------'],
                       ['Running 0-dimensional measurement,', 'time estimation not available.'], # Time estimation now in properly aligned table format
                       ['Starting runid:', str(measid)], # Time estimation now in properly aligned table format
                       ['Name:', name], 
                       ['Comment:', comment],
                       ['Readout parameter(s):', str(param_measstring)],
                       ['_________________________________' ,'___________________________________________'],
                       ['Started: ' + starttime.strftime('%Y-%m-%d'), starttime.strftime('%H:%M:%S')],
                       ['Finished: ' + str((now).strftime('%Y-%m-%d')),str((now).strftime('%H:%M:%S'))],
                       ['Total duration:', str(datetime.timedelta(seconds=np.round(elapsed_in_sec)))],
                       ], colalign=('right','left'), tablefmt='plain')
        print(l1)
        event.set() # Trigger closing of run_dbextractor
Example #7
def do2d_multi(param_slow: _BaseParameter,
               start_slow: float,
               stop_slow: float,
               num_points_slow: int,
               delay_slow: float,
               param_fast: _BaseParameter,
               start_fast: float,
               stop_fast: float,
               num_points_fast: int,
               delay_fast: float,
               bundle: BundleLockin,
               write_period: float = 1.,
               threading: List[bool] = [True, True, True, True],
               show_progress_bar: bool = True,
               attempts_to_get: int = 3,
               delay_fast_increase: float = 0.0,
               label: Union[str, None] = None):
    """
    This is a do2d only to be used with BundleLockin.

    Args:
        param_slow: The QCoDeS parameter to sweep over in the outer loop
        start_slow: Starting point of sweep in outer loop
        stop_slow: End point of sweep in the outer loop
        num_points_slow: Number of points to measure in the outer loop
        delay_slow: Delay after setting parameter in the outer loop
        param_fast: The QCoDeS parameter to sweep over in the inner loop
        start_fast: Starting point of sweep in inner loop
        stop_fast: End point of sweep in the inner loop
        num_points_fast: Number of points to measure in the inner loop
        delay_fast: Delay after setting parameter before measurement is performed
        bundle: The BundleLockin whose lockins are triggered and read out
        write_period: The time after which the data is actually written to the
                      database.
        threading: For each element that is True, write_in_background, buffer_reset,
                   send_trigger, and get_trace will be threaded, respectively
        show_progress_bar: Should a progress bar be displayed during the
                           measurement.
        attempts_to_get: Maximum number of attempts to get the buffer before failing
        delay_fast_increase: Amount to increase delay_fast by if getting the buffer fails
        label: Optional label passed to the bundle's sweep parameters
    """

    logger.info('Starting do2d_multi with {} points'.format(num_points_slow *
                                                            num_points_fast))
    logger.info(
        'write_in_background {}, threading buffer_reset {}, threading send_trigger {}, threading get_trace {}'
        .format(*threading))
    begin_time = time.perf_counter()
    meas = Measurement()
    bundle.set_sweep_parameters(param_fast,
                                start_fast,
                                stop_fast,
                                num_points_fast,
                                label=label)
    interval_slow = np.linspace(start_slow, stop_slow, num_points_slow)
    meas.write_period = write_period
    set_points_fast = bundle.setpoints

    meas.register_parameter(set_points_fast)
    param_fast.post_delay = delay_fast

    meas.register_parameter(param_slow)
    param_slow.post_delay = delay_slow

    bundle_parameters = bundle.__dict__['parameters']
    traces = [
        bundle_parameters[key] for key in bundle_parameters.keys()
        if 'trace' in key
    ]
    for trace in traces:
        meas.register_parameter(trace, setpoints=(param_slow, set_points_fast))

    time_fast_loop = 0.0
    time_set_fast = 0.0
    time_buffer_reset = 0.0
    time_trigger_send = 0.0
    time_get_trace = 0.0

    if show_progress_bar:
        progress_bar = progressbar.ProgressBar(max_value=num_points_slow *
                                               num_points_fast)
        points_taken = 0

    with meas.run(write_in_background=threading[0]) as datasaver:
        run_id = datasaver.run_id

        for point_slow in interval_slow:
            param_slow.set(point_slow)

            attempts = 0
            while attempts < attempts_to_get:
                try:
                    begin_time_temp_buffer = time.perf_counter()
                    if threading[1]:
                        with concurrent.futures.ThreadPoolExecutor(
                        ) as executor:
                            for lockin in bundle.lockins:
                                executor.submit(lockin.buffer_reset)
                    else:
                        for lockin in bundle.lockins:
                            lockin.buffer_reset()
                    time_buffer_reset += time.perf_counter(
                    ) - begin_time_temp_buffer

                    begin_time_temp_fast_loop = time.perf_counter()
                    for point_fast in set_points_fast.get():
                        begin_time_temp_set_fast = time.perf_counter()
                        param_fast.set(point_fast)

                        time_set_fast += time.perf_counter(
                        ) - begin_time_temp_set_fast
                        begin_time_temp_trigger = time.perf_counter()
                        if threading[2]:
                            with concurrent.futures.ThreadPoolExecutor(
                            ) as executor:
                                for lockin in bundle.lockins:
                                    executor.submit(lockin.send_trigger)
                        else:
                            for lockin in bundle.lockins:
                                lockin.send_trigger()
                        if show_progress_bar and attempts == 0:
                            points_taken += 1
                            progress_bar.update(points_taken)
                        time_trigger_send += time.perf_counter(
                        ) - begin_time_temp_trigger
                    time_fast_loop += time.perf_counter(
                    ) - begin_time_temp_fast_loop

                    begin_time_temp_trace = time.perf_counter()
                    if threading[3]:
                        with concurrent.futures.ThreadPoolExecutor(
                        ) as executor:
                            data = executor.map(trace_tuble, traces)

                        data = list(data)
                    else:
                        data = []
                        for trace in traces:
                            data.append((trace, trace.get()))
                    time_get_trace += time.perf_counter(
                    ) - begin_time_temp_trace

                    data.append((param_slow, param_slow.get()))
                    data.append((set_points_fast, set_points_fast.get()))
                    break
                except Exception as e:
                    print(e)
                    attempts += 1
                    delay_fast += delay_fast_increase
                    param_fast.post_delay = delay_fast  # re-apply so the increased delay takes effect
                    print(attempts)
                    print(delay_fast)
                    if attempts < attempts_to_get:
                        print('getting the buffer failed, will try again')
                    else:
                        print(
                            'getting the buffer failed, will go to next slow_point'
                        )
            datasaver.add_result(*data)

    message = 'Have finished the measurement in {} seconds. run_id {}'.format(
        time.perf_counter() - begin_time, run_id)
    logger.info(message)
    message2 = 'Time used in buffer reset {}. Time used in send trigger {}. Time used in get trace {}'.format(
        time_buffer_reset, time_trigger_send, time_get_trace)
    logger.info(message2)
    logger.info('time in the fast loop {}'.format(time_fast_loop))
    logger.info('time setting in the fast loop {}'.format(time_set_fast))
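
# Minimal usage sketch for the BundleLockin variant of do2d_multi above
# (not part of the original snippet). "bundle" is assumed to be an already
# instantiated BundleLockin wrapping the SR830s, and "dac.slow_gate" /
# "dac.fast_gate" are hypothetical swept parameters. The data ends up in the
# QCoDeS database under the run_id logged when the measurement finishes.
do2d_multi(dac.slow_gate, 0.0, 1.0, 51, 0.1,
           dac.fast_gate, -0.5, 0.5, 201, 0.01,
           bundle=bundle,
           write_period=1.0,
           show_progress_bar=True)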