Example #1
def setup_parameter_from_dict(parameter: _BaseParameter,
                              options: Dict[str, Any]) -> None:
    for attr, val in options.items():
        if attr in PARAMETER_ATTRIBUTES:
            # set the attributes of the parameter that map 1 to 1
            setattr(parameter, attr, val)
        # extra attributes that need parsing
        elif attr == 'limits':
            if isinstance(val, str):
                issue_deprecation_warning(
                    ('use of a comma separated string for the limits '
                     'keyword'),
                    alternative='an array like "[lower_lim, upper_lim]"'
                )
                lower, upper = [float(x) for x in val.split(',')]
            else:
                lower, upper = val
            parameter.vals = validators.Numbers(lower, upper)
        elif attr == 'monitor' and val is True:
            # NOTE: `self` must be provided by an enclosing scope
            # (e.g. the Station method this function is nested in)
            self._monitor_parameters.append(parameter)
        elif attr == 'alias':
            setattr(parameter.instrument, val, parameter)
        elif attr == 'initial_value':
            # skip the value attribute so that it gets set last,
            # when everything else has been set up
            pass
        else:
            log.warning(f'Attribute {attr} not recognized when '
                        f'instantiating parameter "{parameter.name}"')
    if 'initial_value' in options:
        parameter.set(options['initial_value'])
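
# A minimal usage sketch (an assumption, not part of the original source):
# the option names below mirror the branches handled above, and the Parameter
# import path may differ between qcodes versions.
from qcodes.instrument.parameter import Parameter

gate = Parameter('gate_voltage', set_cmd=None, get_cmd=None, unit='V')
setup_parameter_from_dict(gate, {
    'label': 'Gate voltage',   # plain attribute, applied via setattr
    'limits': [-2.0, 2.0],     # parsed into a Numbers validator
    'initial_value': 0.0,      # applied last, after the validator is in place
})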
Example #2
    def _attach_parameter_decorators(self,
                                     parameter: _BaseParameter,
                                     decorator_methods: Dict[str, Callable]):
        """Attaches @parameter decorators to a parameter

        Args:
            parameter: Parameter to attach decorators to
            decorator_methods: Decorator methods to attach to parameter
            """
        for param_attr, param_method in decorator_methods.items():
            method_with_args = partial(param_method, self, parameter)
            if param_attr == 'get':
                parameter.get_raw = method_with_args
                if parameter.wrap_get:
                    parameter.get = parameter._wrap_get(parameter.get_raw)
                else:
                    parameter.get = parameter.get_raw
            elif param_attr == 'set':
                parameter.set_raw = method_with_args
                if parameter.wrap_set:
                    parameter.set = parameter._wrap_set(parameter.set_raw)
                else:
                    parameter.set = parameter.set_raw
            else:
                setattr(parameter, param_attr, method_with_args)
        # perform a set without evaluating, which saves the value,
        # ensuring that new modifications such as the set_parser are
        # taken into account
        if (hasattr(parameter, 'set') and parameter.raw_value is not None
                and parameter.wrap_set):
            parameter.set(copy(parameter.get_latest()), evaluate=False)
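
# A minimal sketch of the expected ``decorator_methods`` shape (assumed, not
# from the original source): each value is an unbound method that receives the
# owning object and the parameter, which ``partial`` binds so that the result
# works as a no-argument getter or a single-argument setter.
def _voltage_get(self, parameter):
    return self._cached_voltage            # hypothetical backing attribute

def _voltage_set(self, parameter, value):
    self._cached_voltage = value           # hypothetical backing attribute

decorator_methods = {'get': _voltage_get, 'set': _voltage_set}
# instrument._attach_parameter_decorators(some_parameter, decorator_methods)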
Example #3
def do1d(
        param_set: _BaseParameter,
        xarray,
        delay: float,
        *param_meas: qcnd.ParamMeasT,
        exp: Optional[Experiment] = None,
        use_threads: Optional[bool] = None,
        enter_actions: qcnd.ActionsT = (),
        exit_actions: qcnd.ActionsT = (),
        additional_setpoints: Sequence[qcnd.ParamMeasT] = tuple(),
):
    """
    Perform a 1D scan of ``param_set`` over the setpoints given in ``xarray``,
    measuring ``param_meas`` at each setpoint.
    """
    if not _is_monotonic(xarray):
        warn('Sweep array is not monotonic. This is pretty weird. Reconsider.')

    meas = Measurement(exp=exp)

    all_setpoint_params = (param_set, ) + tuple(s
                                                for s in additional_setpoints)

    measured_parameters = tuple(param for param in param_meas
                                if isinstance(param, _BaseParameter))

    # threading is forced on when more than two parameters are measured
    use_threads = (len(measured_parameters) > 2) or (use_threads is True)

    try:
        loop_shape = tuple(1 for _ in additional_setpoints) + (len(xarray), )
        shapes: Shapes = detect_shape_of_measurement(measured_parameters,
                                                     loop_shape)
    except TypeError:
        warn(f"Could not detect shape of {measured_parameters} "
             f"falling back to unknown shape.")
        shapes = None

    qcnd._register_parameters(meas, all_setpoint_params)
    qcnd._register_parameters(meas,
                              param_meas,
                              setpoints=all_setpoint_params,
                              shapes=shapes)
    qcnd._register_actions(meas, enter_actions, exit_actions)

    with qcnd._catch_keyboard_interrupts() as interrupted, \
            meas.run(write_in_background=True) as datasaver:

        additional_setpoints_data = qcnd._process_params_meas(
            additional_setpoints)
        for set_point in xarray:
            param_set.set(set_point)
            time.sleep(delay)
            datasaver.add_result(
                (param_set, set_point),
                *qcnd._process_params_meas(param_meas,
                                           use_threads=use_threads),
                *additional_setpoints_data)
        dataset = datasaver.dataset

    return dataset
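
# A minimal usage sketch (assumed, not part of the original source): a
# software-only sweep over an explicit setpoint array. The import paths and
# the database/experiment setup follow standard qcodes but may differ between
# versions.
import numpy as np
from qcodes.instrument.parameter import Parameter
from qcodes.dataset import (initialise_or_create_database_at,
                            load_or_create_experiment)

initialise_or_create_database_at('./example.db')
exp = load_or_create_experiment('do1d_array_sweep')

gate = Parameter('gate', set_cmd=None, get_cmd=None, unit='V')
signal = Parameter('signal', get_cmd=lambda: 0.0, unit='A')  # dummy readout

dataset = do1d(gate, np.linspace(-1, 1, 51), 0.01, signal, exp=exp)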
Example #4
def _conditional_parameter_set(
    parameter: _BaseParameter,
    value: Union[float, complex],
) -> None:
    """
    Read the cached value of the given parameter and set the parameter to the
    given value only if it differs from the cached value.
    """
    if value != parameter.cache.get():
        parameter.set(value)
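
# A minimal usage sketch (assumed, not part of the original source): the
# second call is a no-op because the cached value already matches.
from qcodes.instrument.parameter import Parameter

bias = Parameter('bias', set_cmd=None, get_cmd=None, initial_value=0.0)
_conditional_parameter_set(bias, 0.5)   # cache holds 0.0 -> parameter is set
_conditional_parameter_set(bias, 0.5)   # cache holds 0.5 -> set() is skipped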
Example #5
def do2d(
    param_set1: _BaseParameter,
    start1: float,
    stop1: float,
    num_points1: int,
    delay1: float,
    param_set2: _BaseParameter,
    start2: float,
    stop2: float,
    num_points2: int,
    delay2: float,
    *param_meas: ParamMeasT,
    set_before_sweep: Optional[bool] = True,
    enter_actions: ActionsT = (),
    exit_actions: ActionsT = (),
    before_inner_actions: ActionsT = (),
    after_inner_actions: ActionsT = (),
    write_period: Optional[float] = None,
    measurement_name: str = "",
    exp: Optional[Experiment] = None,
    flush_columns: bool = False,
    do_plot: Optional[bool] = None,
    use_threads: Optional[bool] = None,
    additional_setpoints: Sequence[ParamMeasT] = tuple(),
    show_progress: Optional[bool] = None,
) -> AxesTupleListWithDataSet:
    """
    Perform a 1D scan of ``param_set1`` from ``start1`` to ``stop1`` in
    ``num_points1`` and ``param_set2`` from ``start2`` to ``stop2`` in
    ``num_points2`` measuring param_meas at each step.

    Args:
        param_set1: The QCoDeS parameter to sweep over in the outer loop
        start1: Starting point of sweep in outer loop
        stop1: End point of sweep in the outer loop
        num_points1: Number of points to measure in the outer loop
        delay1: Delay after setting parameter in the outer loop
        param_set2: The QCoDeS parameter to sweep over in the inner loop
        start2: Starting point of sweep in inner loop
        stop2: End point of sweep in the inner loop
        num_points2: Number of points to measure in the inner loop
        delay2: Delay after setting parameter before measurement is performed
        *param_meas: Parameter(s) to measure at each step or functions that
          will be called at each step. The function should take no arguments.
          The parameters and functions are called in the order they are
          supplied.
        set_before_sweep: if True the outer parameter is set to its first value
            before the inner parameter is swept to its next value.
        enter_actions: A list of functions taking no arguments that will be
            called before the measurements start
        exit_actions: A list of functions taking no arguments that will be
            called after the measurements ends
        before_inner_actions: Actions executed before each run of the inner loop
        after_inner_actions: Actions executed after each run of the inner loop
        write_period: The time after which the data is actually written to the
            database.
        measurement_name: Name of the measurement. This will be passed down to
            the dataset produced by the measurement. If not given, a default
            value of 'results' is used for the dataset.
        exp: The experiment to use for this measurement.
        flush_columns: If True, the data is written to the database after each
            column (inner loop) is finished, independent of the write period.
        additional_setpoints: A list of setpoint parameters to be registered in
            the measurement but not scanned.
        do_plot: Should png and pdf versions of the images be saved after the
            run. If None the setting will be read from ``qcodesrc.json``.
        use_threads: If True, measurements from each instrument will be done on
            separate threads. If you are measuring from several instruments
            this may give a significant speedup.
        show_progress: Should a progress bar be displayed during the
            measurement. If None the setting will be read from ``qcodesrc.json``.

    Returns:
        The QCoDeS dataset.
    """

    if do_plot is None:
        do_plot = config.dataset.dond_plot
    if show_progress is None:
        show_progress = config.dataset.dond_show_progress

    meas = Measurement(name=measurement_name, exp=exp)
    all_setpoint_params = (
        param_set1,
        param_set2,
    ) + tuple(s for s in additional_setpoints)

    measured_parameters = tuple(param for param in param_meas
                                if isinstance(param, _BaseParameter))

    try:
        loop_shape = tuple(
            1 for _ in additional_setpoints) + (num_points1, num_points2)
        shapes: Shapes = detect_shape_of_measurement(measured_parameters,
                                                     loop_shape)
    except TypeError:
        LOG.exception(f"Could not detect shape of {measured_parameters} "
                      f"falling back to unknown shape.")
        shapes = None

    _register_parameters(meas, all_setpoint_params)
    _register_parameters(meas,
                         param_meas,
                         setpoints=all_setpoint_params,
                         shapes=shapes)
    _set_write_period(meas, write_period)
    _register_actions(meas, enter_actions, exit_actions)

    original_delay1 = param_set1.post_delay
    original_delay2 = param_set2.post_delay

    param_set1.post_delay = delay1
    param_set2.post_delay = delay2

    with _catch_keyboard_interrupts() as interrupted, meas.run() as datasaver:
        dataset = datasaver.dataset
        additional_setpoints_data = process_params_meas(additional_setpoints)
        setpoints1 = np.linspace(start1, stop1, num_points1)
        for set_point1 in tqdm(setpoints1, disable=not show_progress):
            if set_before_sweep:
                param_set2.set(start2)

            param_set1.set(set_point1)

            for action in before_inner_actions:
                action()

            setpoints2 = np.linspace(start2, stop2, num_points2)

            # flush to prevent unflushed print's to visually interrupt tqdm bar
            # updates
            sys.stdout.flush()
            sys.stderr.flush()
            for set_point2 in tqdm(setpoints2,
                                   disable=not show_progress,
                                   leave=False):
                # skip the first inner setpoint if it was already set above
                if not (set_before_sweep and set_point2 == start2):
                    param_set2.set(set_point2)

                datasaver.add_result(
                    (param_set1, set_point1), (param_set2, set_point2),
                    *process_params_meas(param_meas, use_threads=use_threads),
                    *additional_setpoints_data)

            for action in after_inner_actions:
                action()
            if flush_columns:
                datasaver.flush_data_to_database()

    param_set1.post_delay = original_delay1
    param_set2.post_delay = original_delay2

    return _handle_plotting(dataset, do_plot, interrupted())
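
# A minimal usage sketch (assumed, not part of the original source): a 2D
# sweep of two software-only parameters. Import paths and the database setup
# follow standard qcodes but may differ between versions.
from qcodes.instrument.parameter import Parameter
from qcodes.dataset import (initialise_or_create_database_at,
                            load_or_create_experiment)

initialise_or_create_database_at('./example.db')
exp = load_or_create_experiment('do2d_example')

outer = Parameter('outer_gate', set_cmd=None, get_cmd=None, unit='V')
inner = Parameter('inner_gate', set_cmd=None, get_cmd=None, unit='V')
signal = Parameter('signal', get_cmd=lambda: 0.0, unit='A')  # dummy readout

result = do2d(outer, -1, 1, 21, 0.01,
              inner, -0.5, 0.5, 51, 0.001,
              signal, exp=exp, do_plot=False)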
Example #6
def do1d(
    param_set: _BaseParameter,
    start: float,
    stop: float,
    num_points: int,
    delay: float,
    *param_meas: ParamMeasT,
    enter_actions: ActionsT = (),
    exit_actions: ActionsT = (),
    write_period: Optional[float] = None,
    measurement_name: str = "",
    exp: Optional[Experiment] = None,
    do_plot: Optional[bool] = None,
    use_threads: Optional[bool] = None,
    additional_setpoints: Sequence[ParamMeasT] = tuple(),
    show_progress: Optional[bool] = None,
) -> AxesTupleListWithDataSet:
    """
    Perform a 1D scan of ``param_set`` from ``start`` to ``stop`` in
    ``num_points`` measuring param_meas at each step. In case param_meas is
    an ArrayParameter this is effectively a 2d scan.

    Args:
        param_set: The QCoDeS parameter to sweep over
        start: Starting point of sweep
        stop: End point of sweep
        num_points: Number of points in sweep
        delay: Delay after setting parameter before measurement is performed
        *param_meas: Parameter(s) to measure at each step or functions that
          will be called at each step. The function should take no arguments.
          The parameters and functions are called in the order they are
          supplied.
        enter_actions: A list of functions taking no arguments that will be
            called before the measurements start
        exit_actions: A list of functions taking no arguments that will be
            called after the measurements ends
        write_period: The time after which the data is actually written to the
            database.
        additional_setpoints: A list of setpoint parameters to be registered in
            the measurement but not scanned.
        measurement_name: Name of the measurement. This will be passed down to
            the dataset produced by the measurement. If not given, a default
            value of 'results' is used for the dataset.
        exp: The experiment to use for this measurement.
        do_plot: Should png and pdf versions of the images be saved after the
            run. If None the setting will be read from ``qcodesrc.json``.
        use_threads: If True, measurements from each instrument will be done on
            separate threads. If you are measuring from several instruments
            this may give a significant speedup.
        show_progress: Should a progress bar be displayed during the
            measurement. If None the setting will be read from ``qcodesrc.json``.

    Returns:
        The QCoDeS dataset.
    """
    if do_plot is None:
        do_plot = config.dataset.dond_plot
    if show_progress is None:
        show_progress = config.dataset.dond_show_progress

    meas = Measurement(name=measurement_name, exp=exp)

    all_setpoint_params = (param_set, ) + tuple(s
                                                for s in additional_setpoints)

    measured_parameters = tuple(param for param in param_meas
                                if isinstance(param, _BaseParameter))
    try:
        loop_shape = tuple(1 for _ in additional_setpoints) + (num_points, )
        shapes: Shapes = detect_shape_of_measurement(measured_parameters,
                                                     loop_shape)
    except TypeError:
        LOG.exception(f"Could not detect shape of {measured_parameters} "
                      f"falling back to unknown shape.")
        shapes = None

    _register_parameters(meas, all_setpoint_params)
    _register_parameters(meas,
                         param_meas,
                         setpoints=all_setpoint_params,
                         shapes=shapes)
    _set_write_period(meas, write_period)
    _register_actions(meas, enter_actions, exit_actions)

    original_delay = param_set.post_delay
    param_set.post_delay = delay

    # do1D enforces a simple relationship between measured parameters
    # and set parameters. For anything more complicated this should be
    # reimplemented from scratch
    with _catch_keyboard_interrupts() as interrupted, meas.run() as datasaver:
        dataset = datasaver.dataset
        additional_setpoints_data = process_params_meas(additional_setpoints)
        setpoints = np.linspace(start, stop, num_points)

        # flush to prevent unflushed print's to visually interrupt tqdm bar
        # updates
        sys.stdout.flush()
        sys.stderr.flush()

        for set_point in tqdm(setpoints, disable=not show_progress):
            param_set.set(set_point)
            datasaver.add_result((param_set, set_point),
                                 *process_params_meas(param_meas,
                                                      use_threads=use_threads),
                                 *additional_setpoints_data)

    param_set.post_delay = original_delay

    return _handle_plotting(dataset, do_plot, interrupted())
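
# A minimal usage sketch (assumed, not part of the original source), showing
# the enter/exit hooks; it presumes a database and experiment have already
# been initialised as in the earlier sketches.
from qcodes.instrument.parameter import Parameter

gate = Parameter('gate', set_cmd=None, get_cmd=None, unit='V')
signal = Parameter('signal', get_cmd=lambda: 0.0, unit='A')  # dummy readout

result = do1d(gate, 0, 1, 101, 0.01, signal,
              enter_actions=(lambda: print('starting sweep'),),
              exit_actions=(lambda: print('sweep finished'),),
              do_plot=False)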
Example #7
def do2d_multi(param_slow: _BaseParameter, start_slow: float, stop_slow: float,
               num_points_slow: int, delay_slow: float,
               param_fast: _BaseParameter, start_fast: float, stop_fast: float,
               num_points_fast: int, delay_fast: float,
               lockins: Sequence[SR830],
               devices_no_buffer: Optional[Iterable[Parameter]] = None,
               write_period: float = 1.,
               threading: Sequence[bool] = (True, True, True, True),
               label: Optional[str] = None,
               channels: int = 0,
               attempts_to_get: int = 3,
               delay_fast_increase: float = 0.0
               ) -> Tuple[DataSet, ...]:
    """
    This is a do2d to be used for a collection of SR830.

    Args:
        param_slow: The QCoDeS parameter to sweep over in the outer loop
        start_slow: Starting point of sweep in outer loop
        stop_slow: End point of sweep in the outer loop
        num_points_slow: Number of points to measure in the outer loop
        delay_slow: Delay after setting parameter in the outer loop
        param_fast: The QCoDeS parameter to sweep over in the inner loop
        start_fast: Starting point of sweep in inner loop
        stop_fast: End point of sweep in the inner loop
        num_points_fast: Number of points to measure in the inner loop
        delay_fast: Delay after setting parameter before measurement is performed
        lockins: Iterable of SR830 lockins
        devices_no_buffer: Iterable of Parameters to be measured alongside the lockins
        write_period: The time after which the data is actually written to the
                      database.
        threading: Four flags controlling, in order, whether write_in_background,
                   buffer_reset, send_trigger and get_trace are threaded
        label: Optional label passed on when setting up the lock-in sweep parameters
        channels: Channels to get from the buffer. "0" gets both channels
        attempts_to_get: Maximum number of attempts to get the buffer if it fails
        delay_fast_increase: Amount to increase delay_fast by if getting the buffer fails

    Returns:
        A tuple containing the buffered dataset and, if ``devices_no_buffer``
        is given, also the unbuffered dataset.
    """

    
    logger.info('Starting do2d_multi with {} points'.format(
        num_points_slow * num_points_fast))
    logger.info('write_in_background {}, threading buffer_reset {}, '
                'threading send_trigger {}, threading get_trace {}'
                .format(*threading))
    begin_time = time.perf_counter()

    for lockin in lockins:
        if not isinstance(lockin, SR830):
            raise ValueError('Invalid instrument. Only SR830s are supported')
        lockin.buffer_SR("Trigger")
        lockin.buffer_trig_mode.set('ON')
        assert isinstance(param_fast, Parameter)
        lockin.set_sweep_parameters(param_fast, start_fast, stop_fast, num_points_fast, label=label)

    interval_slow = tqdm(np.linspace(start_slow, stop_slow, num_points_slow), position=0)
    interval_slow.set_description("Slow parameter")
    set_points_fast = lockins[0].sweep_setpoints

    meas = Measurement()
    meas.write_period = write_period
    meas.register_parameter(set_points_fast)
    meas.register_parameter(param_fast)
    meas.register_parameter(param_slow)

    param_fast.post_delay = delay_fast
    param_slow.post_delay = delay_slow

    traces = _datatrace_parameters(lockins, channels)

    for trace in traces:
        assert isinstance(trace.root_instrument, SR830)
        if len(trace.label.split()) < 2:
            trace.label = trace.root_instrument.name + ' ' + trace.label
        meas.register_parameter(trace, setpoints=(param_slow, trace.root_instrument.sweep_setpoints))

    if devices_no_buffer is not None:
        meas_no_buffer = Measurement()
        meas_no_buffer.write_period = write_period
        meas_no_buffer.register_parameter(param_fast)
        meas_no_buffer.register_parameter(param_slow)
        for device in devices_no_buffer:
            meas_no_buffer.register_parameter(device, setpoints=(param_slow, param_fast))

    time_fast_loop = 0.0
    time_set_fast = 0.0
    time_buffer_reset = 0.0
    time_trigger_send = 0.0
    time_get_trace = 0.0

    cm_datasaver = meas.run(write_in_background=threading[0])
    if devices_no_buffer is not None:
        cm_datasaver_no_buffer = meas_no_buffer.run(write_in_background=threading[0])

    with ExitStack() as cmx:
        cmx.enter_context(cm_datasaver)
        datasaver = cm_datasaver.datasaver
        if devices_no_buffer is not None:
            cmx.enter_context(cm_datasaver_no_buffer)
            datasaver_no_buffer = cm_datasaver_no_buffer.datasaver
        for point_slow in interval_slow:
            param_slow.set(point_slow)
            data = []
            data.append((param_slow, param_slow.get()))

            if devices_no_buffer is not None:
                data_no_buffer = []
                data_no_buffer.append((param_slow, param_slow.get()))
            attempts = 0
            while attempts < attempts_to_get:
                try:
                    begin_time_temp_buffer = time.perf_counter()
                    if threading[1]:
                        with concurrent.futures.ThreadPoolExecutor() as executor:
                            for lockin in lockins:
                                executor.submit(lockin.buffer_reset)
                    else:
                        for lockin in lockins:
                            lockin.buffer_reset()
                    time_buffer_reset += time.perf_counter() - begin_time_temp_buffer

                    begin_time_temp_fast_loop = time.perf_counter()
                    interval_fast = tqdm(set_points_fast.get(), position=1, leave=False)
                    interval_fast.set_description("Fast parameter")
                    for point_fast in interval_fast:
                        begin_time_temp_set_fast = time.perf_counter()
                        param_fast.set(point_fast)

                        time_set_fast += time.perf_counter() - begin_time_temp_set_fast
                        begin_time_temp_trigger = time.perf_counter()
                        if threading[2]:
                            with concurrent.futures.ThreadPoolExecutor() as executor:
                                for lockin in lockins:
                                    executor.submit(lockin.send_trigger)
                        else:
                            for lockin in lockins:
                                lockin.send_trigger()

                        time_trigger_send += time.perf_counter() - begin_time_temp_trigger

                        if devices_no_buffer is not None:
                            fast = param_fast.get()
                            data_no_buffer.append((param_fast, fast))
                            for device in devices_no_buffer:
                                device_value = device.get()
                                data_no_buffer.append((device, device_value))
                            datasaver_no_buffer.add_result(*data_no_buffer)

                    time_fast_loop += time.perf_counter() - begin_time_temp_fast_loop

                    begin_time_temp_trace = time.perf_counter()
                    if threading[3]:
                        with concurrent.futures.ThreadPoolExecutor() as executor:
                            data_trace = executor.map(trace_tuble, traces)

                        data += list(data_trace)
                    else:
                        for trace in traces:
                            data.append((trace, trace.get()))
                    time_get_trace += time.perf_counter() - begin_time_temp_trace

                    data.append((set_points_fast, set_points_fast.get()))
                    break
                except Exception as e:
                    logger.info('Failed to get buffer')
                    logger.info(e)
                    print(e)
                    attempts += 1
                    delay_fast += delay_fast_increase
                    print(attempts)
                    logger.info('next attempt nr. {}'.format(attempts))
                    logger.info('next delay_fast. {}'.format(delay_fast))
                    print(delay_fast)
                    if attempts < attempts_to_get:
                        log_message = 'getting the buffer failed, will try again'
                        print(log_message)
                        logger.info(log_message)
                    else:
                        log_message = 'getting the buffer failed, will go to next slow_point'
                        print(log_message)
                        logger.info(log_message)

            datasaver.add_result(*data)

    message = 'Have finished the measurement in {} seconds'.format(time.perf_counter()-begin_time)
    logger.info(message)
    message2 = 'Time used in buffer reset {}. Time used in send trigger {}. Time used in get trace {}'.format(time_buffer_reset, time_trigger_send, time_get_trace)
    logger.info(message2)
    logger.info('time in the fast loop {}'.format(time_fast_loop))

    if devices_no_buffer is not None:
        return (datasaver.dataset, datasaver_no_buffer.dataset)
    else:
        return (datasaver.dataset,)
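
# A minimal call sketch (assumed, not part of the original source): it needs
# connected SR830 lock-ins and settable gate parameters; ``slow_gate``,
# ``fast_gate``, ``sr830_a``, ``sr830_b`` and ``dmm_voltage`` are placeholders.
buffered_ds, unbuffered_ds = do2d_multi(
    slow_gate, -1.0, 1.0, 41, 0.1,
    fast_gate, 0.0, 0.5, 201, 0.003,
    lockins=[sr830_a, sr830_b],
    devices_no_buffer=[dmm_voltage],
    attempts_to_get=3)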
Example #8
def do2d_multi(param_slow: _BaseParameter,
               start_slow: float,
               stop_slow: float,
               num_points_slow: int,
               delay_slow: float,
               param_fast: _BaseParameter,
               start_fast: float,
               stop_fast: float,
               num_points_fast: int,
               delay_fast: float,
               bundle: BundleLockin,
               write_period: float = 1.,
               threading: Sequence[bool] = (True, True, True, True),
               show_progress_bar: bool = True,
               attempts_to_get: int = 3,
               delay_fast_increase: float = 0.0,
               label: Union[str, None] = None):
    """
    This is a do2d only to be used with BundleLockin.

    Args:
        param_slow: The QCoDeS parameter to sweep over in the outer loop
        start_slow: Starting point of sweep in outer loop
        stop_slow: End point of sweep in the outer loop
        num_points_slow: Number of points to measure in the outer loop
        delay_slow: Delay after setting parameter in the outer loop
        param_fast: The QCoDeS parameter to sweep over in the inner loop
        start_fast: Starting point of sweep in inner loop
        stop_fast: End point of sweep in the inner loop
        num_points_fast: Number of points to measure in the inner loop
        delay_fast: Delay after setting parameter before measurement is performed
        bundle: BundleLockin grouping the lock-ins to be read out
        write_period: The time after which the data is actually written to the
                      database.
        threading: For each element that is True, the corresponding step
                   (write_in_background, buffer_reset, send_trigger and
                   get_trace, respectively) will be threaded
        show_progress_bar: should a progress bar be displayed during the
                           measurement.
        attempts_to_get: number of attempts to get the buffer before failing
        delay_fast_increase: amount by which delay_fast is increased on failure
        label: optional label passed to ``bundle.set_sweep_parameters``
    """

    logger.info('Starting do2d_multi with {} points'.format(num_points_slow *
                                                            num_points_fast))
    logger.info(
        'write_in_background {}, threading buffer_reset {}, '
        'threading send_trigger {}, threading get_trace {}'.format(*threading))
    begin_time = time.perf_counter()
    meas = Measurement()
    bundle.set_sweep_parameters(param_fast,
                                start_fast,
                                stop_fast,
                                num_points_fast,
                                label=label)
    interval_slow = np.linspace(start_slow, stop_slow, num_points_slow)
    meas.write_period = write_period
    set_points_fast = bundle.setpoints

    meas.register_parameter(set_points_fast)
    param_fast.post_delay = delay_fast

    meas.register_parameter(param_slow)
    param_slow.post_delay = delay_slow

    bundle_parameters = bundle.__dict__['parameters']
    traces = [
        bundle_parameters[key] for key in bundle_parameters.keys()
        if 'trace' in key
    ]
    for trace in traces:
        meas.register_parameter(trace, setpoints=(param_slow, set_points_fast))

    time_fast_loop = 0.0
    time_set_fast = 0.0
    time_buffer_reset = 0.0
    time_trigger_send = 0.0
    time_get_trace = 0.0

    if show_progress_bar:
        progress_bar = progressbar.ProgressBar(max_value=num_points_slow *
                                               num_points_fast)
        points_taken = 0

    with meas.run(write_in_background=threading[0]) as datasaver:
        run_id = datasaver.run_id

        for point_slow in interval_slow:
            param_slow.set(point_slow)

            attempts = 0
            while attempts < attempts_to_get:
                try:
                    begin_time_temp_buffer = time.perf_counter()
                    if threading[1]:
                        with concurrent.futures.ThreadPoolExecutor(
                        ) as executor:
                            for lockin in bundle.lockins:
                                executor.submit(lockin.buffer_reset)
                    else:
                        for lockin in bundle.lockins:
                            lockin.buffer_reset()
                    time_buffer_reset += time.perf_counter(
                    ) - begin_time_temp_buffer

                    begin_time_temp_fast_loop = time.perf_counter()
                    for point_fast in set_points_fast.get():
                        begin_time_temp_set_fast = time.perf_counter()
                        param_fast.set(point_fast)

                        time_set_fast += time.perf_counter(
                        ) - begin_time_temp_set_fast
                        begin_time_temp_trigger = time.perf_counter()
                        if threading[2]:
                            with concurrent.futures.ThreadPoolExecutor(
                            ) as executor:
                                for lockin in bundle.lockins:
                                    executor.submit(lockin.send_trigger)
                        else:
                            for lockin in bundle.lockins:
                                lockin.send_trigger()
                        if show_progress_bar and attempts == 0:
                            points_taken += 1
                            progress_bar.update(points_taken)
                        time_trigger_send += time.perf_counter(
                        ) - begin_time_temp_trigger
                    time_fast_loop += time.perf_counter(
                    ) - begin_time_temp_fast_loop

                    begin_time_temp_trace = time.perf_counter()
                    if threading[3]:
                        with concurrent.futures.ThreadPoolExecutor(
                        ) as executor:
                            data = executor.map(trace_tuble, traces)

                        data = list(data)
                    else:
                        data = []
                        for trace in traces:
                            data.append((trace, trace.get()))
                    time_get_trace += time.perf_counter(
                    ) - begin_time_temp_trace

                    data.append((param_slow, param_slow.get()))
                    data.append((set_points_fast, set_points_fast.get()))
                    break
                except Exception as e:
                    print(e)
                    attempts += 1
                    delay_fast += delay_fast_increase
                    print(attempts)
                    print(delay_fast)
                    if attempts < attempts_to_get:
                        print('getting the buffer failed, will try again')
                    else:
                        print(
                            'getting the buffer failed, will go to next slow_point'
                        )
            datasaver.add_result(*data)

    message = 'Have finished the measurement in {} seconds. run_id {}'.format(
        time.perf_counter() - begin_time, run_id)
    logger.info(message)
    message2 = 'Time used in buffer reset {}. Time used in send trigger {}. Time used in get trace {}'.format(
        time_buffer_reset, time_trigger_send, time_get_trace)
    logger.info(message2)
    logger.info('time in the fast loop {}'.format(time_fast_loop))
    logger.info('time setting in the fast loop {}'.format(time_set_fast))
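
# A minimal call sketch (assumed, not part of the original source): ``bundle``
# must be a connected BundleLockin and ``dc_gate``/``fast_gate`` settable
# parameters; with these flags, writing runs in the background and
# buffer_reset is threaded, while send_trigger and get_trace stay sequential.
do2d_multi(dc_gate, -1.0, 1.0, 41, 0.1,
           fast_gate, 0.0, 0.5, 201, 0.003,
           bundle,
           threading=[True, True, False, False],
           show_progress_bar=True)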