def analyze(self):
        """Interpret the raw data of a threshold scan and store the results.

        Opens the scan's HDF5 output file read-write, interprets the raw data
        into hits, histograms the per-pixel S-curves, fits them, and writes
        everything into a new 'interpreted' group of the same file.
        """
        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        with tb.open_file(h5_filename, 'r+') as h5_file:
            raw_data = h5_file.root.raw_data[:]
            meta_data = h5_file.root.meta_data[:]
            run_config = h5_file.root.configuration.run_config[:]

            # TODO: TMP this should go to analysis function with chunking
            self.logger.info('Interpret raw data...')
            hit_data = analysis.interpret_raw_data(raw_data, meta_data)

            # Scan parameters needed for the S-curve fits.
            # NOTE(review): sibling analyze() variants compare against byte
            # strings (b'...') here - confirm which key type run_config uses.
            Vthreshold_start = [int(item[1]) for item in run_config if item[0] == 'Vthreshold_start'][0]
            Vthreshold_stop = [int(item[1]) for item in run_config if item[0] == 'Vthreshold_stop'][0]
            n_injections = [int(item[1]) for item in run_config if item[0] == 'n_injections'][0]

            # Keep only packets that are actual hit data
            hit_data = hit_data[hit_data['data_header'] == 1]
            param_range = np.unique(meta_data['scan_param_id'])

            self.logger.info('Get the global threshold distributions for all pixels...')
            scurve = analysis.scurve_hist(hit_data, param_range)
            self.logger.info('Fit the scurves for all pixels...')
            thr2D, sig2D, chi2ndf2D = analysis.fit_scurves_multithread(scurve, scan_param_range=range(Vthreshold_start, Vthreshold_stop), n_injections=n_injections, invert_x=True)

            h5_file.create_group(h5_file.root, 'interpreted', 'Interpreted Data')

            h5_file.create_table(h5_file.root.interpreted, 'hit_data', hit_data, filters=tb.Filters(complib='zlib', complevel=5))
            h5_file.create_carray(h5_file.root.interpreted, name='HitDistribution', obj=scurve)
            h5_file.create_carray(h5_file.root.interpreted, name='PixelThresholdMap', obj=thr2D.T)
            # Fix: the fitted noise and chi2/ndf maps were computed but
            # silently discarded; store them like the sibling analyses do.
            h5_file.create_carray(h5_file.root.interpreted, name='NoiseMap', obj=sig2D.T)
            h5_file.create_carray(h5_file.root.interpreted, name='Chi2Map', obj=chi2ndf2D.T)
# --- Example 2 ---
    def analyze(self):
        """Interpret the raw testpulse-scan data, fit the per-pixel S-curves
        and write all interpreted results into the scan's HDF5 file."""
        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        with tb.open_file(h5_filename, 'r+') as h5_file:
            raw_data = h5_file.root.raw_data[:]
            meta_data = h5_file.root.meta_data[:]
            run_config = h5_file.root.configuration.run_config[:]

            def run_config_int(key):
                # First run-config entry matching *key*, converted to int
                return [int(entry[1]) for entry in run_config if entry[0] == key][0]

            # TODO: TMP this should go to analysis function with chunking
            hit_data = analysis.interpret_raw_data(raw_data, meta_data)
            hit_data = hit_data[hit_data['data_header'] == 1]
            param_range = np.unique(meta_data['scan_param_id'])
            scurve = analysis.scurve_hist(hit_data, param_range)

            n_injections = run_config_int('n_injections')
            VTP_fine_start = run_config_int('VTP_fine_start')
            VTP_fine_stop = run_config_int('VTP_fine_stop')

            param_range = range(VTP_fine_start, VTP_fine_stop)
            thr2D, sig2D, chi2ndf2D = analysis.fit_scurves_multithread(
                scurve, scan_param_range=param_range, n_injections=n_injections)

            h5_file.create_group(h5_file.root, 'interpreted', 'Interpreted Data')

            h5_file.create_table(h5_file.root.interpreted, 'hit_data', hit_data,
                                 filters=tb.Filters(complib='zlib', complevel=5))

            h5_file.create_carray(h5_file.root.interpreted, name='HistSCurve', obj=scurve)
            h5_file.create_carray(h5_file.root.interpreted, name='Chi2Map', obj=chi2ndf2D.T)
            h5_file.create_carray(h5_file.root.interpreted, name='ThresholdMap', obj=thr2D.T)
            h5_file.create_carray(h5_file.root.interpreted, name='NoiseMap', obj=sig2D.T)

            # Occupancy histogram: number of hits per pixel on the 256x256 matrix
            flat_occupancy = np.bincount(hit_data['x'] * 256 + hit_data['y'],
                                         minlength=256 * 256).astype(np.uint32)
            h5_file.create_carray(h5_file.root.interpreted, name='HistOcc',
                                  obj=np.reshape(flat_occupancy, (256, 256)).T)
# --- Example 3 ---
    def analyze(self, progress = None, status = None, **kwargs):
        '''
            Analyze the data of the scan
            If progress is None a tqdm progress bar is used else progress should be a Multiprocess Queue which stores the progress as fraction of 1
            If there is a status queue information about the status of the scan are put into it
        '''

        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        # Fix: compare against None by identity ('is not'), not '!='
        if status is not None:
            status.put("Performing data analysis")

        # Open the HDF5 which contains all data of the scan
        with tb.open_file(h5_filename, 'r+') as h5_file:
            # Read raw data, meta data and configuration parameters
            raw_data = h5_file.root.raw_data[:]
            meta_data = h5_file.root.meta_data[:]
            run_config = h5_file.root.configuration.run_config[:]
            general_config = h5_file.root.configuration.generalConfig[:]
            op_mode = [row[1] for row in general_config if row[0]==b'Op_mode'][0]
            vco = [row[1] for row in general_config if row[0]==b'Fast_Io_en'][0]

            # Create group to save all data and histograms to the HDF file
            h5_file.create_group(h5_file.root, 'interpreted', 'Interpreted Data')

            self.logger.info('Interpret raw data...')
            # Interpret the raw data (2x 32 bit to 1x 48 bit)
            hit_data = analysis.interpret_raw_data(raw_data, op_mode, vco, meta_data, progress = progress)
            raw_data = None  # release the raw data to keep peak memory down

            # Select only data which is hit data
            hit_data = hit_data[hit_data['data_header'] == 1]
            h5_file.create_table(h5_file.root.interpreted, 'hit_data', hit_data, filters=tb.Filters(complib='zlib', complevel=5))
            # Occupancy: hits per pixel on the 256x256 matrix
            pix_occ = np.bincount(hit_data['x'] * 256 + hit_data['y'], minlength=256 * 256).astype(np.uint32)
            hist_occ = np.reshape(pix_occ, (256, 256)).T
            h5_file.create_carray(h5_file.root.interpreted, name='HistOcc', obj=hist_occ)
            param_range = np.unique(meta_data['scan_param_id'])
            # Release intermediates no longer needed
            meta_data = None
            pix_occ = None
            hist_occ = None

            # Create histograms for number of detected hits for individual thresholds
            scurve = analysis.scurve_hist(hit_data, param_range)
            hit_data = None

            # Read needed configuration parameters
            n_injections = [int(item[1]) for item in run_config if item[0] == b'n_injections'][0]
            Vthreshold_start = [int(item[1]) for item in run_config if item[0] == b'Vthreshold_start'][0]
            Vthreshold_stop = [int(item[1]) for item in run_config if item[0] == b'Vthreshold_stop'][0]

            # Fit S-Curves to the histogramms for all pixels
            param_range = list(range(Vthreshold_start, Vthreshold_stop))
            thr2D, sig2D, chi2ndf2D = analysis.fit_scurves_multithread(scurve, scan_param_range=param_range, n_injections=n_injections, invert_x=True, progress = progress)

            h5_file.create_carray(h5_file.root.interpreted, name='HistSCurve', obj=scurve)
            h5_file.create_carray(h5_file.root.interpreted, name='Chi2Map', obj=chi2ndf2D.T)
            h5_file.create_carray(h5_file.root.interpreted, name='ThresholdMap', obj=thr2D.T)
            h5_file.create_carray(h5_file.root.interpreted, name='NoiseMap', obj=sig2D.T)
# --- Example 4 ---
    def analyze(self):
        """Interpret the raw data of the scan and store the global per-pixel
        hit distribution and the mean-threshold map in the HDF5 file.
        """
        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        with tb.open_file(h5_filename, 'r+') as h5_file:
            raw_data = h5_file.root.raw_data[:]
            meta_data = h5_file.root.meta_data[:]
            run_config = h5_file.root.configuration.run_config[:]

            # TODO: TMP this should go to analysis function with chunking
            self.logger.info('Interpret raw data...')
            # Fix: removed leftover debug print(hit_data) calls that dumped
            # the full hit array to stdout, and the unused Vthreshold_stop
            # lookup.
            hit_data = analysis.interpret_raw_data(raw_data, meta_data)
            Vthreshold_start = [
                int(item[1]) for item in run_config
                if item[0] == 'Vthreshold_start'
            ][0]

            # Keep only packets that are actual hit data
            hit_data = hit_data[hit_data['data_header'] == 1]
            param_range = np.unique(meta_data['scan_param_id'])

            self.logger.info(
                'Get the global threshold distributions for all pixels...')
            scurve = analysis.scurve_hist(hit_data, param_range)
            self.logger.info(
                'Calculate the mean of the global threshold distributions for all pixels...'
            )
            vths = analysis.vths(scurve, param_range, Vthreshold_start)

            h5_file.create_group(h5_file.root, 'interpreted',
                                 'Interpreted Data')

            h5_file.create_table(h5_file.root.interpreted,
                                 'hit_data',
                                 hit_data,
                                 filters=tb.Filters(complib='zlib',
                                                    complevel=5))
            h5_file.create_carray(h5_file.root.interpreted,
                                  name='HitDistribution',
                                  obj=scurve)
            h5_file.create_carray(h5_file.root.interpreted,
                                  name='PixelThresholdMap',
                                  obj=vths.T)
# --- Example 5 ---
    def analyze(self, progress=None, status=None, result_path=None, **kwargs):
        '''
            Analyze the data of the equalisation and calculate the equalisation matrix
            If progress is None a tqdm progress bar is used else progress should be a Multiprocess Queue which stores the progress as fraction of 1
            If there is a status queue information about the status of the scan are put into it
        '''

        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        # Fix: compare against None by identity ('is not'), not '!='
        if status is not None:
            status.put("Performing data analysis")

        # Open the HDF5 which contains all data of the equalisation
        with tb.open_file(h5_filename, 'r+') as h5_file:
            # Read raw data, meta data and configuration parameters
            meta_data = h5_file.root.meta_data[:]
            run_config = h5_file.root.configuration.run_config[:]
            general_config = h5_file.root.configuration.generalConfig[:]
            op_mode = [
                row[1] for row in general_config if row[0] == b'Op_mode'
            ][0]
            vco = [
                row[1] for row in general_config if row[0] == b'Fast_Io_en'
            ][0]

            self.logger.info('Interpret raw data...')

            # The scan contains two halves: pixel threshold 0 first, then 15.
            # Split the meta data by scan parameter id.
            param_range = np.unique(meta_data['scan_param_id'])

            # THR = 0: first half of the scan parameter ids
            meta_data_th0 = meta_data[
                meta_data['scan_param_id'] < len(param_range) // 2]

            # THR = 15: second half of the scan parameter ids
            meta_data_th15 = meta_data[
                meta_data['scan_param_id'] >= len(param_range) // 2]

            # shift indices so that they start with zero
            start = meta_data_th15['index_start'][0]
            meta_data_th15[
                'index_start'] = meta_data_th15['index_start'] - start
            meta_data_th15['index_stop'] = meta_data_th15['index_stop'] - start

            self.logger.info('THR = 0')
            #THR = 0
            raw_data_thr0 = h5_file.root.raw_data[:meta_data_th0['index_stop']
                                                  [-1]]
            hit_data_thr0 = analysis.interpret_raw_data(raw_data_thr0,
                                                        op_mode,
                                                        vco,
                                                        meta_data_th0,
                                                        progress=progress)
            raw_data_thr0 = None  # release memory

            self.logger.info('THR = 15')
            #THR = 15
            raw_data_thr15 = h5_file.root.raw_data[
                meta_data_th0['index_stop'][-1]:]
            hit_data_thr15 = analysis.interpret_raw_data(raw_data_thr15,
                                                         op_mode,
                                                         vco,
                                                         meta_data_th15,
                                                         progress=progress)
            raw_data_thr15 = None  # release memory

        # Read needed configuration parameters
        Vthreshold_start = [
            int(item[1]) for item in run_config
            if item[0] == b'Vthreshold_start'
        ][0]
        Vthreshold_stop = [
            int(item[1]) for item in run_config
            if item[0] == b'Vthreshold_stop'
        ][0]
        chip_wafer = [
            int(item[1]) for item in run_config if item[0] == b'chip_wafer'
        ][0]
        chip_x = [
            item[1].decode() for item in run_config if item[0] == b'chip_x'
        ][0]
        chip_y = [int(item[1]) for item in run_config
                  if item[0] == b'chip_y'][0]

        # Select only data which is hit data
        hit_data_thr0 = hit_data_thr0[hit_data_thr0['data_header'] == 1]
        hit_data_thr15 = hit_data_thr15[hit_data_thr15['data_header'] == 1]

        # Divide the data into two parts - data for pixel threshold 0 and 15
        param_range = np.unique(meta_data['scan_param_id'])
        meta_data = None  # release memory
        param_range_th0 = np.unique(hit_data_thr0['scan_param_id'])
        param_range_th15 = np.unique(hit_data_thr15['scan_param_id'])

        # Create histograms for number of detected hits for individual thresholds
        self.logger.info(
            'Get the global threshold distributions for all pixels...')
        scurve_th0 = analysis.scurve_hist(hit_data_thr0, param_range_th0)
        hit_data_thr0 = None
        scurve_th15 = analysis.scurve_hist(hit_data_thr15, param_range_th15)
        hit_data_thr15 = None

        # Calculate the mean of the threshold distributions for all pixels
        self.logger.info(
            'Calculate the mean of the global threshold distributions for all pixels...'
        )
        vths_th0 = analysis.vths(scurve_th0, param_range_th0, Vthreshold_start)
        scurve_th0 = None
        vths_th15 = analysis.vths(scurve_th15, param_range_th15,
                                  Vthreshold_start)
        scurve_th15 = None

        # Get the treshold distributions for both scan
        self.logger.info('Get the cumulated global threshold distributions...')
        hist_th0 = analysis.vth_hist(vths_th0, Vthreshold_stop)
        hist_th15 = analysis.vth_hist(vths_th15, Vthreshold_stop)
        vths_th15 = None

        # Use the threshold histogramms and one threshold distribution to calculate the equalisation
        self.logger.info('Calculate the equalisation matrix...')
        eq_matrix = analysis.eq_matrix(hist_th0, hist_th15, vths_th0,
                                       Vthreshold_start, Vthreshold_stop)

        # Don't mask any pixels in the mask file.
        # Fix: np.bool was removed in NumPy >= 1.24; use the builtin bool.
        # np.zeros already yields all-False, so no extra fill is needed.
        mask_matrix = np.zeros((256, 256), dtype=bool)

        # Write the equalisation matrix to a new HDF5 file
        self.save_thr_mask(eq_matrix, chip_wafer, chip_x, chip_y)

        if result_path is not None:
            result_path.put(self.thrfile)
# --- Example 6 ---
    def analyze(self):
        """Interpret the raw equalisation data, derive the equalisation
        matrix from the THR=0 / THR=15 threshold distributions, and write
        the mask and equalisation matrices to a timestamped mask file.
        """
        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        with tb.open_file(h5_filename, 'r+') as h5_file:
            raw_data = h5_file.root.raw_data[:]
            meta_data = h5_file.root.meta_data[:]
            run_config = h5_file.root.configuration.run_config[:]

            # TODO: TMP this should go to analysis function with chunking
            self.logger.info('Interpret raw data...')
            hit_data = analysis.interpret_raw_data(raw_data, meta_data)
            Vthreshold_start = [int(item[1]) for item in run_config if item[0] == 'Vthreshold_start'][0]
            Vthreshold_stop = [int(item[1]) for item in run_config if item[0] == 'Vthreshold_stop'][0]

            # Keep only packets that are actual hit data, then split the scan
            # into its THR=0 (first half) and THR=15 (second half) parts.
            hit_data = hit_data[hit_data['data_header'] == 1]
            param_range = np.unique(meta_data['scan_param_id'])
            # NOTE(review): true division keeps the original split behavior
            # for odd len(param_range); sibling variants use // 2 - confirm
            # which is intended before unifying.
            hit_data_th0 = hit_data[hit_data['scan_param_id'] < len(param_range) / 2]
            param_range_th0 = np.unique(hit_data_th0['scan_param_id'])
            hit_data_th15 = hit_data[hit_data['scan_param_id'] >= len(param_range) / 2]
            param_range_th15 = np.unique(hit_data_th15['scan_param_id'])

            self.logger.info('Get the global threshold distributions for all pixels...')
            scurve_th0 = analysis.scurve_hist(hit_data_th0, param_range_th0)
            scurve_th15 = analysis.scurve_hist(hit_data_th15, param_range_th15)
            self.logger.info('Calculate the mean of the global threshold distributions for all pixels...')
            vths_th0 = analysis.vths(scurve_th0, param_range_th0, Vthreshold_start)
            vths_th15 = analysis.vths(scurve_th15, param_range_th15, Vthreshold_start)
            self.logger.info('Get the cumulated global threshold distributions...')
            hist_th0 = analysis.vth_hist(vths_th0, Vthreshold_stop)
            hist_th15 = analysis.vth_hist(vths_th15, Vthreshold_stop)

            self.logger.info('Calculate the equalisation matrix...')
            eq_matrix = analysis.eq_matrix(hist_th0, hist_th15, vths_th0, Vthreshold_start, Vthreshold_stop)
            # Don't mask any pixels.
            # Fix: np.bool was removed in NumPy >= 1.24; use the builtin bool.
            # np.zeros already yields all-False, so no extra fill is needed.
            mask_matrix = np.zeros((256, 256), dtype=bool)

            self.logger.info('Writing mask_matrix to file...')
            maskfile = os.path.join(self.working_dir, self.timestamp + '_mask.h5')

            with tb.open_file(maskfile, 'a') as out_file:
                try:
                    # Replace any pre-existing mask in the file
                    out_file.remove_node(out_file.root.mask_matrix)
                except NoSuchNodeError:
                    self.logger.debug('Specified maskfile does not include a mask_matrix yet!')

                out_file.create_carray(out_file.root,
                                    name='mask_matrix',
                                    title='Matrix mask',
                                    obj=mask_matrix)
                self.logger.info('Closing mask file: %s' % (maskfile))

            self.logger.info('Writing equalisation matrix to file...')
            with tb.open_file(maskfile, 'a') as out_file:
                try:
                    # Replace any pre-existing equalisation matrix in the file
                    out_file.remove_node(out_file.root.thr_matrix)
                except NoSuchNodeError:
                    self.logger.debug('Specified maskfile does not include a thr_mask yet!')

                out_file.create_carray(out_file.root,
                                        name='thr_matrix',
                                        title='Matrix Threshold',
                                        obj=eq_matrix)
                self.logger.info('Closing equalisation matrix file: %s' % (maskfile))
# --- Example 7 ---
    def analyze_iteration(self, iteration=0, progress=None, status=None):
        '''
            Analyze the data of the iteration and calculate the new Ibias_PixelDAC value.
            In the last iteration the data is also used to calculate an equalisation matrix.
            If progress is None a tqdm progress bar is used else progress should be a Multiprocess Queue which stores the progress as fraction of 1
            If there is a status queue information about the status of the scan are put into it
        '''

        h5_filename = self.output_filename + '.h5'

        self.logger.info('Starting data analysis...')
        # Fix: compare against None by identity ('is not'), not '!='
        if status is not None:
            status.put("Performing data analysis")

        # Open the HDF5 which contains all data of the optimization iteration
        with tb.open_file(h5_filename, 'r+') as h5_file:
            # Read raw data, meta data and configuration parameters for the
            # current iteration.
            # Fix: use File.get_node() to resolve the per-iteration node
            # names instead of building code strings and eval()-ing them.
            meta_data = h5_file.get_node(
                h5_file.root, 'meta_data_%d' % iteration)[:]
            run_config = h5_file.get_node(
                h5_file.root.configuration, 'run_config_%d' % iteration)[:]
            general_config = h5_file.get_node(
                h5_file.root.configuration,
                'generalConfig_%d' % iteration)[:]
            op_mode = [
                row[1] for row in general_config if row[0] == b'Op_mode'
            ][0]
            vco = [
                row[1] for row in general_config if row[0] == b'Fast_Io_en'
            ][0]

            self.logger.info('Interpret raw data...')

            # The iteration contains two halves: pixel threshold 0 first,
            # then 15. Split the meta data by scan parameter id.
            param_range = np.unique(meta_data['scan_param_id'])

            # THR = 0: first half of the scan parameter ids
            meta_data_th0 = meta_data[
                meta_data['scan_param_id'] < len(param_range) // 2]

            # THR = 15: second half of the scan parameter ids
            meta_data_th15 = meta_data[
                meta_data['scan_param_id'] >= len(param_range) // 2]

            # shift indices so that they start with zero
            start = meta_data_th15['index_start'][0]
            meta_data_th15[
                'index_start'] = meta_data_th15['index_start'] - start
            meta_data_th15['index_stop'] = meta_data_th15['index_stop'] - start

            # Fix: the original concatenated a NumPy integer into an eval()
            # code string ('[:' + index + ']'), which raises TypeError.
            # Slice the raw-data node directly instead.
            raw_data_node = h5_file.get_node(
                h5_file.root, 'raw_data_%d' % iteration)
            split_index = meta_data_th0['index_stop'][-1]

            self.logger.info('THR = 0')
            #THR = 0
            raw_data_thr0 = raw_data_node[:split_index]
            hit_data_thr0 = analysis.interpret_raw_data(raw_data_thr0,
                                                        op_mode,
                                                        vco,
                                                        meta_data_th0,
                                                        progress=progress)
            raw_data_thr0 = None  # release memory

            self.logger.info('THR = 15')
            #THR = 15
            raw_data_thr15 = raw_data_node[split_index:]
            hit_data_thr15 = analysis.interpret_raw_data(raw_data_thr15,
                                                         op_mode,
                                                         vco,
                                                         meta_data_th15,
                                                         progress=progress)
            raw_data_thr15 = None  # release memory

        # Read needed configuration parameters
        Vthreshold_start = [
            int(item[1]) for item in run_config
            if item[0] == b'Vthreshold_start'
        ][0]
        Vthreshold_stop = [
            int(item[1]) for item in run_config
            if item[0] == b'Vthreshold_stop'
        ][0]
        n_injections = [
            int(item[1]) for item in run_config if item[0] == b'n_injections'
        ][0]
        pixeldac = [
            int(item[1]) for item in run_config if item[0] == b'pixeldac'
        ][0]
        last_pixeldac = [
            int(item[1]) for item in run_config if item[0] == b'last_pixeldac'
        ][0]
        last_delta = [
            float(item[1]) for item in run_config if item[0] == b'last_delta'
        ][0]
        chip_wafer = [
            int(item[1]) for item in run_config if item[0] == b'chip_wafer'
        ][0]
        chip_x = [
            item[1].decode() for item in run_config if item[0] == b'chip_x'
        ][0]
        chip_y = [int(item[1]) for item in run_config
                  if item[0] == b'chip_y'][0]

        # Select only data which is hit data
        hit_data_thr0 = hit_data_thr0[hit_data_thr0['data_header'] == 1]
        hit_data_thr15 = hit_data_thr15[hit_data_thr15['data_header'] == 1]

        # Divide the data into two parts - data for pixel threshold 0 and 15
        param_range = np.unique(meta_data['scan_param_id'])
        meta_data = None  # release memory

        # Create histograms for number of detected hits for individual thresholds
        self.logger.info(
            'Get the global threshold distributions for all pixels...')
        scurve_th0 = analysis.scurve_hist(hit_data_thr0,
                                          np.arange(len(param_range) // 2))
        hit_data_thr0 = None
        scurve_th15 = analysis.scurve_hist(
            hit_data_thr15, np.arange(len(param_range) // 2, len(param_range)))
        hit_data_thr15 = None

        # Fit S-Curves to the histogramms for all pixels
        self.logger.info('Fit the scurves for all pixels...')
        thr2D_th0, sig2D_th0, chi2ndf2D_th0 = analysis.fit_scurves_multithread(
            scurve_th0,
            scan_param_range=list(range(Vthreshold_start, Vthreshold_stop)),
            n_injections=n_injections,
            invert_x=True,
            progress=progress)
        scurve_th0 = None
        thr2D_th15, sig2D_th15, chi2ndf2D_th15 = analysis.fit_scurves_multithread(
            scurve_th15,
            scan_param_range=list(range(Vthreshold_start, Vthreshold_stop)),
            n_injections=n_injections,
            invert_x=True,
            progress=progress)
        scurve_th15 = None

        # Put the threshold distribution based on the fit results in two histogramms
        self.logger.info('Get the cumulated global threshold distributions...')
        hist_th0 = analysis.vth_hist(thr2D_th0, Vthreshold_stop)
        hist_th15 = analysis.vth_hist(thr2D_th15, Vthreshold_stop)

        # Use the threshold histogramms to calculate the new Ibias_PixelDAC setting
        self.logger.info('Calculate new pixelDAC value...')
        pixeldac_result = analysis.pixeldac_opt(hist_th0, hist_th15, pixeldac,
                                                last_pixeldac, last_delta,
                                                Vthreshold_start,
                                                Vthreshold_stop)
        delta = pixeldac_result[1]
        rms_delta = pixeldac_result[2]

        # In the last iteration calculate also the equalisation matrix
        if delta > rms_delta - 2 and delta < rms_delta + 2:
            # Use the threshold histogramms and one threshold distribution to calculate the equalisation
            self.logger.info('Calculate the equalisation matrix...')
            eq_matrix = analysis.eq_matrix(hist_th0, hist_th15, thr2D_th0,
                                           Vthreshold_start, Vthreshold_stop)

            # Don't mask any pixels in the mask file.
            # Fix: np.bool was removed in NumPy >= 1.24; use the builtin
            # bool. np.zeros already yields all-False.
            mask_matrix = np.zeros((256, 256), dtype=bool)

            # Write the equalisation matrix to a new HDF5 file
            self.save_thr_mask(eq_matrix, chip_wafer, chip_x, chip_y)

        self.logger.info(
            'Result of iteration: Scan with pixeldac %i - New pixeldac %i. Delta was %f with optimal delta %f'
            % (int(pixeldac), int(
                pixeldac_result[0]), pixeldac_result[1], pixeldac_result[2]))
        return pixeldac_result