def setup_raw_data_analysis(self):
     self.interpreter = PyDataInterpreter()
     self.histogram = PyDataHistograming()
     self.interpreter.set_warning_output(False)
     self.histogram.set_no_scan_parameter()
     self.histogram.create_occupancy_hist(True)
     self.histogram.create_rel_bcid_hist(True)
     self.histogram.create_tot_hist(True)
     self.histogram.create_tdc_hist(True)
     try:
         self.histogram.create_tdc_distance_hist(True)
         self.interpreter.use_tdc_trigger_time_stamp(True)
     except AttributeError:
         self.has_tdc_distance = False
     else:
         self.has_tdc_distance = True
    def configure(self):
        if self.trig_count == 0:
            self.consecutive_lvl1 = (2 ** self.register.global_registers['Trig_Count']['bitlength'])
        else:
            self.consecutive_lvl1 = self.trig_count
        self.abs_occ_limit = int(self.occupancy_limit * self.n_triggers * self.consecutive_lvl1)
        if self.abs_occ_limit <= 0:
            logging.info('Any noise hit will lead to an increased pixel threshold.')
        else:
            logging.info('The pixel threshold of any pixel with an occupancy >%d will be increased.' % self.abs_occ_limit)

        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        # TDAC
        tdac_max = 2 ** self.register.pixel_registers['TDAC']['bitlength'] - 1
        self.register.set_pixel_register_value("TDAC", tdac_max)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        mask = make_box_pixel_mask_from_col_row(column=self.col_span, row=self.row_span)
        # Enable
        if self.use_enable_mask:
            self.register.set_pixel_register_value("Enable", np.logical_and(mask, self.register.get_pixel_register_value("Enable")))
        else:
            self.register.set_pixel_register_value("Enable", mask)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
        # Imon
        self.register.set_pixel_register_value('Imon', 1)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='Imon'))
        # C_High
        self.register.set_pixel_register_value('C_High', 0)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
        # C_Low
        self.register.set_pixel_register_value('C_Low', 0)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
        # Registers
#         self.register.set_global_register_value("Trig_Lat", self.trigger_latency)  # set trigger latency
        self.register.set_global_register_value("Trig_Count", self.trig_count)  # set number of consecutive triggers
        commands.extend(self.register.get_commands("WrRegister", name=["Trig_Count"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

        self.interpreter = PyDataInterpreter()
        self.histogram = PyDataHistograming()
        self.interpreter.set_trig_count(self.trig_count)
        self.interpreter.set_warning_output(False)
        self.histogram.set_no_scan_parameter()
        self.histogram.create_occupancy_hist(True)
Example #3
 def test_hit_histograming(self):
     raw_data = np.array([67307647, 67645759, 67660079, 67541711, 67718111, 67913663, 67914223, 67847647, 67978655, 68081199, 68219119, 68219487, 68425615, 68311343, 68490719, 68373295, 68553519, 68693039, 68573503, 68709951, 68717058, 68734735, 68604719, 68753999, 68761151, 68847327, 69014799, 69079791, 69211359, 69221055, 69279567, 69499247, 69773183, 69788527, 69998559, 69868559, 69872655, 70003599, 69902527, 70274575, 70321471, 70429983, 70563295, 70574959, 70447631, 70584591, 70783023, 71091999, 70972687, 70985087, 71214815, 71382623, 71609135, 71643519, 71720527, 71897695, 72167199, 72040047, 72264927, 72423983, 77471983, 77602863, 77604383, 77485295, 77616415, 77618927, 77619231, 77639983, 77655871, 77544159, 77548303, 77338399, 77345567, 77346287, 77360399, 77255407, 77386211, 77268287, 77279215, 77409599, 77075983, 76951903, 76980527, 77117023, 76991055, 77011007, 77148127, 77148815, 76827167, 76700031, 76868895, 76758575, 76889567, 76558303, 76429599, 76584783, 76468191, 76610943, 76613743, 76620879, 76629375, 76285999, 76321908, 76194319, 76205599, 76233759, 76065391, 76075839, 76093759, 75801311, 75826319, 75829215, 75699231, 75403887, 75565039, 75439135, 75111711, 75115151, 75251487, 75258399, 75138015, 75303471, 74974111, 74868559, 75030047, 75050079, 74714591, 74722847, 74595103, 74649935, 74656815, 74796511, 74455519, 74391519, 74402607, 74534383, 74189695, 74064911, 74246271, 74116063, 74248719, 74133119, 73935183, 73941087, 73811295, 73663583, 73743423, 73449647, 73453391, 73323743, 73343471, 73474159, 73345087, 73206751, 72899295, 72958559, 72828447, 72542623, 82383232, 67374687, 67503967, 67766575, 68179999, 68052847, 68198239, 68104495, 68235759, 68238223, 68472415, 68490463, 68501279, 68621071, 68623903, 68821791, 68988639, 68864047, 69003183, 68876015, 69007423, 68891407, 69267743, 69272367, 69159567, 69666911, 69684447, 70003247, 70018895, 69898927, 69938543, 69942031, 70198863, 70339919, 70587455, 70462783, 70597679, 70796399, 70800015, 70703887, 71121183, 71323151, 71243535, 71578703, 71467695, 71622879, 71629359, 71831264, 71836511, 71710319, 71992943, 72353855, 72355039, 77606628, 77608287, 77622047, 77510223, 77653263, 77664319, 77546223, 77677471, 77549375, 77213519, 77219551, 77232207, 77234991, 77366511, 77373791, 77389647, 77404383, 77070655, 77087199, 76956975, 76996431, 77009183, 77015327, 76683567, 76840351, 76862255, 76888804, 76548975, 76554767, 76427087, 76560159, 76451967, 76456847, 76468015, 76627295, 76352831, 76354863, 76365887, 75923999, 76074175, 75955439, 76086063, 75774239, 75781535, 75792671, 75662111, 75793647, 75797167, 75827023, 75696543, 75390527, 75522031, 75533663, 75541775, 75432255, 75571535, 75115535, 75247999, 75145197, 75151391, 75160799, 74974991, 74852831, 74871839, 74882783, 75023199, 74896943, 75028767, 75046431, 74922463, 74725711, 74621199, 74658623, 74663183, 74336383, 74484559, 74364526, 74370287, 74370639, 74517983, 74393615, 74205471, 74217359, 74227263, 74231727, 74102559, 74237999, 74248735, 73953599, 73868591, 74000703, 74002975, 73877295, 73664910, 73695967, 73704751, 73579583, 73582639, 73719055, 73405998, 73448207, 73481951, 73008831, 73175087, 73044495, 73058863, 73194895, 73197919, 73093151, 72895567, 72918543, 72947039, 72957919, 82383481, 67392015, 67303135, 67312799, 67318303, 67453727, 67454767, 67634719, 67645887, 67717391, 67914111, 67947919, 67818463, 68052959, 68097215, 68500543, 68711909, 68584735, 68726975, 68741679, 68615471, 68750559, 68755487, 68629311, 68764687, 68765648, 68990175, 69022959, 
69023727, 69217327, 69547327, 69665839, 69809983, 69814815, 70006831, 70037807, 70055951, 70068511, 70184031, 70323999, 70334687, 70566095, 70588751, 70723935, 71049695, 70952031, 71084831, 71376863, 71256287, 71611039, 71487727, 71618591, 71623999, 71514239, 71891231, 71897327, 71897663, 72036783, 72391487, 77604975, 77608163, 77621327, 77501983, 77635039, 77646559, 77654671, 77655695, 77546543, 77678383, 77345471, 77224735, 77375519, 77385519, 77393967, 76944399, 76975663, 77114628, 77115231, 77127525, 77142959, 76677423, 76699967, 76722287, 76857647, 76739039, 76883567, 76891615, 76453343, 76584335, 76590623, 76594607, 76600031, 76611167, 76617743, 76622303, 76285999, 76329231, 76335839, 76348175, 76350351, 76356783, 75910383, 75639343, 75787615, 75660079, 75796895, 75797615, 75692559, 75827999, 75833487, 75836479, 75518943, 75568143, 75278943, 75290271, 75297903, 75309391, 75312479, 75315119, 74852223, 74987055, 74858047, 74992943, 74875439, 75008031, 74885407, 75027743, 75055583, 74927839, 74738719, 74629087, 74767391, 74779295, 74789343, 74791247, 74323183, 74454239, 74349455, 74364751, 74516047, 74528559, 74192207, 74201535, 74084367, 74220511, 74109039, 74263263, 74133215, 73807119, 73945313, 73868148, 74001631, 73536815, 73684815, 73711439, 73275407, 73408799, 73052767, 73190975, 73209823, 72788271, 72960607, 72487647, 82383730, 67407151, 67415583, 67322127, 67523871, 67700959, 67583039, 67905375, 67793199, 68159583, 68237791, 68306479, 68492399], np.uint32)
     interpreter = PyDataInterpreter()
     histograming = PyDataHistograming()
     interpreter.set_trig_count(1)
     interpreter.set_warning_output(False)
     histograming.set_no_scan_parameter()
     histograming.create_occupancy_hist(True)
     interpreter.interpret_raw_data(raw_data)
     interpreter.store_event()
     histograming.add_hits(interpreter.get_hits())
     occ_hist_cpp = histograming.get_occupancy()[:, :, 0]
     col_arr, row_arr = convert_data_array(raw_data, filter_func=is_data_record, converter_func=get_col_row_array_from_data_record_array)
     occ_hist_python, _, _ = np.histogram2d(col_arr, row_arr, bins=(80, 336), range=[[1, 80], [1, 336]])
     self.assertTrue(np.all(occ_hist_cpp == occ_hist_python))
Example #4
 def setup_interpretation(self):
     self.histograming = PyDataHistograming()
     self.histograming.set_no_scan_parameter()
     self.histograming.create_occupancy_hist(True)
     self.histograming.create_rel_bcid_hist(True)
     self.histograming.create_tot_hist(True)
     self.histograming.create_tdc_hist(True)
     # Variables
     self.n_readouts = 0
     self.readout = 0
     self.fps = 0  # data frames per second
     self.hps = 0  # hits per second
     self.eps = 0  # events per second
     self.plot_delay = 0
     self.total_hits = 0
     self.total_events = 0
     self.updateTime = time.time()
     # Histograms from interpretation stored for summing
     self.tdc_counters = None
     self.error_counters = None
     self.service_records_counters = None
     self.trigger_error_counters = None
 def test_trigger_data_format(self):
     raw_data = np.array([82411778, 82793472, 82411779, 82794496, 82411780, 82795520, 82379013, 82379014,
                          82379015, 82379016, 67240383, 82379017, 82379018, 82379019, 82379020, 82379021,
                          82379022, 82379023, 82379024, 82379025, 3611295745, 82380701, 82380702, 82380703,
                          82380704, 82380705, 82380706, 82380707, 67240383, 82380708, 82380709, 82380710,
                          82380711, 82380712, 82380713, 82380714, 82380715, 82380716, 3611361282, 82381368,
                          82381369, 82381370, 82381371, 82381372, 82381373, 82381374, 67240367, 82381375,
                          82381376, 82381377, 82381378, 82381379, 82381380, 82381381, 82381382, 82381383,
                          3611426819, 82382035, 82382036, 82382037, 82382038, 82382039, 82382040, 82382041,
                          67240383, 82382042, 82382043, 82382044, 82382045, 82382046, 82382047, 82382048,
                          82382049, 82382050, 3611492356, 82383726, 82383727, 82383728, 82383729, 82383730,
                          82383731, 82383732, 67240367, 82383733, 82383734, 82383735, 82383736, 82383737,
                          82383738, 82383739, 82383740, 82383741, 3611557893], np.uint32)
     raw_data_tlu = np.array([3611295745, 3611361282, 3611426819, 3611492356, 3611557893], np.uint32)
     interpreter = PyDataInterpreter()
     histograming = PyDataHistograming()
     for i in (0, 1, 2):  # 0: trigger data contains trigger number, 1: trigger data contains time stamp, 2: trigger data contains 15bit time stamp and 16bit trigger number
         interpreter.set_trigger_data_format(i)
         interpreter.set_trig_count(16)
         interpreter.set_warning_output(False)
         histograming.set_no_scan_parameter()
         histograming.create_occupancy_hist(True)
         interpreter.interpret_raw_data(raw_data)
         interpreter.store_event()
         histograming.add_hits(interpreter.get_hits())
         hits = interpreter.get_hits()
         if i == 0:
             trigger_number_ref = raw_data_tlu & 0x7FFFFFFF
             trigger_time_stamp_ref = np.zeros_like(raw_data_tlu)
         elif i == 1:
             trigger_number_ref = np.zeros_like(raw_data_tlu)
             trigger_time_stamp_ref = raw_data_tlu & 0x7FFFFFFF
         elif i == 2:
             trigger_number_ref = raw_data_tlu & 0x0000FFFF
             trigger_time_stamp_ref = (raw_data_tlu & 0x7FFF0000) >> 16
         self.assertTrue(np.all(hits["trigger_number"] == trigger_number_ref))
         self.assertTrue(np.all(hits["trigger_time_stamp"] == trigger_time_stamp_ref))
class DataWorker(QtCore.QObject):
    run_start = QtCore.pyqtSignal()
    run_config_data = QtCore.pyqtSignal(dict)
    global_config_data = QtCore.pyqtSignal(dict)
    filename = QtCore.pyqtSignal(dict)
    interpreted_data = QtCore.pyqtSignal(dict)
    meta_data = QtCore.pyqtSignal(dict)
    finished = QtCore.pyqtSignal()

    def __init__(self):
        QtCore.QObject.__init__(self)
        self.integrate_readouts = 1
        self.n_readout = 0
        self._stop_readout = Event()
        self.setup_raw_data_analysis()
        self.reset_lock = Lock()

    def setup_raw_data_analysis(self):
        self.interpreter = PyDataInterpreter()
        self.histogram = PyDataHistograming()
        self.interpreter.set_warning_output(False)
        self.histogram.set_no_scan_parameter()
        self.histogram.create_occupancy_hist(True)
        self.histogram.create_rel_bcid_hist(True)
        self.histogram.create_tot_hist(True)
        self.histogram.create_tdc_hist(True)
        try:
            self.histogram.create_tdc_distance_hist(True)
            self.interpreter.use_tdc_trigger_time_stamp(True)
        except AttributeError:
            self.has_tdc_distance = False
        else:
            self.has_tdc_distance = True

    def connect(self, socket_addr):
        self.socket_addr = socket_addr
        self.context = zmq.Context()
        self.socket_pull = self.context.socket(zmq.SUB)  # subscriber
        self.socket_pull.setsockopt(zmq.SUBSCRIBE,
                                    '')  # do not filter any data
        self.socket_pull.connect(self.socket_addr)

    def on_set_integrate_readouts(self, value):
        self.integrate_readouts = value

    def reset(self):
        with self.reset_lock:
            self.histogram.reset()
            self.interpreter.reset()
            self.n_readout = 0

    def analyze_raw_data(self, raw_data):
        self.interpreter.interpret_raw_data(raw_data)
        self.histogram.add_hits(self.interpreter.get_hits())

    def process_data(self):  # infinite loop via QObject.moveToThread(), does not block event loop
        while not self._stop_readout.wait(0.01):  # use wait(), do not block here
            with self.reset_lock:
                try:
                    meta_data = self.socket_pull.recv_json(flags=zmq.NOBLOCK)
                except zmq.Again:
                    pass
                else:
                    name = meta_data.pop('name')
                    if name == 'ReadoutData':
                        data = self.socket_pull.recv()
                        # reconstruct numpy array
                        buf = buffer(data)
                        dtype = meta_data.pop('dtype')
                        shape = meta_data.pop('shape')
                        data_array = np.frombuffer(buf,
                                                   dtype=dtype).reshape(shape)
                        # count readouts and reset
                        self.n_readout += 1
                        if self.integrate_readouts != 0 and self.n_readout % self.integrate_readouts == 0:
                            self.histogram.reset()
                            # we do not want to reset interpreter to keep the error counters
                            # self.interpreter.reset()
                        # interpreted data
                        self.analyze_raw_data(data_array)
                        if self.integrate_readouts == 0 or self.n_readout % self.integrate_readouts == self.integrate_readouts - 1:
                            interpreted_data = {
                                'occupancy':
                                self.histogram.get_occupancy(),
                                'tot_hist':
                                self.histogram.get_tot_hist(),
                                'tdc_counters':
                                self.interpreter.get_tdc_counters(),
                                'tdc_distance':
                                self.interpreter.get_tdc_distance()
                                if self.has_tdc_distance else np.zeros(
                                    (256, ), dtype=np.uint8),
                                'error_counters':
                                self.interpreter.get_error_counters(),
                                'service_records_counters':
                                self.interpreter.get_service_records_counters(
                                ),
                                'trigger_error_counters':
                                self.interpreter.get_trigger_error_counters(),
                                'rel_bcid_hist':
                                self.histogram.get_rel_bcid_hist()
                            }
                            self.interpreted_data.emit(interpreted_data)
                        # meta data
                        meta_data.update({
                            'n_hits':
                            self.interpreter.get_n_hits(),
                            'n_events':
                            self.interpreter.get_n_events()
                        })
                        self.meta_data.emit(meta_data)
                    elif name == 'RunConf':
                        self.run_config_data.emit(meta_data)
                    elif name == 'GlobalRegisterConf':
                        trig_count = int(meta_data['conf']['Trig_Count'])
                        self.interpreter.set_trig_count(trig_count)
                        self.global_config_data.emit(meta_data)
                    elif name == 'Reset':
                        self.histogram.reset()
                        self.interpreter.reset()
                        self.run_start.emit()
                    elif name == 'Filename':
                        self.filename.emit(meta_data)
        self.finished.emit()

    def stop(self):
        self._stop_readout.set()
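
The process_data() loop above is meant to run inside a Qt worker thread (see the moveToThread() comment). A minimal, hypothetical driver sketch follows; the PyQt5 import, socket address and signal wiring are illustrative assumptions, not part of the original code.

# Hypothetical sketch: drive a DataWorker from a QThread so process_data() polls
# without blocking the GUI event loop (PyQt5 assumed; the original may use PyQt4).
from PyQt5 import QtCore

thread = QtCore.QThread()
worker = DataWorker()
worker.connect('tcp://127.0.0.1:5678')  # placeholder address of the ZMQ data publisher
worker.moveToThread(thread)
thread.started.connect(worker.process_data)  # start the polling loop once the thread runs
worker.finished.connect(thread.quit)         # let the thread exit when the worker stops
thread.start()
# later, e.g. from a GUI slot: worker.stop()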
class PybarFEI4Histogrammer(Transceiver):

    def setup_transceiver(self):
        self.set_bidirectional_communication()  # We want to be able to change the histogrammer settings

    def setup_interpretation(self):
        self.histograming = PyDataHistograming()
        self.histograming.set_no_scan_parameter()
        self.histograming.create_occupancy_hist(True)
        self.histograming.create_rel_bcid_hist(True)
        self.histograming.create_tot_hist(True)
        self.histograming.create_tdc_hist(True)
        # Variables
        self.n_readouts = 0
        self.readout = 0
        self.fps = 0  # data frames per second
        self.hps = 0  # hits per second
        self.eps = 0  # events per second
        self.plot_delay = 0
        self.total_hits = 0
        self.total_events = 0
        self.updateTime = time.time()
        # Histograms from interpretation stored for summing
        self.tdc_counters = None
        self.error_counters = None
        self.service_records_counters = None
        self.trigger_error_counters = None

    def deserialze_data(self, data):
        return jsonapi.loads(data, object_hook=utils.json_numpy_obj_hook)

    def interpret_data(self, data):
        if 'meta_data' in data[0][1]:  # Meta data is directly forwarded to the receiver; only hit data and event counters are histogrammed; 0 for frontend index, 1 for data dict
            meta_data = data[0][1]['meta_data']
            now = time.time()
            recent_total_hits = meta_data['n_hits']
            recent_total_events = meta_data['n_events']
            recent_fps = 1.0 / (now - self.updateTime)  # calculate FPS
            recent_hps = (recent_total_hits - self.total_hits) / (now - self.updateTime)
            recent_eps = (recent_total_events - self.total_events) / (now - self.updateTime)
            self.updateTime = now
            self.total_hits = recent_total_hits
            self.total_events = recent_total_events
            self.fps = self.fps * 0.7 + recent_fps * 0.3
            self.hps = self.hps + (recent_hps - self.hps) * 0.3 / self.fps
            self.eps = self.eps + (recent_eps - self.eps) * 0.3 / self.fps
            meta_data.update({'fps': self.fps, 'hps': self.hps, 'total_hits': self.total_hits, 'eps': self.eps, 'total_events': self.total_events})
            return [data[0][1]]

        self.readout += 1

        if self.n_readouts != 0:  # = 0 for infinite integration
            if self.readout % self.n_readouts == 0:
                self.histograming.reset()
                self.tdc_counters = np.zeros_like(self.tdc_counters)
                self.error_counters = np.zeros_like(self.error_counters)
                self.service_records_counters = np.zeros_like(self.service_records_counters)
                self.trigger_error_counters = np.zeros_like(self.trigger_error_counters)
                self.readout = 0

        interpreted_data = data[0][1]

        self.histograming.add_hits(interpreted_data['hits'])

        # Sum up interpreter histograms
        if self.tdc_counters is not None:
            self.tdc_counters += interpreted_data['tdc_counters']
        else:
            self.tdc_counters = interpreted_data['tdc_counters'].copy()  # Copy needed to give ownership to histogrammer
        if self.error_counters is not None:
            self.error_counters += interpreted_data['error_counters']
        else:
            self.error_counters = interpreted_data['error_counters'].copy()  # Copy needed to give ownership to histogrammer
        if self.service_records_counters is not None:
            self.service_records_counters += interpreted_data['service_records_counters']
        else:
            self.service_records_counters = interpreted_data['service_records_counters'].copy()  # Copy needed to give ownership to histogrammer
        if self.trigger_error_counters is not None:
            self.trigger_error_counters += interpreted_data['trigger_error_counters']
        else:
            self.trigger_error_counters = interpreted_data['trigger_error_counters'].copy()  # Copy needed to give ownership to histogrammer

        histogrammed_data = {
            'occupancy': self.histograming.get_occupancy(),
            'tot_hist': self.histograming.get_tot_hist(),
            'tdc_counters': self.tdc_counters,
            'error_counters': self.error_counters,
            'service_records_counters': self.service_records_counters,
            'trigger_error_counters': self.trigger_error_counters,
            'rel_bcid_hist': self.histograming.get_rel_bcid_hist()
        }

        return [histogrammed_data]

    def serialze_data(self, data):
        return jsonapi.dumps(data, cls=utils.NumpyEncoder)

    def handle_command(self, command):
        if command[0] == 'RESET':
            self.histograming.reset()
            self.tdc_counters = np.zeros_like(self.tdc_counters)
            self.error_counters = np.zeros_like(self.error_counters)
            self.service_records_counters = np.zeros_like(self.service_records_counters)
            self.trigger_error_counters = np.zeros_like(self.trigger_error_counters)
        else:
            self.n_readouts = int(command[0])
Example #9
def histogram_cluster_table(analyzed_data_file,
                            output_file,
                            chunk_size=10000000):
    '''Reads the cluster table in chunks and histograms the cluster seed pixels into one occupancy array.
    The third dimension of the occupancy array is the scan parameter index; if a scan parameter is given
    in the meta data, the occupancy histogramming is done per scan parameter step.

    Parameters
    ----------
    analyzed_data_file : string
        HDF5 file name of the input file containing the cluster table.
    output_file : string
        HDF5 file name the occupancy histogram ('HistOcc') is written to.
    chunk_size : int
        Number of cluster table rows that are read and histogrammed per chunk.
    '''

    with tb.open_file(analyzed_data_file, mode="r") as in_file_h5:
        with tb.open_file(output_file, mode="w") as out_file_h5:
            histogram = PyDataHistograming()
            histogram.create_occupancy_hist(True)
            scan_parameters = None
            event_number_indices = None
            scan_parameter_indices = None
            try:
                meta_data = in_file_h5.root.meta_data[:]
                scan_parameters = analysis_utils.get_unique_scan_parameter_combinations(
                    meta_data)
                if scan_parameters is not None:
                    scan_parameter_indices = np.array(range(
                        0, len(scan_parameters)),
                                                      dtype='u4')
                    event_number_indices = np.ascontiguousarray(
                        scan_parameters['event_number']).astype(np.uint64)
                    histogram.add_meta_event_index(
                        event_number_indices,
                        array_length=len(scan_parameters['event_number']))
                    histogram.add_scan_parameter(scan_parameter_indices)
                    logging.info(
                        "Add %d different scan parameter(s) for analysis",
                        len(scan_parameters))
                else:
                    logging.info("No scan parameter data provided")
                    histogram.set_no_scan_parameter()
            except tb.exceptions.NoSuchNodeError:
                logging.info("No meta data provided, use no scan parameter")
                histogram.set_no_scan_parameter()

            logging.info('Histogram cluster seeds...')
            progress_bar = progressbar.ProgressBar(
                widgets=[
                    '',
                    progressbar.Percentage(), ' ',
                    progressbar.Bar(marker='*', left='|', right='|'), ' ',
                    progressbar.AdaptiveETA()
                ],
                maxval=in_file_h5.root.Cluster.shape[0],
                term_width=80)
            progress_bar.start()
            total_cluster = 0  # to check analysis
            for cluster, index in analysis_utils.data_aligned_at_events(
                    in_file_h5.root.Cluster, chunk_size=chunk_size):
                total_cluster += len(cluster)
                histogram.add_cluster_seed_hits(cluster, len(cluster))
                progress_bar.update(index)
            progress_bar.finish()

            filter_table = tb.Filters(
                complib='blosc', complevel=5,
                fletcher32=False)  # compression of the written data
            occupancy_array = histogram.get_occupancy().T
            occupancy_array_table = out_file_h5.create_carray(
                out_file_h5.root,
                name='HistOcc',
                title='Occupancy Histogram',
                atom=tb.Atom.from_dtype(occupancy_array.dtype),
                shape=occupancy_array.shape,
                filters=filter_table)
            occupancy_array_table[:] = occupancy_array

            if total_cluster != np.sum(occupancy_array):
                logging.warning(
                    'Analysis shows inconsistent number of clusters used. Check needed!'
                )
            in_file_h5.root.meta_data.copy(
                out_file_h5.root)  # copy meta_data node to the new file
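
A hypothetical call of histogram_cluster_table() defined above; the file names are placeholders for a pyBAR analysis output file containing a Cluster table (and optionally a meta_data node).

# Hypothetical usage; file names are placeholders.
histogram_cluster_table(analyzed_data_file='scan_analyzed.h5',
                        output_file='scan_cluster_occupancy.h5',  # receives the 'HistOcc' array
                        chunk_size=10000000)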
Example #10
 def test_libraries_stability(self):  # calls 50 times the constructor and destructor to check the libraries
     for _ in range(50):
         interpreter = PyDataInterpreter()
         histogram = PyDataHistograming()
         del interpreter
         del histogram
Example #11
 def setUpClass(self):
     self.interpreter = PyDataInterpreter()
     self.histogram = PyDataHistograming()
class PybarFEI4Histogrammer(Transceiver):
    def setup_transceiver(self):
        self.set_bidirectional_communication(
        )  # We want to be able to change the histogrammer settings

    def setup_interpretation(self):
        self.histograming = PyDataHistograming()
        self.histograming.set_no_scan_parameter()
        self.histograming.create_occupancy_hist(True)
        self.histograming.create_rel_bcid_hist(True)
        self.histograming.create_tot_hist(True)
        self.histograming.create_tdc_hist(True)

        # Variables
        self.n_readouts = 0
        self.readout = 0
        self.fps = 0  # data frames per second
        self.hps = 0  # hits per second
        self.eps = 0  # events per second
        self.plot_delay = 0
        self.total_hits = 0
        self.total_events = 0
        self.updateTime = time.time()

        # Histograms from interpretation stored for summing
        self.tdc_counters = None
        self.error_counters = None
        self.service_records_counters = None
        self.trigger_error_counters = None

    def deserialze_data(self, data):
        return jsonapi.loads(data, object_hook=utils.json_numpy_obj_hook)

    def interpret_data(self, data):
        # Meta data is directly forwarded to the receiver, only hit data and event counters are histogrammed
        if 'meta_data' in data[0][1]:  # 0 for frontend index, 1 for data dict
            meta_data = data[0][1]['meta_data']
            now = time.time()
            recent_total_hits = meta_data['n_hits']
            recent_total_events = meta_data['n_events']
            recent_fps = 1.0 / (now - self.updateTime)  # calculate FPS
            recent_hps = (recent_total_hits -
                          self.total_hits) / (now - self.updateTime)
            recent_eps = (recent_total_events -
                          self.total_events) / (now - self.updateTime)
            self.updateTime = now
            self.total_hits = recent_total_hits
            self.total_events = recent_total_events
            self.fps = self.fps * 0.7 + recent_fps * 0.3
            self.hps = self.hps + (recent_hps - self.hps) * 0.3 / self.fps
            self.eps = self.eps + (recent_eps - self.eps) * 0.3 / self.fps
            meta_data.update({
                'fps': self.fps,
                'hps': self.hps,
                'total_hits': self.total_hits,
                'eps': self.eps,
                'total_events': self.total_events
            })
            return [data[0][1]]

        self.readout += 1

        if self.n_readouts != 0:  # 0 for infinite integration
            if self.readout % self.n_readouts == 0:
                self.histograming.reset()
                self.tdc_counters = np.zeros_like(self.tdc_counters)
                self.error_counters = np.zeros_like(self.error_counters)
                self.service_records_counters = np.zeros_like(
                    self.service_records_counters)
                self.trigger_error_counters = np.zeros_like(
                    self.trigger_error_counters)
                self.readout = 0

        interpreted_data = data[0][1]

        self.histograming.add_hits(interpreted_data['hits'])

        # Sum up interpreter histograms
        if self.tdc_counters is not None:
            self.tdc_counters += interpreted_data['tdc_counters']
        else:
            self.tdc_counters = interpreted_data['tdc_counters'].copy(
            )  # Copy needed to give ownership to histogrammer
        if self.error_counters is not None:
            self.error_counters += interpreted_data['error_counters']
        else:
            self.error_counters = interpreted_data['error_counters'].copy(
            )  # Copy needed to give ownership to histogrammer
        if self.service_records_counters is not None:
            self.service_records_counters += interpreted_data[
                'service_records_counters']
        else:
            self.service_records_counters = interpreted_data[
                'service_records_counters'].copy(
                )  # Copy needed to give ownership to histogrammer
        if self.trigger_error_counters is not None:
            self.trigger_error_counters += interpreted_data[
                'trigger_error_counters']
        else:
            self.trigger_error_counters = interpreted_data[
                'trigger_error_counters'].copy(
                )  # Copy needed to give ownership to histogrammer

        histogrammed_data = {
            'occupancy': self.histograming.get_occupancy(),
            'tot_hist': self.histograming.get_tot_hist(),
            'tdc_counters': self.tdc_counters,
            'error_counters': self.error_counters,
            'service_records_counters': self.service_records_counters,
            'trigger_error_counters': self.trigger_error_counters,
            'rel_bcid_hist': self.histograming.get_rel_bcid_hist()
        }

        return [histogrammed_data]

    def serialze_data(self, data):
        return jsonapi.dumps(data, cls=utils.NumpyEncoder)

    def handle_command(self, command):
        if command[0] == 'RESET':  # Reset command to reset the histograms
            self.histograming.reset()
            self.tdc_counters = np.zeros_like(self.tdc_counters)
            self.error_counters = np.zeros_like(self.error_counters)
            self.service_records_counters = np.zeros_like(
                self.service_records_counters)
            self.trigger_error_counters = np.zeros_like(
                self.trigger_error_counters)
        else:
            self.n_readouts = int(command[0])
''' Example of how to interpret raw data and how to histogram the hits.
'''

import numpy as np

from pybar_fei4_interpreter.data_interpreter import PyDataInterpreter
from pybar_fei4_interpreter.data_histograming import PyDataHistograming

# Initialize interpretation modules
interpreter = PyDataInterpreter()
histograming = PyDataHistograming()

# Create raw data
raw_data = np.array([67307647, 67645759, 67660079, 67541711, 67718111, 67913663, 67914223, 67847647, 67978655, 68081199, 68219119, 68219487, 68425615, 68311343, 68490719, 68373295, 68553519, 68693039, 68573503, 68709951, 68717058, 68734735, 68604719, 68753999, 68761151, 68847327, 69014799, 69079791, 69211359, 69221055, 69279567, 69499247, 69773183, 69788527, 69998559, 69868559, 69872655, 70003599, 69902527, 70274575, 70321471, 70429983, 70563295, 70574959, 70447631, 70584591, 70783023, 71091999, 70972687, 70985087, 71214815, 71382623, 71609135, 71643519, 71720527, 71897695, 72167199, 72040047, 72264927, 72423983, 77471983, 77602863, 77604383, 77485295, 77616415, 77618927, 77619231, 77639983, 77655871, 77544159, 77548303, 77338399, 77345567, 77346287, 77360399, 77255407, 77386211, 77268287, 77279215, 77409599, 77075983, 76951903, 76980527, 77117023, 76991055, 77011007, 77148127, 77148815, 76827167, 76700031, 76868895, 76758575, 76889567, 76558303, 76429599, 76584783, 76468191, 76610943, 76613743, 76620879, 76629375, 76285999, 76321908, 76194319, 76205599, 76233759, 76065391, 76075839, 76093759, 75801311, 75826319, 75829215, 75699231, 75403887, 75565039, 75439135, 75111711, 75115151, 75251487, 75258399, 75138015, 75303471, 74974111, 74868559, 75030047, 75050079, 74714591, 74722847, 74595103, 74649935, 74656815, 74796511, 74455519, 74391519, 74402607, 74534383, 74189695, 74064911, 74246271, 74116063, 74248719, 74133119, 73935183, 73941087, 73811295, 73663583, 73743423, 73449647, 73453391, 73323743, 73343471, 73474159, 73345087, 73206751, 72899295, 72958559, 72828447, 72542623, 82383232, 67374687, 67503967, 67766575, 68179999, 68052847, 68198239, 68104495, 68235759, 68238223, 68472415, 68490463, 68501279, 68621071, 68623903, 68821791, 68988639, 68864047, 69003183, 68876015, 69007423, 68891407, 69267743, 69272367, 69159567, 69666911, 69684447, 70003247, 70018895, 69898927, 69938543, 69942031, 70198863, 70339919, 70587455, 70462783, 70597679, 70796399, 70800015, 70703887, 71121183, 71323151, 71243535, 71578703, 71467695, 71622879, 71629359, 71831264, 71836511, 71710319, 71992943, 72353855, 72355039, 77606628, 77608287, 77622047, 77510223, 77653263, 77664319, 77546223, 77677471, 77549375, 77213519, 77219551, 77232207, 77234991, 77366511, 77373791, 77389647, 77404383, 77070655, 77087199, 76956975, 76996431, 77009183, 77015327, 76683567, 76840351, 76862255, 76888804, 76548975, 76554767, 76427087, 76560159, 76451967, 76456847, 76468015, 76627295, 76352831, 76354863, 76365887, 75923999, 76074175, 75955439, 76086063, 75774239, 75781535, 75792671, 75662111, 75793647, 75797167, 75827023, 75696543, 75390527, 75522031, 75533663, 75541775, 75432255, 75571535, 75115535, 75247999, 75145197, 75151391, 75160799, 74974991, 74852831, 74871839, 74882783, 75023199, 74896943, 75028767, 75046431, 74922463, 74725711, 74621199, 74658623, 74663183, 74336383, 74484559, 74364526, 74370287, 74370639, 74517983, 74393615, 74205471, 74217359, 74227263, 74231727, 74102559, 74237999, 74248735, 73953599, 73868591, 74000703, 74002975, 73877295, 73664910, 73695967, 73704751, 73579583, 73582639, 73719055, 73405998, 73448207, 73481951, 73008831, 73175087, 73044495, 73058863, 73194895, 73197919, 73093151, 72895567, 72918543, 72947039, 72957919, 82383481, 67392015, 67303135, 67312799, 67318303, 67453727, 67454767, 67634719, 67645887, 67717391, 67914111, 67947919, 67818463, 68052959, 68097215, 68500543, 68711909, 68584735, 68726975, 68741679, 68615471, 68750559, 68755487, 68629311, 68764687, 68765648, 68990175, 69022959, 69023727, 
69217327, 69547327, 69665839, 69809983, 69814815, 70006831, 70037807, 70055951, 70068511, 70184031, 70323999, 70334687, 70566095, 70588751, 70723935, 71049695, 70952031, 71084831, 71376863, 71256287, 71611039, 71487727, 71618591, 71623999, 71514239, 71891231, 71897327, 71897663, 72036783, 72391487, 77604975, 77608163, 77621327, 77501983, 77635039, 77646559, 77654671, 77655695, 77546543, 77678383, 77345471, 77224735, 77375519, 77385519, 77393967, 76944399, 76975663, 77114628, 77115231, 77127525, 77142959, 76677423, 76699967, 76722287, 76857647, 76739039, 76883567, 76891615, 76453343, 76584335, 76590623, 76594607, 76600031, 76611167, 76617743, 76622303, 76285999, 76329231, 76335839, 76348175, 76350351, 76356783, 75910383, 75639343, 75787615, 75660079, 75796895, 75797615, 75692559, 75827999, 75833487, 75836479, 75518943, 75568143, 75278943, 75290271, 75297903, 75309391, 75312479, 75315119, 74852223, 74987055, 74858047, 74992943, 74875439, 75008031, 74885407, 75027743, 75055583, 74927839, 74738719, 74629087, 74767391, 74779295, 74789343, 74791247, 74323183, 74454239, 74349455, 74364751, 74516047, 74528559, 74192207, 74201535, 74084367, 74220511, 74109039, 74263263, 74133215, 73807119, 73945313, 73868148, 74001631, 73536815, 73684815, 73711439, 73275407, 73408799, 73052767, 73190975, 73209823, 72788271, 72960607, 72487647, 82383730, 67407151, 67415583, 67322127, 67523871, 67700959, 67583039, 67905375, 67793199, 68159583, 68237791, 68306479, 68492399], np.uint32)

# Set settings
histograming.set_no_scan_parameter()  # The data has no scan parameter, thus it is not histogrammed per scan parameter step
histograming.create_occupancy_hist(True)  # Tell the histogrammer to create an occupancy hist

# Interpret the raw data (builds hits)
interpreter.interpret_raw_data(raw_data)
# Hits are buffered per event, because interpret_raw_data() does not have to be called with event-aligned chunks;
# store_event() tells the interpreter that the last event is finished so it is also stored
interpreter.store_event()

# Histogram the hits
hits = interpreter.get_hits()
histograming.add_hits(hits)

# Get and show the occupancy hist
occ_hist = histograming.get_occupancy()[:, :, 0]  # 0 because there is no scan parameter, otherwise histogramming is done per scan parameter
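
The occupancy array can be displayed directly; the following matplotlib snippet is an illustrative addition (not part of the original example) and assumes the first axis is the column index, as in the unit test above.

# Optional visualization of the occupancy histogram (illustrative addition).
import matplotlib.pyplot as plt

plt.imshow(occ_hist.T, origin='lower', aspect='auto', interpolation='none')
plt.xlabel('Column')
plt.ylabel('Row')
plt.colorbar(label='Hits')
plt.show()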
class ThresholdBaselineTuning(Fei4RunBase):
    '''Threshold Baseline Tuning

    Tuning the FEI4 to the lowest possible threshold (GDAC and TDAC). Feedback current will not be tuned.
    NOTE: In case of RX errors decrease the trigger frequency (= increase trigger_rate_limit)
    NOTE: To increase the TDAC range, decrease TdacVbp.
    '''
    _default_run_conf = {
        "occupancy_limit": 0,  # occupancy limit, when reached the TDAC will be decreased (increasing threshold). 0 will mask any pixel with occupancy greater than zero
        "scan_parameters": [('Vthin_AltFine', (120, None)), ('Step', 60)],  # the Vthin_AltFine range, number of steps (repetition at constant Vthin_AltFine)
        "increase_threshold": 5,  # increasing the global threshold (Vthin_AltFine) after tuning
        "disabled_pixels_limit": 0.01,  # limit of disabled pixels, fraction of all pixels
        "use_enable_mask": False,  # if True, enable mask from config file anded with mask (from col_span and row_span), if False use mask only for enable mask
        "n_triggers": 10000,  # total number of trigger sent to FE
        "trigger_rate_limit": 500,  # artificially limiting the trigger rate, in BCs (25ns)
        "trig_count": 0,  # FE-I4 trigger count, number of consecutive BCs, 0 means 16, from 0 to 15
        "col_span": [1, 80],  # column range (from minimum to maximum value). From 1 to 80.
        "row_span": [1, 336],  # row range (from minimum to maximum value). From 1 to 336.
    }

    def configure(self):
        if self.trig_count == 0:
            self.consecutive_lvl1 = (2 ** self.register.global_registers['Trig_Count']['bitlength'])
        else:
            self.consecutive_lvl1 = self.trig_count
        self.abs_occ_limit = int(self.occupancy_limit * self.n_triggers * self.consecutive_lvl1)
        if self.abs_occ_limit <= 0:
            logging.info('Any noise hit will lead to an increased pixel threshold.')
        else:
            logging.info('The pixel threshold of any pixel with an occupancy >%d will be increased.' % self.abs_occ_limit)

        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        # TDAC
        tdac_max = 2 ** self.register.pixel_registers['TDAC']['bitlength'] - 1
        self.register.set_pixel_register_value("TDAC", tdac_max)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        mask = make_box_pixel_mask_from_col_row(column=self.col_span, row=self.row_span)
        # Enable
        if self.use_enable_mask:
            self.register.set_pixel_register_value("Enable", np.logical_and(mask, self.register.get_pixel_register_value("Enable")))
        else:
            self.register.set_pixel_register_value("Enable", mask)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
        # Imon
        self.register.set_pixel_register_value('Imon', 1)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='Imon'))
        # C_High
        self.register.set_pixel_register_value('C_High', 0)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_High'))
        # C_Low
        self.register.set_pixel_register_value('C_Low', 0)
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=True, name='C_Low'))
        # Registers
#         self.register.set_global_register_value("Trig_Lat", self.trigger_latency)  # set trigger latency
        self.register.set_global_register_value("Trig_Count", self.trig_count)  # set number of consecutive triggers
        commands.extend(self.register.get_commands("WrRegister", name=["Trig_Count"]))
        commands.extend(self.register.get_commands("RunMode"))
        self.register_utils.send_commands(commands)

        self.interpreter = PyDataInterpreter()
        self.histogram = PyDataHistograming()
        self.interpreter.set_trig_count(self.trig_count)
        self.interpreter.set_warning_output(False)
        self.histogram.set_no_scan_parameter()
        self.histogram.create_occupancy_hist(True)

    def scan(self):
        scan_parameter_range = [self.register.get_global_register_value("Vthin_AltFine"), 0]
        if self.scan_parameters.Vthin_AltFine[0]:
            scan_parameter_range[0] = self.scan_parameters.Vthin_AltFine[0]
        if self.scan_parameters.Vthin_AltFine[1]:
            scan_parameter_range[1] = self.scan_parameters.Vthin_AltFine[1]
        steps = 1
        if self.scan_parameters.Step:
            steps = self.scan_parameters.Step

        lvl1_command = self.register.get_commands("LV1")[0] + self.register.get_commands("zeros", length=self.trigger_rate_limit)[0]
        self.total_scan_time = int(lvl1_command.length() * 25 * (10 ** -9) * self.n_triggers)

        preselected_pixels = invert_pixel_mask(self.register.get_pixel_register_value('Enable')).sum()
        disabled_pixels_limit_cnt = int(self.disabled_pixels_limit * self.register.get_pixel_register_value('Enable').sum())
        disabled_pixels = 0
        self.last_reg_val = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)
        self.last_step = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)
        self.last_good_threshold = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)
        self.last_good_tdac = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)
        self.last_good_enable_mask = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)
        self.last_occupancy_hist = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)
        self.last_occupancy_mask = deque([None] * self.increase_threshold, maxlen=self.increase_threshold + 1)

        for reg_val in range(scan_parameter_range[0], scan_parameter_range[1] - 1, -1):
            if self.stop_run.is_set():
                break
            logging.info('Scanning Vthin_AltFine %d', reg_val)
            commands = []
            commands.extend(self.register.get_commands("ConfMode"))
            self.register.set_global_register_value("Vthin_AltFine", reg_val)  # set number of consecutive triggers
            commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltFine"]))
            # setting FE into RunMode
            commands.extend(self.register.get_commands("RunMode"))
            self.register_utils.send_commands(commands)
            step = 0
            while True:
                if self.stop_run.is_set():
                    break
                self.histogram.reset()
                step += 1
                logging.info('Step %d / %d at Vthin_AltFine %d', step, steps, reg_val)
                logging.info('Estimated scan time: %ds', self.total_scan_time)

                with self.readout(Vthin_AltFine=reg_val, Step=step, reset_sram_fifo=True, fill_buffer=True, clear_buffer=True, callback=self.handle_data):
                    got_data = False
                    start = time()
                    self.register_utils.send_command(lvl1_command, repeat=self.n_triggers, wait_for_finish=False, set_length=True, clear_memory=False)
                    while not self.stop_run.wait(0.1):
                        if self.register_utils.is_ready:
                            if got_data:
                                self.progressbar.finish()
                            logging.info('Finished sending %d triggers', self.n_triggers)
                            break
                        if not got_data:
                            if self.fifo_readout.data_words_per_second() > 0:
                                got_data = True
                                logging.info('Taking data...')
                                self.progressbar = progressbar.ProgressBar(widgets=['', progressbar.Percentage(), ' ', progressbar.Bar(marker='*', left='|', right='|'), ' ', progressbar.Timer()], maxval=self.total_scan_time, poll=10, term_width=80).start()
                        else:
                            try:
                                self.progressbar.update(time() - start)
                            except ValueError:
                                pass
                # Use fast C++ hit histogramming to save time
                raw_data = np.ascontiguousarray(data_array_from_data_iterable(self.fifo_readout.data), dtype=np.uint32)
                self.interpreter.interpret_raw_data(raw_data)
                self.interpreter.store_event()  # force to create latest event
                self.histogram.add_hits(self.interpreter.get_hits())
                occ_hist = self.histogram.get_occupancy()[:, :, 0]
                # noisy pixels are set to 1
                occ_mask = np.zeros(shape=occ_hist.shape, dtype=np.dtype('>u1'))
                occ_mask[occ_hist > self.abs_occ_limit] = 1

                tdac_reg = self.register.get_pixel_register_value('TDAC')
                decrease_pixel_mask = np.logical_and(occ_mask > 0, tdac_reg > 0)
                disable_pixel_mask = np.logical_and(occ_mask > 0, tdac_reg == 0)
                enable_reg = self.register.get_pixel_register_value('Enable')
                enable_mask = np.logical_and(enable_reg, invert_pixel_mask(disable_pixel_mask))
                if np.logical_and(occ_mask > 0, enable_reg == 0).sum():
                    logging.warning('Received data from disabled pixels')
#                     disabled_pixels += disable_pixel_mask.sum()  # can lead to wrong values if the enable reg is corrupted
                disabled_pixels = invert_pixel_mask(enable_mask).sum() - preselected_pixels
                if disabled_pixels > disabled_pixels_limit_cnt:
                    logging.info('Limit of disabled pixels reached: %d (limit %d)... stopping scan' % (disabled_pixels, disabled_pixels_limit_cnt))
                    break
                else:
                    logging.info('Increasing threshold of %d pixel(s)', decrease_pixel_mask.sum())
                    logging.info('Disabling %d pixel(s), total number of disabled pixel(s): %d', disable_pixel_mask.sum(), disabled_pixels)
                    tdac_reg[decrease_pixel_mask] -= 1
                    self.register.set_pixel_register_value('TDAC', tdac_reg)
                    self.register.set_pixel_register_value('Enable', enable_mask)
                    commands = []
                    commands.extend(self.register.get_commands("ConfMode"))
                    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name='TDAC'))
                    commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name='Enable'))
                    commands.extend(self.register.get_commands("RunMode"))
                    self.register_utils.send_commands(commands)
                    if occ_mask.sum() == 0 or step == steps or decrease_pixel_mask.sum() < disabled_pixels_limit_cnt:
                        self.last_reg_val.appendleft(reg_val)
                        self.last_step.appendleft(step)
                        self.last_good_threshold.appendleft(self.register.get_global_register_value("Vthin_AltFine"))
                        self.last_good_tdac.appendleft(self.register.get_pixel_register_value("TDAC"))
                        self.last_good_enable_mask.appendleft(self.register.get_pixel_register_value("Enable"))
                        self.last_occupancy_hist.appendleft(occ_hist.copy())
                        self.last_occupancy_mask.appendleft(occ_mask.copy())
                        break
                    else:
                        logging.info('Found %d noisy pixels... repeat tuning step for Vthin_AltFine %d', occ_mask.sum(), reg_val)

            if disabled_pixels > disabled_pixels_limit_cnt or scan_parameter_range[1] == reg_val:
                break

    def analyze(self):
        self.register.set_global_register_value("Vthin_AltFine", self.last_good_threshold[self.increase_threshold])
        self.register.set_pixel_register_value('TDAC', self.last_good_tdac[self.increase_threshold])
        self.register.set_pixel_register_value('Enable', self.last_good_enable_mask[0])  # use enable mask from the lowest point to mask bad pixels
        # write configuration to avoid high current states
        commands = []
        commands.extend(self.register.get_commands("ConfMode"))
        commands.extend(self.register.get_commands("WrRegister", name=["Vthin_AltFine"]))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="TDAC"))
        commands.extend(self.register.get_commands("WrFrontEnd", same_mask_for_all_dc=False, name="Enable"))
        self.register_utils.send_commands(commands)

        with AnalyzeRawData(raw_data_file=self.output_filename, create_pdf=True) as analyze_raw_data:
            analyze_raw_data.create_source_scan_hist = True
            analyze_raw_data.interpreter.set_warning_output(False)
            analyze_raw_data.interpret_word_table()
            analyze_raw_data.interpreter.print_summary()
            analyze_raw_data.plot_histograms()
            plot_occupancy(self.last_occupancy_hist[self.increase_threshold].T, title='Noisy Pixels at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), filename=analyze_raw_data.output_pdf)
            plot_fancy_occupancy(self.last_occupancy_hist[self.increase_threshold].T, filename=analyze_raw_data.output_pdf)
            plot_occupancy(self.last_occupancy_mask[self.increase_threshold].T, title='Occupancy Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), z_max=1, filename=analyze_raw_data.output_pdf)
            plot_fancy_occupancy(self.last_occupancy_mask[self.increase_threshold].T, filename=analyze_raw_data.output_pdf)
            plot_three_way(self.last_good_tdac[self.increase_threshold].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), x_axis_title="TDAC", filename=analyze_raw_data.output_pdf, maximum=31, bins=32)
            plot_occupancy(self.last_good_tdac[self.increase_threshold].T, title='TDAC at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), z_max=31, filename=analyze_raw_data.output_pdf)
            plot_occupancy(self.last_good_enable_mask[self.increase_threshold].T, title='Intermediate Enable Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[self.increase_threshold], self.last_step[self.increase_threshold]), z_max=1, filename=analyze_raw_data.output_pdf)
            plot_fancy_occupancy(self.last_good_enable_mask[self.increase_threshold].T, filename=analyze_raw_data.output_pdf)
            plot_occupancy(self.last_good_enable_mask[0].T, title='Final Enable Mask at Vthin_AltFine %d Step %d' % (self.last_reg_val[0], self.last_step[0]), z_max=1, filename=analyze_raw_data.output_pdf)
            plot_fancy_occupancy(self.last_good_enable_mask[0].T, filename=analyze_raw_data.output_pdf)
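
Like other pyBAR scan classes, ThresholdBaselineTuning is normally launched through a run manager rather than instantiated directly. A minimal sketch, assuming pyBAR's RunManager entry point and a placeholder configuration file name:

# Assumed pyBAR RunManager usage; import path and configuration file name are placeholders.
if __name__ == '__main__':
    from pybar.run_manager import RunManager

    runmngr = RunManager('configuration.yaml')  # placeholder path to the pyBAR configuration
    runmngr.run_run(run=ThresholdBaselineTuning)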