Example #1
# Assumed imports for this converter (module paths as in the pyBAR / online_monitor
# environment; adjust to the actual installation):
import numpy as np
from zmq.utils import jsonapi

from online_monitor.converter.transceiver import Transceiver
from online_monitor.utils import utils
from pybar_fei4_interpreter.data_interpreter import PyDataInterpreter


class PybarFEI4(Transceiver):

    def setup_interpretation(self):
        self.interpreter = PyDataInterpreter()
        self.interpreter.set_warning_output(False)

    def deserialze_data(self, data):  # According to pyBAR data serialization
        try:
            self.meta_data = jsonapi.loads(data)
        except ValueError:
            try:
                dtype = self.meta_data.pop('dtype')
                shape = self.meta_data.pop('shape')
                if self.meta_data:
                    try:
                        raw_data_array = np.frombuffer(buffer(data), dtype=dtype).reshape(shape)
                        return raw_data_array
                    except (KeyError, ValueError):  # KeyError happens if the meta data read is omitted; ValueError if np.frombuffer fails due to a wrong shape
                        return None
            except AttributeError:  # Happens if first data is not meta data
                return None
        return {'meta_data': self.meta_data}

    def interpret_data(self, data):
        if isinstance(data[0][1], dict):  # meta data message: forward it with hit/event counts added; only raw data is interpreted below
            # Add info to meta data
            data[0][1]['meta_data'].update({'n_hits': self.interpreter.get_n_hits(), 'n_events': self.interpreter.get_n_events()})
            return [data[0][1]]

        self.interpreter.interpret_raw_data(data[0][1])

        interpreted_data = {
            'hits': self.interpreter.get_hits(),
            'tdc_counters': self.interpreter.get_tdc_counters(),
            'error_counters': self.interpreter.get_error_counters(),
            'service_records_counters': self.interpreter.get_service_records_counters(),
            'trigger_error_counters': self.interpreter.get_trigger_error_counters()
        }

        self.interpreter.reset_histograms()  # For the summing of histograms the histogrammer converter is used
        return [interpreted_data]

    def serialze_data(self, data):
        return jsonapi.dumps(data, cls=utils.NumpyEncoder)
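

# Illustration (an assumption, not part of the original source): deserialze_data()
# above expects pyBAR's two-part serialization, i.e. first a JSON meta-data
# dictionary carrying the 'dtype' and 'shape' of the readout buffer (plus further
# entries such as timestamps), then the raw buffer itself as a second message.
# A minimal sender-side sketch of that format, with a made-up address and payload:
import numpy as np
import zmq

context = zmq.Context()
socket = context.socket(zmq.PUB)
socket.bind('tcp://127.0.0.1:5678')  # example address only

raw_data = np.zeros(16, dtype=np.uint32)  # placeholder readout words
meta_data = {
    'name': 'ReadoutData',        # the DataWorker below dispatches on this key
    'timestamp_start': 0.0,       # illustrative extra meta data
    'dtype': str(raw_data.dtype),
    'shape': raw_data.shape,
}
socket.send_json(meta_data)      # first message: the JSON meta data
socket.send(raw_data.tobytes())  # second message: the raw data buffer

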
# Assumed imports for this worker (Qt binding and module paths depend on the setup):
from threading import Event, Lock

import numpy as np
import zmq
from PyQt4 import QtCore  # the monitor GUI may equally use PyQt5 or pyqtgraph's QtCore
from pybar_fei4_interpreter.data_interpreter import PyDataInterpreter
from pybar_fei4_interpreter.data_histograming import PyDataHistograming


class DataWorker(QtCore.QObject):
    run_start = QtCore.pyqtSignal()
    run_config_data = QtCore.pyqtSignal(dict)
    global_config_data = QtCore.pyqtSignal(dict)
    filename = QtCore.pyqtSignal(dict)
    interpreted_data = QtCore.pyqtSignal(dict)
    meta_data = QtCore.pyqtSignal(dict)
    finished = QtCore.pyqtSignal()

    def __init__(self):
        QtCore.QObject.__init__(self)
        self.integrate_readouts = 1
        self.n_readout = 0
        self._stop_readout = Event()
        self.setup_raw_data_analysis()
        self.reset_lock = Lock()

    def setup_raw_data_analysis(self):
        self.interpreter = PyDataInterpreter()
        self.histogram = PyDataHistograming()
        self.interpreter.set_warning_output(False)
        self.histogram.set_no_scan_parameter()
        self.histogram.create_occupancy_hist(True)
        self.histogram.create_rel_bcid_hist(True)
        self.histogram.create_tot_hist(True)
        self.histogram.create_tdc_hist(True)
        try:
            self.histogram.create_tdc_distance_hist(True)
            self.interpreter.use_tdc_trigger_time_stamp(True)
        except AttributeError:
            self.has_tdc_distance = False
        else:
            self.has_tdc_distance = True

    def connect(self, socket_addr):
        self.socket_addr = socket_addr
        self.context = zmq.Context()
        self.socket_pull = self.context.socket(zmq.SUB)  # subscriber
        self.socket_pull.setsockopt(zmq.SUBSCRIBE, '')  # do not filter any data
        self.socket_pull.connect(self.socket_addr)

    def on_set_integrate_readouts(self, value):
        self.integrate_readouts = value

    def reset(self):
        with self.reset_lock:
            self.histogram.reset()
            self.interpreter.reset()
            self.n_readout = 0

    def analyze_raw_data(self, raw_data):
        self.interpreter.interpret_raw_data(raw_data)
        self.histogram.add_hits(self.interpreter.get_hits())

    def process_data(self):  # infinite loop via QObject.moveToThread(), does not block the event loop
        while not self._stop_readout.wait(0.01):  # use wait(), do not block here
            with self.reset_lock:
                try:
                    meta_data = self.socket_pull.recv_json(flags=zmq.NOBLOCK)
                except zmq.Again:
                    pass
                else:
                    name = meta_data.pop('name')
                    if name == 'ReadoutData':
                        data = self.socket_pull.recv()
                        # reconstruct numpy array
                        buf = buffer(data)
                        dtype = meta_data.pop('dtype')
                        shape = meta_data.pop('shape')
                        data_array = np.frombuffer(buf, dtype=dtype).reshape(shape)
                        # count readouts and reset
                        self.n_readout += 1
                        if self.integrate_readouts != 0 and self.n_readout % self.integrate_readouts == 0:
                            self.histogram.reset()
                            # we do not want to reset interpreter to keep the error counters
                            # self.interpreter.reset()
                        # interpreted data
                        self.analyze_raw_data(data_array)
                        if self.integrate_readouts == 0 or self.n_readout % self.integrate_readouts == self.integrate_readouts - 1:
                            interpreted_data = {
                                'occupancy': self.histogram.get_occupancy(),
                                'tot_hist': self.histogram.get_tot_hist(),
                                'tdc_counters': self.interpreter.get_tdc_counters(),
                                'tdc_distance': self.interpreter.get_tdc_distance() if self.has_tdc_distance else np.zeros((256,), dtype=np.uint8),
                                'error_counters': self.interpreter.get_error_counters(),
                                'service_records_counters': self.interpreter.get_service_records_counters(),
                                'trigger_error_counters': self.interpreter.get_trigger_error_counters(),
                                'rel_bcid_hist': self.histogram.get_rel_bcid_hist()
                            }
                            self.interpreted_data.emit(interpreted_data)
                        # meta data
                        meta_data.update({'n_hits': self.interpreter.get_n_hits(), 'n_events': self.interpreter.get_n_events()})
                        self.meta_data.emit(meta_data)
                    elif name == 'RunConf':
                        self.run_config_data.emit(meta_data)
                    elif name == 'GlobalRegisterConf':
                        trig_count = int(meta_data['conf']['Trig_Count'])
                        self.interpreter.set_trig_count(trig_count)
                        self.global_config_data.emit(meta_data)
                    elif name == 'Reset':
                        self.histogram.reset()
                        self.interpreter.reset()
                        self.run_start.emit()
                    elif name == 'Filename':
                        self.filename.emit(meta_data)
        self.finished.emit()

    def stop(self):
        self._stop_readout.set()
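

# Usage sketch (an assumption, not from the original source): the comment in
# process_data() states that the worker is driven via QObject.moveToThread(), so
# the polling loop runs in its own Qt thread without blocking the GUI event loop.
# The address, timing and receiving slot below are illustrative only; QtCore and
# DataWorker are taken from the code above.
def _print_interpreted_data(data):  # placeholder slot for the emitted histograms
    print('interpreted data keys: %s' % sorted(data.keys()))


if __name__ == '__main__':
    app = QtCore.QCoreApplication([])  # the real monitor runs a full QApplication/GUI

    worker = DataWorker()
    worker.connect('tcp://127.0.0.1:5678')  # example address of the data publisher

    thread = QtCore.QThread()
    worker.moveToThread(thread)
    thread.started.connect(worker.process_data)  # start polling once the thread runs
    worker.interpreted_data.connect(_print_interpreted_data)
    worker.finished.connect(thread.quit)  # the thread exits after worker.stop()
    worker.finished.connect(app.quit)

    thread.start()
    QtCore.QTimer.singleShot(5000, worker.stop)  # stop this sketch after 5 s
    app.exec_()
    thread.wait()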