Example #1
    def __init__(self,
                 configuration,
                 data_queue,
                 symbol_pipe,
                 loglevel='INFO'):
        self.data_queue = data_queue
        self.symbol_pipe = symbol_pipe
        utils.setup_logging(loglevel)
        logging.info("Initialize converter manager with configuration in %s",
                     configuration)
        self.configuration = utils.parse_config_file(configuration)
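
The configuration argument is the path to a YAML file (Example #2 passes 'tpx3_monitor.yaml'). A minimal sketch of what a loader like utils.parse_config_file could look like, assuming it is a thin YAML wrapper; the real helper may do more:

import yaml

def parse_config_file(path):
    # Assumption: plain YAML load; the project's utils.parse_config_file
    # may perform additional validation
    with open(path) as f:
        return yaml.safe_load(f)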
Example #2
    def start_converter(self):
        current_path = os.path.dirname(
            os.path.dirname(os.path.abspath(__file__)))
        conv_utils.setup_logging('INFO')
        cm = ConverterManager(configuration=os.path.join(current_path,
                                                         'tpx3_monitor.yaml'),
                              data_queue=self.data_queue,
                              symbol_pipe=self.pipe_dest_conn)
        self.converter_process = Process(target=cm.start,
                                         name='TPX3 Converter')
        self.pipe_source_conn.send(True)
        self.converter_process.start()
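
start_converter relies on a data queue and a pipe pair created elsewhere in the host class. A minimal sketch of that setup, with the host class name hypothetical and the attribute names inferred from the usage above:

from multiprocessing import Pipe, Queue

class Tpx3Monitor:  # hypothetical host class for start_converter
    def __init__(self):
        self.data_queue = Queue()  # transports converted data
        # Control pipe: the source end stays here, the destination
        # end is handed to the ConverterManager
        self.pipe_source_conn, self.pipe_dest_conn = Pipe()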
Example #3
    def __init__(self,
                 configuration,
                 path,
                 loglevel='INFO',
                 delay=0.1,
                 kind='Test',
                 name='Undefined'):
        utils.setup_logging(loglevel)
        logging.info("Initialize producer simulation manager")
        self.configuration = utils.parse_config_file(configuration)
        self.data_file = path
        self.loglevel = loglevel
        self.delay = delay
        self.kind = kind
        self.name = name
Example #4
    def run(self):
        # Receiver loop running in an extra process; called after the
        # start() method
        utils.setup_logging(self.loglevel)
        logging.debug("Start %s producer %s at %s", self.kind, self.name,
                      self.backend_address)

        self.setup_producer_device()

        while not self.exit.wait(0.02):
            self.send_data()

        # Close connections
        self.sender.close()
        self.context.term()
        logging.debug("Close %s producer %s at %s", self.kind, self.name,
                      self.backend_address)
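
The loop exits once self.exit is set from the parent process. A sketch of a matching stop method, assuming the multiprocessing.Event pattern above; the name stop is an assumption:

    def stop(self):  # hypothetical counterpart to run()
        self.exit.set()  # run() leaves its loop on the next wait()
        self.join()  # block until run() has closed its sockets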
Example #5
    def __init__(self,
                 frontend,
                 kind,
                 data_queue,
                 symbol_pipe,
                 name='Undefined',
                 max_buffer=None,
                 loglevel='INFO',
                 **kwarg):
        multiprocessing.Process.__init__(self)

        self.kind = kind  # kind of transceiver (e.g. forwarder)
        self.frontend_address = frontend  # socket facing a data publisher
        # Maximum number of input messages buffered, otherwise data omitted
        self.max_buffer = max_buffer
        self.name = name  # name of the DAQ/device
        # Default setting is unidirectional frontend communication
        self.frontend_socket_type = zmq.SUB

        self.data_queue = data_queue
        self.symbol_pipe = symbol_pipe
        self.run_data_queue_symbol = True

        if 'max_cpu_load' in kwarg:
            logging.warning(
                'The parameter max_cpu_load is deprecated! Use max_buffer!')

        self.config = kwarg

        # Determine how many frontends the converter has
        if not isinstance(self.frontend_address, list):
            # Just one frontend socket given
            self.frontend_address = [self.frontend_address]
            self.n_frontends = 1
        else:
            self.n_frontends = len(self.frontend_address)

        self.exit = multiprocessing.Event()  # exit signal

        self.loglevel = loglevel
        utils.setup_logging(self.loglevel)

        logging.debug("Initialize %s converter %s with frontends %s ",
                      self.kind, self.name, self.frontend_address)
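
A hypothetical instantiation of this converter; the class name Transceiver and the socket address are illustrative only:

import multiprocessing

queue = multiprocessing.Queue()
pipe_recv, pipe_send = multiprocessing.Pipe(duplex=False)
converter = Transceiver(frontend='tcp://127.0.0.1:5500',  # data publisher
                        kind='forwarder',
                        data_queue=queue,
                        symbol_pipe=pipe_send,
                        name='TPX3',
                        max_buffer=100)
converter.start()  # multiprocessing.Process.start() invokes run()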
Example #6
    def setup_interpretation(self):
        ''' Objects defined here are available in the interpretation process '''
        utils.setup_logging(self.loglevel)

        self.chunk_size = self.config.get('chunk_size', 1000000)

        # Init result hists
        self.reset_hists()

        # Number of readouts to integrate
        self.int_readouts = 0

        # Variables for meta data time calculations
        self.ts_last_readout = 0.  # Time stamp of the last readout
        self.hits_last_readout = 0.  # Number of hits in the last readout
        self.events_last_readout = 0.  # Number of events in the last chunk
        self.fps = 0.  # Readouts per second
        self.hps = 0.  # Hits per second
        self.eps = 0.  # Events per second
        self.ext_trg_num = -1  # external trigger number
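
The bookkeeping variables suggest a per-readout rate update. A sketch of how fps, hps and eps could be derived; the method name and formulas are assumptions, not from the source:

    def update_rates(self, ts_readout, n_hits, n_events):
        dt = ts_readout - self.ts_last_readout
        if dt > 0:
            self.fps = 1.0 / dt  # readouts per second
            self.hps = n_hits / dt  # hits per second
            self.eps = n_events / dt  # events per second
        self.ts_last_readout = ts_readout
        self.hits_last_readout = n_hits
        self.events_last_readout = n_events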
Example #7
    def __init__(self,
                 backend,
                 data_file,
                 delay=0.1,
                 kind='Test',
                 name='Undefined',
                 loglevel='INFO'):
        multiprocessing.Process.__init__(self)

        self.backend_address = backend
        self.name = name  # name of the DAQ/device
        self.kind = kind
        self.delay = delay
        self.data_file = data_file

        self.loglevel = loglevel
        self.exit = multiprocessing.Event()  # exit signal
        utils.setup_logging(loglevel)

        logging.debug("Initialize %s producer %s at %s", self.kind, self.name,
                      self.backend_address)
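
A hypothetical usage of this simulation producer; the class name ProducerSim, the address and the file name are illustrative:

producer = ProducerSim(backend='tcp://127.0.0.1:5600',
                       data_file='raw_data.h5',  # recorded data to replay
                       delay=0.1,  # seconds between sent chunks
                       kind='tpx3_sim',
                       name='TPX3')
producer.start()  # spawns the process; run() then replays data_file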
Example #8
    def run(self):  # The receiver loop running in an extra process
        utils.setup_logging(self.loglevel)
        self._setup_transceiver()
        self.setup_interpretation()

        process = psutil.Process(self.ident)  # access this process info
        self.cpu_load = 0.

        fe_thread = threading.Thread(target=self.recv_data)
        fe_thread.start()

        logging.debug("Start %s transceiver %s", self.kind, self.name)
        while not self.exit.wait(0.01):
            if self.raw_data.empty():
                continue
            raw_data = self.raw_data.get_nowait()

            actual_cpu_load = process.cpu_percent()
            # Filter cpu load by running mean since it changes rapidly;
            # cpu load spikes can be filtered away since data queues up
            # through ZMQ
            self.cpu_load = 0.90 * self.cpu_load + 0.1 * actual_cpu_load
            # If too many messages are already queued up, omit the data
            if not self.max_buffer or self.max_buffer > self.raw_data.qsize():
                self.interpret_data(raw_data)
            else:
                logging.warning(
                    'Converter cannot keep up, omitting data for interpretation!'
                )

        self.fe_stop.set()
        fe_thread.join()
        # Close connections
        for actual_frontend in self.frontends:
            actual_frontend[1].close()
        self.context.term()

        logging.debug("Close %s transceiver %s", self.kind, self.name)