def __init__(self, frontend, backend, kind, name='Undefined', max_cpu_load=100, loglevel='INFO', **kwarg):
    ''' Converter process connecting data publishers (frontends) to data receivers (backends).

    frontend: address or list of addresses of sockets facing data publishers
    backend: address or list of addresses of sockets facing data receivers
    kind: kind of transceiver (e.g. forwarder)
    name: name of the DAQ/device
    max_cpu_load: maximum CPU load in percent allowed, otherwise data is omitted
    loglevel: logging level name
    kwarg: additional implementation-specific configuration given in the yaml file
    '''
    multiprocessing.Process.__init__(self)
    self.kind = kind
    self.frontend_address = frontend
    self.backend_address = backend
    self.max_cpu_load = max_cpu_load
    self.name = name
    self.frontend_socket_type = zmq.SUB  # std. setting is unidirectional frontend communication
    self.backend_socket_type = zmq.PUB  # std. setting is unidirectional backend communication
    self.config = kwarg
    # Normalize addresses to lists so one or many frontends/backends are
    # handled uniformly; a single address yields a one-element list
    if not isinstance(self.frontend_address, list):
        self.frontend_address = [self.frontend_address]
    self.n_frontends = len(self.frontend_address)
    if not isinstance(self.backend_address, list):
        self.backend_address = [self.backend_address]
    self.n_backends = len(self.backend_address)
    self.exit = multiprocessing.Event()  # exit signal
    self.loglevel = loglevel
    utils.setup_logging(self.loglevel)
    self.setup_transceiver()
    logging.debug("Initialize %s converter %s with frontends %s and backends %s", self.kind, self.name, self.frontend_address, self.backend_address)
def setup_interpretation(self):
    ''' Objects defined here are available in interpretation process '''
    utils.setup_logging(self.loglevel)
    self.chunk_size = self.config.get('chunk_size', 1000000)  # max raw data words interpreted per chunk
    self.analyze_tdc = self.config.get('analyze_tdc', False)  # whether TDC words are analyzed as well
    # self.rx_id = int(self.config.get('rx', 'rx0')[2])
    # Mask pixels that have a higher occupancy than noisy_threshold * the median of all firing pixels
    self.noisy_threshold = self.config.get('noisy_threshold', 3)
    self.mask_noisy_pixel = False  # masking is off until explicitly enabled
    # Init result hists
    self.reset_hists()
    # Number of readouts to integrate
    self.int_readouts = 0
    # Variables for meta data time calculations
    self.ts_last_readout = 0.  # Time stamp last readout
    self.hits_last_readout = 0.  # Number of hits
    self.triggers_last_readout = 0.  # Number of trigger words
    self.fps = 0.  # Readouts per second
    self.hps = 0.  # Hits per second
    self.tps = 0.  # Triggers per second
    self.total_trigger_words = 0  # running total of trigger words seen
    # self.trigger_id = -1  # Last chunk trigger id
    # self.ext_trg_num = -1  # external trigger number
    self.last_rawdata = None  # Leftover from last chunk
    self.interpreter = RawDataInterpreter()  # raw data decoder used per chunk
def __init__(self, config_file, loglevel='INFO'):
    ''' Set up the online monitor main window from a yaml configuration file.

    config_file: path to the configuration; must define at least one receiver
    loglevel: logging level name
    '''
    super(OnlineMonitorApplication, self).__init__()
    utils.setup_logging(loglevel)
    logging.debug("Initialize online monitor with configuration in %s", config_file)
    # expect_receiver=True: a monitor without receivers would show nothing
    self.configuration = utils.parse_config_file(config_file, expect_receiver=True)
    self.setup_style()
    self.setup_widgets()
    self.receivers = self.start_receivers()
def main():  # pragma: no cover, cannot be tested in unittests due to qt event loop
    ''' Parse command line arguments and start the Qt online monitor application. '''
    args = utils.parse_arguments()
    utils.setup_logging(args.log)
    app = Qt.QApplication(sys.argv)
    win = OnlineMonitorApplication(args.config_file)  # enter remote IP to connect to the other side listening
    win.show()
    # Block in the Qt event loop; propagate its exit code to the shell
    sys.exit(app.exec_())
def __init__(self, backend, kind='Test', name='Undefined', loglevel='INFO', **kwarg):
    ''' Producer process publishing data on a backend socket.

    backend: address of the socket the producer publishes data on
    kind: kind of producer
    name: name of the DAQ/device
    loglevel: logging level name
    kwarg: additional implementation-specific configuration
    '''
    multiprocessing.Process.__init__(self)
    self.backend_address = backend
    self.name = name  # name of the DAQ/device
    self.kind = kind
    self.config = kwarg
    self.loglevel = loglevel
    self.exit = multiprocessing.Event()  # exit signal
    utils.setup_logging(loglevel)
    logging.info("Initialize %s producer %s at %s", self.kind, self.name, self.backend_address)
def run(self):  # The receiver loop running in extra process; is called after start() method
    ''' Send data periodically until the exit event is set, then close the sockets. '''
    utils.setup_logging(self.loglevel)
    logging.info("Start %s producer %s at %s", self.kind, self.name, self.backend_address)
    self.setup_producer_device()
    # exit.wait() doubles as the send period (~50 Hz) and as the stop condition
    while not self.exit.wait(0.02):
        self.send_data()
    # Close connections
    self.sender.close()
    self.context.term()
    logging.info("Close %s producer %s at %s", self.kind, self.name, self.backend_address)
def run(self):  # the Receiver loop run in extra process
    ''' Main conversion loop: raw data and commands are received by two helper
    threads feeding queues; this loop drains the raw data queue, interprets the
    data and publishes the result until the exit event is set. '''
    utils.setup_logging(self.loglevel)
    self._setup_transceiver()
    self.setup_interpretation()
    process = psutil.Process(self.ident)  # access this process info
    self.cpu_load = 0.
    # Command and data reception run in separate threads so slow
    # interpretation does not block the sockets
    be_thread = threading.Thread(target=self.recv_commands)
    be_thread.start()
    fe_thread = threading.Thread(target=self.recv_data)
    fe_thread.start()
    logging.debug("Start %s transceiver %s at %s", self.kind, self.name, self.backend_address)
    while not self.exit.wait(0.01):
        if self.raw_data.empty():
            continue
        else:
            # get_nowait() is safe here: this loop is the only consumer of the queue
            raw_data = self.raw_data.get_nowait()
        actual_cpu_load = process.cpu_percent()
        # Filter cpu load by running mean since it changes rapidly;
        # cpu load spikes can be filtered away since data queues up
        # through ZMQ
        self.cpu_load = 0.90 * self.cpu_load + 0.1 * actual_cpu_load
        # Check if already too many messages queued up then omit data
        if not self.max_buffer or self.max_buffer > self.raw_data.qsize():
            data = self.interpret_data(raw_data)
            # Data is None if the data cannot be converted
            # (e.g. is incomplete, broken, etc.)
            if data is not None and len(data) != 0:
                self.send_data(data)
        else:
            logging.warning('Converter cannot keep up, omitting data for interpretation!')
    # Stop the helper threads before tearing down the sockets they use
    self.be_stop.set()
    be_thread.join()
    self.fe_stop.set()
    fe_thread.join()
    # Close connections
    for actual_frontend in self.frontends:
        actual_frontend[1].close()
    for actual_backend in self.backends:
        actual_backend[1].close()
    self.context.term()
    logging.debug(
        "Close %s transceiver %s at %s", self.kind, self.name, self.backend_address)
def run(self):  # The receiver loop running in extra process; is called after start() method
    ''' Producer main loop executed in the spawned process.

    Publishes data until the exit event is set, then tears down the
    ZMQ socket and context. '''
    utils.setup_logging(self.loglevel)
    logging.info("Start %s producer %s at %s", self.kind, self.name, self.backend_address)
    self.setup_producer_device()
    # The wait() call paces the loop (~50 Hz) and returns True on shutdown
    while True:
        if self.exit.wait(0.02):
            break
        self.send_data()
    # Tear down ZMQ resources
    self.sender.close()
    self.context.term()
    logging.info("Close %s producer %s at %s", self.kind, self.name, self.backend_address)
def __init__(self, frontend, kind, name='Undefined', max_cpu_load=100, loglevel='INFO', **kwarg):
    ''' Receiver showing converted data in the online monitor GUI.

    frontend: address of the socket the receiver subscribes to
    kind: kind of receiver
    name: name of the DAQ/device
    max_cpu_load: maximum CPU load in percent
    loglevel: logging level name
    kwarg: additional implementation-specific configuration
    '''
    QtCore.QObject.__init__(self)
    self.kind = kind
    self.frontend_address = frontend
    self.max_cpu_load = max_cpu_load
    self.name = name  # name of the DAQ/device
    self.config = kwarg
    self._active = False  # flag to tell receiver if it is active (viewed in the foreground)
    self.socket_type = zmq.SUB  # standard is unidirectional communication with PUB/SUB pattern
    # NOTE: removed redundant no-op self-assignment of self.frontend_address
    utils.setup_logging(loglevel)
    logging.debug("Initialize %s receiver %s at %s", self.kind, self.name, self.frontend_address)
    self.setup_receiver_device()
    self.setup_receiver()
def main():
    ''' Entry point for the tpx3 online monitor.

    Falls back to the default configuration shipped next to this file
    when no configuration yaml is given on the command line. '''
    if not sys.argv[1:]:
        # No config yaml provided -> start online monitor with std. settings
        class Dummy(object):
            def __init__(self):
                folder = os.path.dirname(os.path.realpath(__file__))
                self.config_file = os.path.join(folder, r'tpx3_monitor.yaml')
                self.log = 'INFO'
        args = Dummy()
        logging.info('No configuration file provided! Use std. settings!')
    else:
        args = utils.parse_arguments()
    utils.setup_logging(args.log)
    # Start the converter
    run_script_in_shell('', args.config_file, 'start_online_monitor')
def run(self):  # the receiver loop
    ''' Main conversion loop: poll for raw data and commands, interpret the
    data and forward it, unless the CPU load limit is exceeded. '''
    utils.setup_logging(self.loglevel)
    self._setup_transceiver()
    self.setup_interpretation()
    process = psutil.Process(self.ident)  # access this process info
    self.cpu_load = 0.
    logging.debug("Start %s transceiver %s at %s", self.kind, self.name, self.backend_address)
    while not self.exit.wait(0.01):
        raw_data = self.recv_data()
        commands = self.recv_commands()
        if commands:
            self.handle_command(commands)
        if not raw_data:  # read again if no raw data is read
            continue
        actual_cpu_load = process.cpu_percent()
        # Filter cpu load by running mean since it changes rapidly; cpu load spikes can be filtered away since data queues up through ZMQ
        # FIXME: To use a high water mark would be a better solution
        self.cpu_load = 0.90 * self.cpu_load + 0.1 * actual_cpu_load
        if not self.max_cpu_load or self.cpu_load < self.max_cpu_load:  # check if already too much CPU is used by the conversion, then omit data
            data = self.interpret_data(raw_data)
            if data is not None and len(data) != 0:  # data is None if the data cannot be converted (e.g. is incomplete, broken, etc.)
                self.send_data(data)
        else:
            logging.warning('CPU load of %s converter %s is with %1.2f > %1.2f too high, omit data!', self.kind, self.name, self.cpu_load, self.max_cpu_load)
    # Close connections
    for actual_frontend in self.frontends:
        actual_frontend[1].close()
    for actual_backend in self.backends:
        actual_backend[1].close()
    self.context.term()
    logging.debug("Close %s transceiver %s at %s", self.kind, self.name, self.backend_address)
def main():
    ''' Start the online monitor.

    Without a configuration file on the command line, a demo using the
    bundled example producer/converter/receiver is shown instead. '''
    if sys.argv[1:]:
        args = utils.parse_arguments()
    else:
        # Add examples folder to entity search paths to be able to show the
        # DEMO using the examples shipped with the online_monitor package
        package_path = os.path.dirname(online_monitor.__file__)
        install_root = os.path.dirname(os.path.realpath(package_path))
        settings.add_producer_sim_path(os.path.abspath(os.path.join(install_root + r'/examples/producer_sim')))
        settings.add_converter_path(os.path.abspath(os.path.join(install_root + r'/examples/converter')))
        settings.add_receiver_path(os.path.abspath(os.path.join(install_root + r'/examples/receiver')))

        class Dummy(object):
            def __init__(self):
                self.config_file = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)) + r'/configuration.yaml'))
                self.log = 'INFO'
        args = Dummy()
        logging.warning('No configuration file provided! Show a demo of the online monitor!')
    utils.setup_logging(args.log)
    # Spawn helper processes: a simulation producer creating fake data and a converter
    producer_sim_process = run_script_in_shell('', args.config_file, 'start_producer_sim')
    converter_manager_process = run_script_in_shell('', args.config_file, 'start_converter')

    def appExec():
        ''' Run the Qt event loop, then stop the helper processes. '''
        app.exec_()
        for helper in (producer_sim_process, converter_manager_process):
            try:
                kill(helper)
            except psutil.NoSuchProcess:
                # If the process was never started it cannot be killed
                pass
    # Start the online monitor window
    app = Qt.QApplication(sys.argv)
    win = OnlineMonitorApplication(args.config_file)
    win.show()
    sys.exit(appExec())
def __init__(self, frontend, backend, kind, name='Undefined', max_buffer=None, loglevel='INFO', **kwarg):
    ''' Converter process connecting data publishers (frontends) to data receivers (backends).

    frontend: address or list of addresses of sockets facing data publishers
    backend: address or list of addresses of sockets facing data receivers
    kind: kind of transceiver (e.g. forwarder)
    name: name of the DAQ/device
    max_buffer: maximum number of input messages buffered, otherwise data omitted
    loglevel: logging level name
    kwarg: additional implementation-specific configuration given in the yaml file
    '''
    multiprocessing.Process.__init__(self)
    self.kind = kind
    self.frontend_address = frontend
    self.backend_address = backend
    # Maximum number of input messages buffered, otherwise data omitted
    self.max_buffer = max_buffer
    self.name = name
    # Std. setting is unidirectional frontend communication
    self.frontend_socket_type = zmq.SUB
    # Std. setting is unidirectional backend communication
    self.backend_socket_type = zmq.PUB
    if 'max_cpu_load' in kwarg:
        logging.warning('The parameter max_cpu_load is deprecated! Use max_buffer!')
    self.config = kwarg
    # Normalize addresses to lists so one or many frontends/backends are
    # handled uniformly; a single address yields a one-element list
    if not isinstance(self.frontend_address, list):
        self.frontend_address = [self.frontend_address]
    self.n_frontends = len(self.frontend_address)
    if not isinstance(self.backend_address, list):
        self.backend_address = [self.backend_address]
    self.n_backends = len(self.backend_address)
    self.exit = multiprocessing.Event()  # exit signal
    self.loglevel = loglevel
    utils.setup_logging(self.loglevel)
    self.setup_transceiver()
    logging.debug("Initialize %s converter %s with frontends %s "
                  "and backends %s", self.kind, self.name,
                  self.frontend_address, self.backend_address)
def __init__(self, frontend, backend, kind, name='Undefined', max_cpu_load=100, loglevel='INFO', **kwarg):
    ''' Converter process connecting data publishers (frontends) to data receivers (backends).

    frontend: address or list of addresses of sockets facing data publishers
    backend: address or list of addresses of sockets facing data receivers
    kind: kind of transceiver (e.g. forwarder)
    name: name of the DAQ/device
    max_cpu_load: maximum CPU load in percent allowed, otherwise data is omitted
    loglevel: logging level name
    kwarg: additional implementation-specific configuration given in the yaml file
    '''
    multiprocessing.Process.__init__(self)
    self.kind = kind
    self.frontend_address = frontend
    self.backend_address = backend
    self.max_cpu_load = max_cpu_load
    self.name = name
    self.frontend_socket_type = zmq.SUB  # std. setting is unidirectional frontend communication
    self.backend_socket_type = zmq.PUB  # std. setting is unidirectional backend communication
    self.config = kwarg
    # Normalize addresses to lists so one or many frontends/backends are
    # handled uniformly; a single address yields a one-element list
    if not isinstance(self.frontend_address, list):
        self.frontend_address = [self.frontend_address]
    self.n_frontends = len(self.frontend_address)
    if not isinstance(self.backend_address, list):
        self.backend_address = [self.backend_address]
    self.n_backends = len(self.backend_address)
    self.exit = multiprocessing.Event()  # exit signal
    self.loglevel = loglevel
    utils.setup_logging(self.loglevel)
    self.setup_transceiver()
    logging.debug(
        "Initialize %s converter %s with frontends %s and backends %s",
        self.kind, self.name, self.frontend_address, self.backend_address)
def setup_interpretation(self):
    ''' Objects defined here are available in interpretation process '''
    utils.setup_logging(self.loglevel)
    self.chunk_size = self.config.get('chunk_size', 1000000)  # max raw data words interpreted per chunk
    # Init result hists
    self.reset_hists()
    # Number of readouts to integrate
    self.int_readouts = 0
    # Variables for meta data time calculations
    self.ts_last_readout = 0.  # Time stamp last readout
    self.hits_last_readout = 0.  # Number of hits
    self.events_last_readout = 0.  # Number of events in last chunk
    self.fps = 0.  # Readouts per second
    self.hps = 0.  # Hits per second
    self.eps = 0.  # Events per second
    self.ext_trg_num = -1  # external trigger number
def main():
    ''' Start the online monitor with producer simulation and converter.

    Falls back to the default configuration shipped next to this file
    when no configuration yaml is given on the command line. '''
    if sys.argv[1:]:
        args = utils.parse_arguments()
    else:  # no config yaml provided -> start online monitor with std. settings
        class Dummy(object):
            def __init__(self):
                folder = os.path.dirname(os.path.realpath(__file__))
                self.config_file = os.path.join(folder, r'configuration.yaml')
                self.log = 'INFO'
        args = Dummy()
        logging.info('No configuration file provided! Use std. settings!')
    utils.setup_logging(args.log)
    # Start the producer
    producer_manager_process = run_script_in_shell('', args.config_file, 'start_producer_sim')
    # Start the converter
    converter_manager_process = run_script_in_shell('', args.config_file, 'start_converter')

    # Helper function to run code after OnlineMonitor Application exit
    def appExec():
        app.exec_()
        # Stop other processes; guard each kill() separately so that a
        # converter that was never started does not prevent the producer
        # from being stopped (previously both kills shared one try block)
        try:
            kill(converter_manager_process)
        except psutil.NoSuchProcess:  # If the process was never started it cannot be killed
            pass
        try:
            kill(producer_manager_process)
        except psutil.NoSuchProcess:
            pass
    # Start the online monitor
    app = Qt.QApplication(sys.argv)
    win = OnlineMonitorApplication(args.config_file)
    win.show()
    sys.exit(appExec())
def run(self):  # the receiver loop
    ''' Receive raw data and commands until the exit event is set.

    Received raw data is interpreted and forwarded to the backends unless
    the conversion already consumes more CPU than allowed. '''
    utils.setup_logging(self.loglevel)
    self._setup_transceiver()
    self.setup_interpretation()
    own_process = psutil.Process(self.ident)  # handle to query this process' CPU usage
    self.cpu_load = 0.
    logging.debug("Start %s transceiver %s at %s", self.kind, self.name, self.backend_address)
    while not self.exit.wait(0.01):
        raw_data = self.recv_data()
        commands = self.recv_commands()
        if commands:
            self.handle_command(commands)
        if not raw_data:
            # Nothing received, poll again
            continue
        # Running mean smooths the rapidly changing CPU load; short spikes
        # are harmless since data queues up through ZMQ
        # FIXME: To use a high water mark would be a better solution
        self.cpu_load = 0.90 * self.cpu_load + 0.1 * own_process.cpu_percent()
        if self.max_cpu_load and self.cpu_load >= self.max_cpu_load:
            # Conversion already uses too much CPU, drop this chunk
            logging.warning('CPU load of %s converter %s is with %1.2f > %1.2f too high, omit data!', self.kind, self.name, self.cpu_load, self.max_cpu_load)
            continue
        data = self.interpret_data(raw_data)
        # data is None if it cannot be converted (e.g. is incomplete, broken, etc.)
        if data is not None and len(data) != 0:
            self.send_data(data)
    # Close connections
    for fe in self.frontends:
        fe[1].close()
    for be in self.backends:
        be[1].close()
    self.context.term()
    logging.debug("Close %s transceiver %s at %s", self.kind, self.name, self.backend_address)
def main():
    ''' Parse command line arguments and run the producer simulation manager. '''
    args = utils.parse_arguments()
    utils.setup_logging(args.log)
    cm = ProducerSimManager(args.config_file)
    cm.start()  # blocking function, returns on SIGTERM signal
def main():
    ''' Start the online monitor; without a configuration file a demo using
    the bundled example producer/converter/receiver is shown. '''
    # If no configuration file is provided show a demo of the online monitor
    if sys.argv[1:]:
        args = utils.parse_arguments()
    else:
        # Add examples folder to entity search paths to be able to show DEMO using the examples
        package_path = os.path.dirname(
            online_monitor.__file__
        )  # Get the absolute path of the online_monitor installation
        settings.add_producer_sim_path(
            os.path.abspath(
                os.path.join(
                    os.path.dirname(os.path.realpath(package_path)) +
                    r'/examples/producer_sim')))
        settings.add_converter_path(
            os.path.abspath(
                os.path.join(
                    os.path.dirname(os.path.realpath(package_path)) +
                    r'/examples/converter')))
        settings.add_receiver_path(
            os.path.abspath(
                os.path.join(
                    os.path.dirname(os.path.realpath(package_path)) +
                    r'/examples/receiver')))

        class Dummy(object):
            # Stand-in for the parsed command line arguments of the demo
            def __init__(self):
                self.config_file = os.path.abspath(
                    os.path.join(
                        os.path.dirname(os.path.realpath(__file__)) +
                        r'/configuration.yaml'))
                self.log = 'INFO'
        args = Dummy()
        logging.warning(
            'No configuration file provided! Show a demo of the online monitor!'
        )
    utils.setup_logging(args.log)
    # Start the simulation producer to create some fake data
    producer_sim_process = run_script_in_shell('', args.config_file, 'start_producer_sim')
    # Start the converter
    converter_manager_process = run_script_in_shell('', args.config_file, 'start_converter')

    # Helper function to run code after OnlineMonitor Application exit
    def appExec():
        app.exec_()
        # Stop other processes
        try:
            kill(producer_sim_process)
        except psutil.NoSuchProcess:  # If the process was never started it cannot be killed
            pass
        try:
            kill(converter_manager_process)
        except psutil.NoSuchProcess:  # If the process was never started it cannot be killed
            pass
    # Start the online monitor
    app = Qt.QApplication(sys.argv)
    win = OnlineMonitorApplication(args.config_file)
    win.show()
    sys.exit(appExec())
def __init__(self, configuration, loglevel='INFO'):
    ''' Manager steering all converter processes defined in the configuration.

    configuration: path to the yaml configuration file
    loglevel: logging level name
    '''
    utils.setup_logging(loglevel)
    # Typo fixed in log message: 'mananager' -> 'manager'
    logging.info("Initialize converter manager with configuration in %s", configuration)
    self.configuration = utils.parse_config_file(configuration)
def main():
    ''' Parse command line arguments and run the converter manager. '''
    args = utils.parse_arguments()
    utils.setup_logging(args.log)
    cm = ConverterManager(args.config_file)
    cm.start()  # blocking function, returns on SIGTERM signal