def __init__( self, # NW_SPECS, RU_CONFIG=[], EMULATOR_MODE=False, DATA_RATE_SLOWDOWN_FACTOR=1, RUN_NUMBER=333, DATA_FILE="./frames.bin", FLX_INPUT=False, SSP_INPUT=True, CLOCK_SPEED_HZ=50000000, RUIDX=0, RAW_RECORDING_ENABLED=False, RAW_RECORDING_OUTPUT_DIR=".", FRONTEND_TYPE='wib', SYSTEM_TYPE='TPC', SOFTWARE_TPG_ENABLED=False, USE_FAKE_DATA_PRODUCERS=False, PARTITION="UNKNOWN", LATENCY_BUFFER_SIZE=499968, HOST="localhost"): """Generate the json configuration for the readout and DF process""" NUMBER_OF_DATA_PRODUCERS = len(RU_CONFIG) cmd_data = {} required_eps = {f'{PARTITION}.timesync_{RUIDX}'} # if not required_eps.issubset([nw.name for nw in NW_SPECS]): # raise RuntimeError(f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}") RATE_KHZ = CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000) MIN_LINK = RU_CONFIG[RUIDX]["start_channel"] MAX_LINK = MIN_LINK + RU_CONFIG[RUIDX]["channel_count"] print( f"ReadoutApp.__init__ with RUIDX={RUIDX}, MIN_LINK={MIN_LINK}, MAX_LINK={MAX_LINK}" ) modules = [] total_link_count = 0 for ru in range(len(RU_CONFIG)): total_link_count += RU_CONFIG[ru]["channel_count"] if SOFTWARE_TPG_ENABLED: connections = {} request_receiver_geoid_map = [] for idx in range(MIN_LINK, MAX_LINK): queue_inst = f"data_requests_{idx}" request_receiver_geoid_map.append( rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"], element=idx, system=SYSTEM_TYPE, queueinstance=queue_inst)) connections[f'output_{idx}'] = Connection( f"datahandler_{idx}.data_requests_0", queue_name=queue_inst) if SOFTWARE_TPG_ENABLED: queue_inst = f"tp_requests_{idx}" request_receiver_geoid_map.append( rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"], element=idx + total_link_count, system=SYSTEM_TYPE, queueinstance=queue_inst)) connections[f'tp_output_{idx}'] = Connection( f"tp_datahandler_{idx}.data_requests_0", queue_name=queue_inst) modules += [ DAQModule(name="request_receiver", plugin="RequestReceiver", connections=connections, conf=rrcv.ConfParams( map=request_receiver_geoid_map, general_queue_timeout=QUEUE_POP_WAIT_MS, connection_name=f"{PARTITION}.datareq_{RUIDX}")) ] for idx in range(MIN_LINK, MAX_LINK): modules += [ DAQModule( name=f"tp_datahandler_{idx}", plugin="DataLinkHandler", connections={}, conf=rconf.Conf( readoutmodelconf=rconf.ReadoutModelConf( source_queue_timeout_ms=QUEUE_POP_WAIT_MS, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx), latencybufferconf=rconf.LatencyBufferConf( latency_buffer_size=LATENCY_BUFFER_SIZE, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx), rawdataprocessorconf=rconf.RawDataProcessorConf( region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx, enable_software_tpg=False), requesthandlerconf=rconf.RequestHandlerConf( latency_buffer_size=LATENCY_BUFFER_SIZE, pop_limit_pct=0.8, pop_size_pct=0.1, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx, # output_file = f"output_{idx + MIN_LINK}.out", stream_buffer_size=100 if FRONTEND_TYPE == 'pacman' else 8388608, enable_raw_recording=False))) ] # modules += [DAQModule(name = f"tpset_publisher", # plugin = "QueueToNetwork", # # connections = {'input': Connection('tpset_queue', Direction.IN)}, # conf = qton.Conf(msg_type="dunedaq::trigger::TPSet", # msg_module_name="TPSetNQ", # sender_config=nos.Conf(name=f"{PARTITION}.tpsets_{RUIDX}", # topic="TPSets", # stype="msgpack")))] if FRONTEND_TYPE == 'wib': modules += [ 
DAQModule(name="errored_frame_consumer", plugin="ErroredFrameConsumer", connections={}) ] # There are two flags to be checked so I think a for loop # is the closest way to the blocks that are being used here for idx in range(MIN_LINK, MAX_LINK): if USE_FAKE_DATA_PRODUCERS: modules += [ DAQModule(name=f"fakedataprod_{idx}", plugin='FakeDataProd', connections={ 'input': Connection(f'data_request_{idx}') }) ] else: connections = {} # connections['raw_input'] = Connection(f"{FRONTEND_TYPE}_link_{idx}", Direction.IN) # connections['data_request_0'] = Connection(f'data_requests_{idx}', Direction.IN) connections['fragment_queue'] = Connection( 'fragment_sender.input_queue') if SOFTWARE_TPG_ENABLED: connections['tp_out'] = Connection( f"tp_datahandler_{idx}.raw_input", queue_name=f"sw_tp_link_{idx}", queue_kind="FollySPSCQueue", queue_capacity=100000) # connections['tpset_out'] = Connection('tpset_queue', Direction.OUT) if FRONTEND_TYPE == 'wib': connections['errored_frames'] = Connection( 'errored_frame_consumer.input_queue') modules += [ DAQModule(name=f"datahandler_{idx}", plugin="DataLinkHandler", connections=connections) ] if not USE_FAKE_DATA_PRODUCERS: if FLX_INPUT: connections = {} for idx in range( MIN_LINK, MIN_LINK + min(5, RU_CONFIG[RUIDX]["channel_count"])): connections[f'output_{idx}'] = Connection( f"datahandler_{idx}.raw_input", queue_name=f'{FRONTEND_TYPE}_link_{idx}', queue_kind="FollySPSCQueue", queue_capacity=100000) modules += [ DAQModule(name='flxcard_0', plugin='FelixCardReader', connections=connections, conf=flxcr.Conf( card_id=RU_CONFIG[RUIDX]["card_id"], logical_unit=0, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=min( 5, RU_CONFIG[RUIDX]["channel_count"]))) ] if RU_CONFIG[RUIDX]["channel_count"] > 5: connections = {} for idx in range(MIN_LINK + 5, MAX_LINK): connections[f'output_{idx}'] = Connection( f"datahandler_{idx}.raw_input", queue_name=f'{FRONTEND_TYPE}_link_{idx}', queue_kind="FollySPSCQueue", queue_capacity=100000) modules += [ DAQModule( name="flxcard_1", plugin="FelixCardReader", connections=connections, conf=flxcr.Conf( card_id=RU_CONFIG[RUIDX]["card_id"], logical_unit=1, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=max( 0, RU_CONFIG[RUIDX]["channel_count"] - 5))) ] elif SSP_INPUT: modules += [ DAQModule(name="ssp_0", plugin="SSPCardReader", connections={ f'output_{idx}': Connection( f"datahandler_{idx}.raw_input", queue_name=f'{FRONTEND_TYPE}_link_{idx}', queue_kind="FollySPSCQueue", queue_capacity=100000) }, conf=flxcr.Conf( card_id=RU_CONFIG[RUIDX]["card_id"], logical_unit=0, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=RU_CONFIG[RUIDX]["channel_count"])) ] else: fake_source = "fake_source" card_reader = "FakeCardReader" conf = sec.Conf( link_confs=[ sec.LinkConfiguration( geoid=sec.GeoID( system=SYSTEM_TYPE, region=RU_CONFIG[RUIDX]["region_id"], element=idx), slowdown=DATA_RATE_SLOWDOWN_FACTOR, queue_name=f"output_{idx}", data_filename=DATA_FILE, emu_frame_error_rate=0) for idx in range(MIN_LINK, MAX_LINK) ], # input_limit=10485100, # default queue_timeout_ms=QUEUE_POP_WAIT_MS) if FRONTEND_TYPE == 'pacman': fake_source = "pacman_source" card_reader = "PacmanCardReader" conf = pcr.Conf(link_confs=[ pcr.LinkConfiguration(geoid=pcr.GeoID( system=SYSTEM_TYPE, region=RU_CONFIG[RUIDX]["region_id"], element=idx)) for idx in range(MIN_LINK, MAX_LINK) ], zmq_receiver_timeout=10000) modules += [ 
DAQModule(name=fake_source, plugin=card_reader, connections={ f'output_{idx}': Connection( f"datahandler_{idx}.raw_input", queue_name=f'{FRONTEND_TYPE}_link_{idx}', queue_kind="FollySPSCQueue", queue_capacity=100000) for idx in range(MIN_LINK, MAX_LINK) }, conf=conf) ] modules += [ DAQModule(name="fragment_sender", plugin="FragmentSender", conf=None) ] mgraph = ModuleGraph(modules) for idx in range(MIN_LINK, MAX_LINK): # TODO: Should we just have one timesync outgoing endpoint? mgraph.add_endpoint(f"timesync_{idx}", f"datahandler_{idx}.timesync", Direction.OUT) if SOFTWARE_TPG_ENABLED: mgraph.add_endpoint(f"tpsets_{idx}", f"datahandler_{idx}.tpset_out", Direction.OUT) mgraph.add_endpoint( f"timesync_{idx+RU_CONFIG[RUIDX]['channel_count']}", f"tp_datahandler_{idx}.timesync", Direction.OUT) # Add fragment producers for raw data mgraph.add_fragment_producer( region=RU_CONFIG[RUIDX]["region_id"], element=idx, system=SYSTEM_TYPE, requests_in=f"datahandler_{idx}.data_requests_0", fragments_out=f"datahandler_{idx}.data_response_0") # Add fragment producers for TPC TPs. Make sure the element index doesn't overlap with the ones for raw data if SOFTWARE_TPG_ENABLED: mgraph.add_fragment_producer( region=RU_CONFIG[RUIDX]["region_id"], element=idx + RU_CONFIG[RUIDX]["channel_count"], system=SYSTEM_TYPE, requests_in=f"tp_datahandler_{idx}.data_requests_0", fragments_out=f"tp_datahandler_{idx}.data_response_0") super().__init__(mgraph, host=HOST) self.export("readout_app.dot")
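# --- Usage sketch (not part of the generator) --------------------------------
# A minimal example of driving the constructor above, assuming the enclosing
# class is named ReadoutApp (as the print() call in __init__ suggests). The
# RU_CONFIG keys mirror the lookups performed above ("start_channel",
# "channel_count", "region_id", "card_id"); the partition, host and data file
# are illustrative assumptions, not values taken from this module.
def _example_build_readout_app():
    ru_config = [
        {"start_channel": 0, "channel_count": 2, "region_id": 0, "card_id": 0},
        {"start_channel": 2, "channel_count": 2, "region_id": 1, "card_id": 1},
    ]
    # ReadoutApp is an assumed class name; __init__ already exports the
    # module graph to readout_app.dot for inspection.
    return ReadoutApp(RU_CONFIG=ru_config,
                      EMULATOR_MODE=True,        # emulated data, no FELIX/SSP hardware
                      DATA_FILE="./frames.bin",
                      FRONTEND_TYPE='wib',
                      SOFTWARE_TPG_ENABLED=False,
                      PARTITION="test_partition",
                      RUIDX=0,
                      HOST="localhost")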
def generate(NETWORK_ENDPOINTS, NUMBER_OF_DATA_PRODUCERS=2, EMULATOR_MODE=False, DATA_RATE_SLOWDOWN_FACTOR=1, RUN_NUMBER=333, DATA_FILE="./frames.bin", OUTPUT_PATH=".", DISABLE_OUTPUT=False, FLX_INPUT=True, TOKEN_COUNT=0, CLOCK_SPEED_HZ=50000000): """Generate the json configuration for the readout and DF process""" cmd_data = {} required_eps = {'trigdec', 'triginh', 'timesync'} if not required_eps.issubset(NETWORK_ENDPOINTS): raise RuntimeError( f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join(NETWORK_ENDPOINTS.keys())}" ) LATENCY_BUFFER_SIZE = 3 * CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR) RATE_KHZ = CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000) # Define modules and queues queue_bare_specs = [ app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100), app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=100), app.QueueSpec( inst="trigger_decision_q", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="trigger_decision_from_netq", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping", kind='FollySPSCQueue', capacity=100), app.QueueSpec( inst="trigger_record_q", kind='FollySPSCQueue', capacity=100), app.QueueSpec( inst="data_fragments_q", kind='FollyMPMCQueue', capacity=1000), ] + [ app.QueueSpec( inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=100) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] + [ app.QueueSpec( inst=f"wib_link_{idx}", kind='FollySPSCQueue', capacity=100000) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] # Only needed to reproduce the same order as when using jsonnet queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst)) mod_specs = [ mspec("ntoq_trigdec", "NetworkToQueue", [ app.QueueInfo( name="output", inst="trigger_decision_from_netq", dir="output") ]), mspec("qton_token", "QueueToNetwork", [app.QueueInfo(name="input", inst="token_q", dir="input")]), mspec("qton_timesync", "QueueToNetwork", [app.QueueInfo(name="input", inst="time_sync_q", dir="input")]), mspec("rqg", "RequestGenerator", [ app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_from_netq", dir="input"), app.QueueInfo(name="trigger_decision_for_event_building", inst="trigger_decision_copy_for_bookkeeping", dir="output"), ] + [ app.QueueInfo(name=f"data_request_{idx}_output_queue", inst=f"data_requests_{idx}", dir="output") for idx in range(NUMBER_OF_DATA_PRODUCERS) ]), mspec("ffr", "FragmentReceiver", [ app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_copy_for_bookkeeping", dir="input"), app.QueueInfo(name="trigger_record_output_queue", inst="trigger_record_q", dir="output"), app.QueueInfo(name="data_fragment_input_queue", inst="data_fragments_q", dir="input"), ]), mspec("datawriter", "DataWriter", [ app.QueueInfo(name="trigger_record_input_queue", inst="trigger_record_q", dir="input"), app.QueueInfo( name="token_output_queue", inst="token_q", dir="output"), ]), ] + [ mspec(f"datahandler_{idx}", "DataLinkHandler", [ app.QueueInfo( name="raw_input", inst=f"wib_link_{idx}", dir="input"), app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"), app.QueueInfo( name="requests", inst=f"data_requests_{idx}", dir="input"), app.QueueInfo( name="fragments", inst="data_fragments_q", dir="output"), ]) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] if FLX_INPUT: mod_specs.append( mspec("flxcard_0", "FelixCardReader", [ app.QueueInfo( name=f"output_{idx}", inst=f"wib_link_{idx}", 
dir="output") for idx in range(0, min(5, NUMBER_OF_DATA_PRODUCERS)) ])) if NUMBER_OF_DATA_PRODUCERS > 5: mod_specs.append( mspec("flxcard_1", "FelixCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(5, NUMBER_OF_DATA_PRODUCERS) ])) else: mod_specs.append( mspec("fake_source", "FakeCardReader", [ app.QueueInfo( name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(NUMBER_OF_DATA_PRODUCERS) ])) cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs) cmd_data['conf'] = acmd([ ("ntoq_trigdec", ntoq.Conf(msg_type="dunedaq::dfmessages::TriggerDecision", msg_module_name="TriggerDecisionNQ", receiver_config=nor.Conf( ipm_plugin_type="ZmqReceiver", address=NETWORK_ENDPOINTS["trigdec"]))), ("qton_token", qton.Conf(msg_type="dunedaq::dfmessages::TriggerDecisionToken", msg_module_name="TriggerDecisionTokenNQ", sender_config=nos.Conf(ipm_plugin_type="ZmqSender", address=NETWORK_ENDPOINTS["triginh"], stype="msgpack"))), ("qton_timesync", qton.Conf(msg_type="dunedaq::dfmessages::TimeSync", msg_module_name="TimeSyncNQ", sender_config=nos.Conf( ipm_plugin_type="ZmqSender", address=NETWORK_ENDPOINTS["timesync"], stype="msgpack"))), ("rqg", rqg.ConfParams(map=rqg.mapgeoidqueue([ rqg.geoidinst( apa=0, link=idx, queueinstance=f"data_requests_{idx}") for idx in range(NUMBER_OF_DATA_PRODUCERS) ]))), ("ffr", ffr.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS)), ( "datawriter", dw.ConfParams( initial_token_count=TOKEN_COUNT, data_store_parameters=hdf5ds.ConfParams( name="data_store", # type = "HDF5DataStore", # default directory_path=OUTPUT_PATH, # default # mode = "all-per-file", # default max_file_size_bytes=1073741824, disable_unique_filename_suffix=False, filename_parameters=hdf5ds.HDF5DataStoreFileNameParams( overall_prefix="swtest", digits_for_run_number=6, file_index_prefix="", digits_for_file_index=4, ), file_layout_parameters=hdf5ds. 
HDF5DataStoreFileLayoutParams( trigger_record_name_prefix="TriggerRecord", digits_for_trigger_number=5, digits_for_apa_number=3, digits_for_link_number=2, )))), ( "fake_source", fakecr.Conf( link_ids=list(range(NUMBER_OF_DATA_PRODUCERS)), # input_limit=10485100, # default rate_khz=RATE_KHZ, raw_type="wib", data_filename=DATA_FILE, queue_timeout_ms=QUEUE_POP_WAIT_MS)), ("flxcard_0", flxcr.Conf(card_id=0, logical_unit=0, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=min(5, NUMBER_OF_DATA_PRODUCERS))), ("flxcard_1", flxcr.Conf(card_id=0, logical_unit=1, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=max(0, NUMBER_OF_DATA_PRODUCERS - 5))), ] + [ ( f"datahandler_{idx}", dlh.Conf( raw_type="wib", emulator_mode=EMULATOR_MODE, # fake_trigger_flag=0, # default source_queue_timeout_ms=QUEUE_POP_WAIT_MS, latency_buffer_size=LATENCY_BUFFER_SIZE, pop_limit_pct=0.8, pop_size_pct=0.1, apa_number=0, link_number=idx)) for idx in range(NUMBER_OF_DATA_PRODUCERS) ]) startpars = rccmd.StartParams(run=RUN_NUMBER, disable_data_storage=DISABLE_OUTPUT) cmd_data['start'] = acmd([ ("qton_token", startpars), ("datawriter", startpars), ("ffr", startpars), ("qton_timesync", startpars), ("datahandler_.*", startpars), ("fake_source", startpars), ("flxcard.*", startpars), ("rqg", startpars), ("ntoq_trigdec", startpars), ]) cmd_data['stop'] = acmd([ ("ntoq_trigdec", None), ("rqg", None), ("flxcard.*", None), ("fake_source", None), ("datahandler_.*", None), ("qton_timesync", None), ("ffr", None), ("datawriter", None), ("qton_token", None), ]) cmd_data['pause'] = acmd([("", None)]) cmd_data['resume'] = acmd([("", None)]) cmd_data['scrap'] = acmd([("", None)]) return cmd_data
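# --- Worked example of the derived constants above (illustrative) ------------
# With the default 50 MHz clock and no slowdown, the formulas in generate()
# evaluate as below. Reading the 25 * 12 factor as 25 ticks per WIB frame
# times 12 frames per superchunk is an assumption about the authors' intent,
# not something stated in this file.
def _example_buffer_sizing(clock_speed_hz=50000000, slowdown=1):
    latency_buffer_size = 3 * clock_speed_hz / (25 * 12 * slowdown)   # 500000.0
    rate_khz = clock_speed_hz / (25 * 12 * slowdown * 1000)           # ~166.67 kHz
    # i.e. the latency buffer holds roughly 3 seconds of superchunks at this rate
    return latency_buffer_size, rate_khz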
def generate(NUMBER_OF_DATA_PRODUCERS=2, EMULATOR_MODE=False, RUN_NUMBER=333, TRIGGER_RATE_HZ=1.0, OUTPUT_PATH=".", DISABLE_OUTPUT=False, TOKEN_COUNT=10): trigger_interval_ticks = math.floor((1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ) # Define modules and queues queue_bare_specs = [ app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100), app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=20), app.QueueSpec( inst="trigger_decision_q", kind='FollySPSCQueue', capacity=20), app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping", kind='FollySPSCQueue', capacity=20), app.QueueSpec( inst="trigger_record_q", kind='FollySPSCQueue', capacity=20), app.QueueSpec( inst="data_fragments_q", kind='FollyMPMCQueue', capacity=100), ] + [ app.QueueSpec( inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=20) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] + [ app.QueueSpec( inst=f"wib_link_{idx}", kind='FollySPSCQueue', capacity=100000) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] # Only needed to reproduce the same order as when using jsonnet queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst)) mod_specs = [ mspec("tde", "TriggerDecisionEmulator", [ app.QueueInfo( name="time_sync_source", inst="time_sync_q", dir="input"), app.QueueInfo(name="token_source", inst="token_q", dir="input"), app.QueueInfo(name="trigger_decision_sink", inst="trigger_decision_q", dir="output"), ]), mspec("rqg", "RequestGenerator", [ app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_q", dir="input"), app.QueueInfo(name="trigger_decision_for_event_building", inst="trigger_decision_copy_for_bookkeeping", dir="output"), ] + [ app.QueueInfo(name=f"data_request_{idx}_output_queue", inst=f"data_requests_{idx}", dir="output") for idx in range(NUMBER_OF_DATA_PRODUCERS) ]), mspec("ffr", "FragmentReceiver", [ app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_copy_for_bookkeeping", dir="input"), app.QueueInfo(name="trigger_record_output_queue", inst="trigger_record_q", dir="output"), app.QueueInfo(name="data_fragment_input_queue", inst="data_fragments_q", dir="input"), ]), mspec("datawriter", "DataWriter", [ app.QueueInfo(name="trigger_record_input_queue", inst="trigger_record_q", dir="input"), app.QueueInfo( name="token_output_queue", inst="token_q", dir="output"), ]), mspec("flxcard_0", "FelixCardReader", [ app.QueueInfo( name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(min(5, NUMBER_OF_DATA_PRODUCERS)) ]), ] + [ mspec(f"datahandler_{idx}", "DataLinkHandler", [ app.QueueInfo( name="raw_input", inst=f"wib_link_{idx}", dir="input"), app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"), app.QueueInfo( name="requests", inst=f"data_requests_{idx}", dir="input"), app.QueueInfo( name="fragments", inst="data_fragments_q", dir="output"), ]) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] if NUMBER_OF_DATA_PRODUCERS > 5: mod_specs.append( mspec("flxcard_1", "FelixCardReader", [ app.QueueInfo( name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(5, NUMBER_OF_DATA_PRODUCERS) ])) init_specs = app.Init(queues=queue_specs, modules=mod_specs) jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True) print(jstr) initcmd = rccmd.RCCommand(id=basecmd.CmdId("init"), entry_state="NONE", exit_state="INITIAL", data=init_specs) if TOKEN_COUNT > 0: df_token_count = 0 trigemu_token_count = TOKEN_COUNT else: df_token_count = -1 * TOKEN_COUNT trigemu_token_count = 0 confcmd = mrccmd( "conf", "INITIAL", 
"CONFIGURED", [ ( "tde", tde.ConfParams( links=[idx for idx in range(NUMBER_OF_DATA_PRODUCERS)], min_links_in_request=NUMBER_OF_DATA_PRODUCERS, max_links_in_request=NUMBER_OF_DATA_PRODUCERS, min_readout_window_ticks=1200, max_readout_window_ticks=1200, trigger_window_offset=1000, # The delay is set to put the trigger well within the latency buff trigger_delay_ticks=math.floor(2 * CLOCK_SPEED_HZ), trigger_interval_ticks=trigger_interval_ticks, clock_frequency_hz=CLOCK_SPEED_HZ, initial_token_count=trigemu_token_count)), ("rqg", rqg.ConfParams(map=rqg.mapgeoidqueue([ rqg.geoidinst( apa=0, link=idx, queueinstance=f"data_requests_{idx}") for idx in range(NUMBER_OF_DATA_PRODUCERS) ]))), ("ffr", ffr.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS)), ( "datawriter", dw.ConfParams( initial_token_count=df_token_count, data_store_parameters=hdf5ds.ConfParams( name="data_store", # type = "HDF5DataStore", # default directory_path=OUTPUT_PATH, # default # mode = "all-per-file", # default max_file_size_bytes=1073741824, filename_parameters=hdf5ds.HDF5DataStoreFileNameParams( overall_prefix="minidaqapp", # digits_for_run_number = 6, #default file_index_prefix="file"), file_layout_parameters=hdf5ds. HDF5DataStoreFileLayoutParams( trigger_record_name_prefix="TriggerRecord", digits_for_trigger_number=5, )))), ("flxcard_0", fcr.Conf(card_id=0, logical_unit=0, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=min(5, NUMBER_OF_DATA_PRODUCERS))), ("flxcard_1", fcr.Conf(card_id=0, logical_unit=1, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=max(0, NUMBER_OF_DATA_PRODUCERS - 5))), ] + [ ( f"datahandler_{idx}", dlh.Conf( raw_type="wib", emulator_mode=EMULATOR_MODE, # fake_trigger_flag=0, # default source_queue_timeout_ms=QUEUE_POP_WAIT_MS, latency_buffer_size=3 * CLOCK_SPEED_HZ / (25 * 12), pop_limit_pct=0.8, pop_size_pct=0.1, apa_number=0, link_number=idx)) for idx in range(NUMBER_OF_DATA_PRODUCERS) ]) jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True) print(jstr) startpars = rccmd.StartParams( run=RUN_NUMBER, trigger_interval_ticks=trigger_interval_ticks, disable_data_storage=DISABLE_OUTPUT) startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [ ("datawriter", startpars), ("ffr", startpars), ("datahandler_.*", startpars), ("flxcard.*", startpars), ("rqg", startpars), ("tde", startpars), ]) jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True) print("=" * 80 + "\nStart\n\n", jstr) stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [ ("tde", None), ("rqg", None), ("flxcard.*", None), ("datahandler_.*", None), ("ffr", None), ("datawriter", None), ]) jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True) print("=" * 80 + "\nStop\n\n", jstr) pausecmd = mrccmd("pause", "RUNNING", "RUNNING", [("", None)]) jstr = json.dumps(pausecmd.pod(), indent=4, sort_keys=True) print("=" * 80 + "\nPause\n\n", jstr) resumecmd = mrccmd( "resume", "RUNNING", "RUNNING", [("tde", tde.ResumeParams(trigger_interval_ticks=trigger_interval_ticks))]) jstr = json.dumps(resumecmd.pod(), indent=4, sort_keys=True) print("=" * 80 + "\nResume\n\n", jstr) scrapcmd = mrccmd("scrap", "CONFIGURED", "INITIAL", [("", None)]) jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True) print("=" * 80 + "\nScrap\n\n", jstr) # Create a list of commands cmd_seq = [ initcmd, confcmd, startcmd, stopcmd, pausecmd, resumecmd, scrapcmd ] # Print them as json (to be improved/moved out) jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, 
                      sort_keys=True)

    return jstr
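# --- Usage sketch (illustrative) ----------------------------------------------
# This variant returns the full command sequence as a single JSON string, so a
# caller only needs to write it out. The output file name is an assumption.
def _example_write_trigger_commands():
    json_string = generate(NUMBER_OF_DATA_PRODUCERS=2,
                           TRIGGER_RATE_HZ=1.0,
                           OUTPUT_PATH=".",
                           TOKEN_COUNT=10)
    with open("minidaq_commands.json", "w") as outfile:
        outfile.write(json_string)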
def generate_df( network_endpoints, NUMBER_OF_DATA_PRODUCERS=2, EMULATOR_MODE=False, DATA_RATE_SLOWDOWN_FACTOR = 1, RUN_NUMBER = 333, TRIGGER_RATE_HZ = 1.0, DATA_FILE="./frames.bin", OUTPUT_PATH=".", DISABLE_OUTPUT=False, FLX_INPUT=True, TOKEN_COUNT=0 ): """Generate the json configuration for the readout and DF process""" trg_interval_ticks = math.floor((1/TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR) # Define modules and queues queue_bare_specs = [ app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100), app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="trigger_decision_q", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="trigger_decision_from_netq", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="trigger_record_q", kind='FollySPSCQueue', capacity=100), app.QueueSpec(inst="data_fragments_q", kind='FollyMPMCQueue', capacity=1000), ] + [ app.QueueSpec(inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=100) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] + [ app.QueueSpec(inst=f"wib_link_{idx}", kind='FollySPSCQueue', capacity=100000) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] # Only needed to reproduce the same order as when using jsonnet queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst)) mod_specs = [ mspec("ntoq_trigdec", "NetworkToQueue", [ app.QueueInfo(name="output", inst="trigger_decision_from_netq", dir="output") ]), mspec("qton_token", "QueueToNetwork", [ app.QueueInfo(name="input", inst="token_q", dir="input") ]), mspec("qton_timesync", "QueueToNetwork", [ app.QueueInfo(name="input", inst="time_sync_q", dir="input") ]), mspec("rqg", "RequestGenerator", [ app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_from_netq", dir="input"), app.QueueInfo(name="trigger_decision_for_event_building", inst="trigger_decision_copy_for_bookkeeping", dir="output"), ] + [ app.QueueInfo(name=f"data_request_{idx}_output_queue", inst=f"data_requests_{idx}", dir="output") for idx in range(NUMBER_OF_DATA_PRODUCERS) ]), mspec("ffr", "FragmentReceiver", [ app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_copy_for_bookkeeping", dir="input"), app.QueueInfo(name="trigger_record_output_queue", inst="trigger_record_q", dir="output"), app.QueueInfo(name="data_fragment_input_queue", inst="data_fragments_q", dir="input"), ]), mspec("datawriter", "DataWriter", [ app.QueueInfo(name="trigger_record_input_queue", inst="trigger_record_q", dir="input"), app.QueueInfo(name="token_output_queue", inst="token_q", dir="output"), ]), ] + [ mspec(f"datahandler_{idx}", "DataLinkHandler", [ app.QueueInfo(name="raw_input", inst=f"wib_link_{idx}", dir="input"), app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"), app.QueueInfo(name="requests", inst=f"data_requests_{idx}", dir="input"), app.QueueInfo(name="fragments", inst="data_fragments_q", dir="output"), ]) for idx in range(NUMBER_OF_DATA_PRODUCERS) ] if FLX_INPUT: mod_specs.append(mspec("flxcard_0", "FelixCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(0,min(5, NUMBER_OF_DATA_PRODUCERS)) ])) if NUMBER_OF_DATA_PRODUCERS>5 : mod_specs.append(mspec("flxcard_1", "FelixCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(5, NUMBER_OF_DATA_PRODUCERS) ])) else: mod_specs.append(mspec("fake_source", 
"FakeCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output") for idx in range(NUMBER_OF_DATA_PRODUCERS) ])) init_specs = app.Init(queues=queue_specs, modules=mod_specs) initcmd = rccmd.RCCommand( id=basecmd.CmdId("init"), entry_state="NONE", exit_state="INITIAL", data=init_specs ) confcmd = mrccmd("conf", "INITIAL", "CONFIGURED",[ ("ntoq_trigdec", ntoq.Conf(msg_type="dunedaq::dfmessages::TriggerDecision", msg_module_name="TriggerDecisionNQ", receiver_config=nor.Conf(ipm_plugin_type="ZmqReceiver", address=network_endpoints["trigdec"]) ) ), ("qton_token", qton.Conf(msg_type="dunedaq::dfmessages::TriggerDecisionToken", msg_module_name="TriggerDecisionTokenNQ", sender_config=nos.Conf(ipm_plugin_type="ZmqSender", address=network_endpoints["triginh"], stype="msgpack") ) ), ("qton_timesync", qton.Conf(msg_type="dunedaq::dfmessages::TimeSync", msg_module_name="TimeSyncNQ", sender_config=nos.Conf(ipm_plugin_type="ZmqSender", address=network_endpoints["timesync"], stype="msgpack") ) ), ("rqg", rqg.ConfParams( map=rqg.mapgeoidqueue([ rqg.geoidinst(apa=0, link=idx, queueinstance=f"data_requests_{idx}") for idx in range(NUMBER_OF_DATA_PRODUCERS) ]) )), ("ffr", ffr.ConfParams( general_queue_timeout=QUEUE_POP_WAIT_MS )), ("datawriter", dw.ConfParams( initial_token_count=TOKEN_COUNT, data_store_parameters=hdf5ds.ConfParams( name="data_store", # type = "HDF5DataStore", # default directory_path = OUTPUT_PATH, # default # mode = "all-per-file", # default max_file_size_bytes = 1073741824, disable_unique_filename_suffix = False, filename_parameters = hdf5ds.HDF5DataStoreFileNameParams( overall_prefix = "swtest", digits_for_run_number = 6, file_index_prefix = "", digits_for_file_index = 4, ), file_layout_parameters = hdf5ds.HDF5DataStoreFileLayoutParams( trigger_record_name_prefix= "TriggerRecord", digits_for_trigger_number = 5, digits_for_apa_number = 3, digits_for_link_number = 2, ) ) )), ("fake_source",fakecr.Conf( link_ids=list(range(NUMBER_OF_DATA_PRODUCERS)), # input_limit=10485100, # default rate_khz = CLOCK_SPEED_HZ/(25*12*DATA_RATE_SLOWDOWN_FACTOR*1000), raw_type = "wib", data_filename = DATA_FILE, queue_timeout_ms = QUEUE_POP_WAIT_MS )), ("flxcard_0",flxcr.Conf( card_id=0, logical_unit=0, dma_id=0, chunk_trailer_size= 32, dma_block_size_kb= 4, dma_memory_size_gb= 4, numa_id=0, num_links=min(5,NUMBER_OF_DATA_PRODUCERS) )), ("flxcard_1",flxcr.Conf( card_id=0, logical_unit=1, dma_id=0, chunk_trailer_size= 32, dma_block_size_kb= 4, dma_memory_size_gb= 4, numa_id=0, num_links=max(0, NUMBER_OF_DATA_PRODUCERS-5) )), ] + [ (f"datahandler_{idx}", dlh.Conf( raw_type = "wib", emulator_mode = EMULATOR_MODE, # fake_trigger_flag=0, # default source_queue_timeout_ms= QUEUE_POP_WAIT_MS, latency_buffer_size = 3*CLOCK_SPEED_HZ/(25*12*DATA_RATE_SLOWDOWN_FACTOR), pop_limit_pct = 0.8, pop_size_pct = 0.1, apa_number = 0, link_number = idx )) for idx in range(NUMBER_OF_DATA_PRODUCERS) ]) startpars = rccmd.StartParams(run=RUN_NUMBER, trigger_interval_ticks=trg_interval_ticks, disable_data_storage=DISABLE_OUTPUT) startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [ ("qton_token", startpars), ("datawriter", startpars), ("ffr", startpars), ("qton_timesync", startpars), ("datahandler_.*", startpars), ("fake_source", startpars), ("flxcard.*", startpars), ("rqg", startpars), ("ntoq_trigdec", startpars), ]) stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [ ("ntoq_trigdec", None), ("rqg", None), ("flxcard.*", None), ("fake_source", None), ("datahandler_.*", None), ("qton_timesync", None), ("ffr", 
                None),
        ("datawriter", None),
        ("qton_token", None),
    ])

    pausecmd = mrccmd("pause", "RUNNING", "RUNNING", [
        ("", None)
    ])

    resumecmd = mrccmd("resume", "RUNNING", "RUNNING", [
        ("tde", tde.ResumeParams(trigger_interval_ticks=trg_interval_ticks))
    ])

    scrapcmd = mrccmd("scrap", "CONFIGURED", "INITIAL", [
        ("", None)
    ])

    # Create a list of commands
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd, pausecmd, resumecmd, scrapcmd]

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)

    return jstr
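# --- Usage sketch (illustrative) ----------------------------------------------
# generate_df() dereferences exactly three endpoint keys ("trigdec", "triginh",
# "timesync"). The tcp:// addresses and the output file name below are
# illustrative, and CLOCK_SPEED_HZ is assumed to be a module-level constant,
# since it is not a parameter of this function.
def _example_generate_df_commands():
    endpoints = {
        "trigdec":  "tcp://127.0.0.1:12345",
        "triginh":  "tcp://127.0.0.1:12346",
        "timesync": "tcp://127.0.0.1:12347",
    }
    df_json = generate_df(endpoints,
                          NUMBER_OF_DATA_PRODUCERS=2,
                          EMULATOR_MODE=True,
                          TRIGGER_RATE_HZ=1.0,
                          FLX_INPUT=False)   # fall back to the FakeCardReader path
    with open("df_commands.json", "w") as outfile:
        outfile.write(df_json)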
def generate(NW_SPECS, RU_CONFIG=[], EMULATOR_MODE=False, DATA_RATE_SLOWDOWN_FACTOR=1, RUN_NUMBER=333, DATA_FILE="./frames.bin", FLX_INPUT=False, SSP_INPUT=True, CLOCK_SPEED_HZ=50000000, RUIDX=0, RAW_RECORDING_ENABLED=False, RAW_RECORDING_OUTPUT_DIR=".", FRONTEND_TYPE='wib', SYSTEM_TYPE='TPC', SOFTWARE_TPG_ENABLED=False, USE_FAKE_DATA_PRODUCERS=False, PARTITION="UNKNOWN", LATENCY_BUFFER_SIZE=499968): """Generate the json configuration for the readout and DF process""" cmd_data = {} required_eps = {f'{PARTITION}.timesync_{RUIDX}'} if not required_eps.issubset([nw.name for nw in NW_SPECS]): raise RuntimeError( f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}" ) RATE_KHZ = CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000) MIN_LINK = RU_CONFIG[RUIDX]["start_channel"] MAX_LINK = MIN_LINK + RU_CONFIG[RUIDX]["channel_count"] # Define modules and queues queue_bare_specs = [ app.QueueSpec( inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=100) for idx in range(MIN_LINK, MAX_LINK) ] + [ app.QueueSpec(inst="fragment_q", kind="FollyMPMCQueue", capacity=100) ] if not USE_FAKE_DATA_PRODUCERS: queue_bare_specs += [ app.QueueSpec(inst=f"{FRONTEND_TYPE}_link_{idx}", kind='FollySPSCQueue', capacity=100000) for idx in range(MIN_LINK, MAX_LINK) ] if SOFTWARE_TPG_ENABLED: queue_bare_specs += [ app.QueueSpec(inst=f"sw_tp_link_{idx}", kind='FollySPSCQueue', capacity=100000) for idx in range(MIN_LINK, MAX_LINK) ] + [ app.QueueSpec( inst=f"tpset_queue", kind='FollyMPMCQueue', capacity=10000) ] + [ app.QueueSpec( inst=f"tp_requests_{idx}", kind='FollySPSCQueue', capacity=100) for idx in range(MIN_LINK, MAX_LINK) ] if FRONTEND_TYPE == 'wib': queue_bare_specs += [ app.QueueSpec(inst="errored_frames_q", kind="FollyMPMCQueue", capacity=10000) ] # Only needed to reproduce the same order as when using jsonnet queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst)) mod_specs = [ mspec(f"fragment_sender", "FragmentSender", [ app.QueueInfo(name="input_queue", inst="fragment_q", dir="input") ]) ] if SOFTWARE_TPG_ENABLED: mod_specs += [ mspec(f"request_receiver", "RequestReceiver", [ app.QueueInfo( name="output", inst=f"data_requests_{idx}", dir="output") for idx in range(MIN_LINK, MAX_LINK) ] + [ app.QueueInfo( name="output", inst=f"tp_requests_{idx}", dir="output") for idx in range(MIN_LINK, MAX_LINK) ]) ] + [ mspec(f"tp_datahandler_{idx}", "DataLinkHandler", [ app.QueueInfo( name="raw_input", inst=f"sw_tp_link_{idx}", dir="input"), app.QueueInfo(name="data_requests_0", inst=f"tp_requests_{idx}", dir="input"), app.QueueInfo( name="fragment_queue", inst="fragment_q", dir="output") ]) for idx in range(MIN_LINK, MAX_LINK) ] + [ mspec(f"tpset_publisher", "QueueToNetwork", [ app.QueueInfo(name="input", inst=f"tpset_queue", dir="input") ]) ] else: mod_specs += [ mspec(f"request_receiver", "RequestReceiver", [ app.QueueInfo( name="output", inst=f"data_requests_{idx}", dir="output") for idx in range(MIN_LINK, MAX_LINK) ]) ] if FRONTEND_TYPE == 'wib': mod_specs += [ mspec("errored_frame_consumer", "ErroredFrameConsumer", [ app.QueueInfo( name="input_queue", inst="errored_frames_q", dir="input") ]) ] # There are two flags to be checked so I think a for loop # is the closest way to the blocks that are being used here for idx in range(MIN_LINK, MAX_LINK): if USE_FAKE_DATA_PRODUCERS: mod_specs = mod_specs + [ mspec(f"fakedataprod_{idx}", "FakeDataProd", [ app.QueueInfo(name="data_request_input_queue", 
inst=f"data_requests_{idx}", dir="input"), ]) ] else: ls = [ app.QueueInfo(name="raw_input", inst=f"{FRONTEND_TYPE}_link_{idx}", dir="input"), app.QueueInfo(name="data_requests_0", inst=f"data_requests_{idx}", dir="input"), app.QueueInfo(name="fragment_queue", inst="fragment_q", dir="output") ] if SOFTWARE_TPG_ENABLED: ls.extend([ app.QueueInfo(name="tp_out", inst=f"sw_tp_link_{idx}", dir="output"), app.QueueInfo(name="tpset_out", inst=f"tpset_queue", dir="output") ]) if FRONTEND_TYPE == 'wib': ls.extend([ app.QueueInfo(name="errored_frames", inst="errored_frames_q", dir="output") ]) mod_specs += [mspec(f"datahandler_{idx}", "DataLinkHandler", ls)] if not USE_FAKE_DATA_PRODUCERS: if FLX_INPUT: mod_specs.append( mspec("flxcard_0", "FelixCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"{FRONTEND_TYPE}_link_{idx}", dir="output") for idx in range( MIN_LINK, MIN_LINK + min(5, RU_CONFIG[RUIDX]["channel_count"])) ])) if RU_CONFIG[RUIDX]["channel_count"] > 5: mod_specs.append( mspec("flxcard_1", "FelixCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"{FRONTEND_TYPE}_link_{idx}", dir="output") for idx in range(MIN_LINK + 5, MAX_LINK) ])) elif SSP_INPUT: mod_specs.append( mspec("ssp_0", "SSPCardReader", [ app.QueueInfo(name=f"output_{idx}", inst=f"{FRONTEND_TYPE}_link_{idx}", dir="output") for idx in range(MIN_LINK, MAX_LINK) ])) else: fake_source = "fake_source" card_reader = "FakeCardReader" if FRONTEND_TYPE == 'pacman': fake_source = "pacman_source" card_reader = "PacmanCardReader" mod_specs.append( mspec(fake_source, card_reader, [ app.QueueInfo(name=f"output_{idx}", inst=f"{FRONTEND_TYPE}_link_{idx}", dir="output") for idx in range(MIN_LINK, MAX_LINK) ])) cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs, nwconnections=NW_SPECS) total_link_count = 0 for ru in range(len(RU_CONFIG)): total_link_count += RU_CONFIG[ru]["channel_count"] conf_list = [ ( "fake_source", sec.Conf( link_confs=[ sec.LinkConfiguration( geoid=sec.GeoID(system=SYSTEM_TYPE, region=RU_CONFIG[RUIDX]["region_id"], element=idx), slowdown=DATA_RATE_SLOWDOWN_FACTOR, queue_name=f"output_{idx}", data_filename=DATA_FILE, emu_frame_error_rate=0, ) for idx in range(MIN_LINK, MAX_LINK) ], # input_limit=10485100, # default queue_timeout_ms=QUEUE_POP_WAIT_MS)), ("pacman_source", pcr.Conf(link_confs=[ pcr.LinkConfiguration(geoid=pcr.GeoID( system=SYSTEM_TYPE, region=RU_CONFIG[RUIDX]["region_id"], element=idx), ) for idx in range(MIN_LINK, MAX_LINK) ], zmq_receiver_timeout=10000)), ("flxcard_0", flxcr.Conf(card_id=RU_CONFIG[RUIDX]["card_id"], logical_unit=0, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=min(5, RU_CONFIG[RUIDX]["channel_count"]))), ("flxcard_1", flxcr.Conf(card_id=RU_CONFIG[RUIDX]["card_id"], logical_unit=1, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=max(0, RU_CONFIG[RUIDX]["channel_count"] - 5))), ("ssp_0", flxcr.Conf(card_id=RU_CONFIG[RUIDX]["card_id"], logical_unit=0, dma_id=0, chunk_trailer_size=32, dma_block_size_kb=4, dma_memory_size_gb=4, numa_id=0, num_links=RU_CONFIG[RUIDX]["channel_count"])), ] + [ ("request_receiver", rrcv.ConfParams(map=[ rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"], element=idx, system=SYSTEM_TYPE, queueinstance=f"data_requests_{idx}") for idx in range(MIN_LINK, MAX_LINK) ] + [ rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"], element=idx + total_link_count, system=SYSTEM_TYPE, queueinstance=f"tp_requests_{idx}") for idx in range(MIN_LINK, MAX_LINK) 
if SOFTWARE_TPG_ENABLED ], general_queue_timeout=QUEUE_POP_WAIT_MS, connection_name=f"{PARTITION}.datareq_{RUIDX}")) ] + [ ( f"datahandler_{idx}", rconf.Conf( readoutmodelconf=rconf.ReadoutModelConf( source_queue_timeout_ms=QUEUE_POP_WAIT_MS, # fake_trigger_flag=0, # default region_id=RU_CONFIG[RUIDX]["region_id"], element_id=idx, timesync_connection_name=f"{PARTITION}.timesync_{RUIDX}", timesync_topic_name="Timesync", ), latencybufferconf=rconf.LatencyBufferConf( latency_buffer_alignment_size=4096, latency_buffer_size=LATENCY_BUFFER_SIZE, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=idx, ), rawdataprocessorconf=rconf.RawDataProcessorConf( region_id=RU_CONFIG[RUIDX]["region_id"], element_id=idx, enable_software_tpg=SOFTWARE_TPG_ENABLED, emulator_mode=EMULATOR_MODE, error_counter_threshold=100, error_reset_freq=10000), requesthandlerconf=rconf.RequestHandlerConf( latency_buffer_size=LATENCY_BUFFER_SIZE, pop_limit_pct=0.8, pop_size_pct=0.1, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=idx, output_file=path.join(RAW_RECORDING_OUTPUT_DIR, f"output_{RUIDX}_{idx}.out"), stream_buffer_size=8388608, enable_raw_recording=RAW_RECORDING_ENABLED, ))) for idx in range(MIN_LINK, MAX_LINK) ] + [ ( f"tp_datahandler_{idx}", rconf.Conf( readoutmodelconf=rconf.ReadoutModelConf( source_queue_timeout_ms=QUEUE_POP_WAIT_MS, # fake_trigger_flag=0, default region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx, ), latencybufferconf=rconf.LatencyBufferConf( latency_buffer_size=LATENCY_BUFFER_SIZE, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx, ), rawdataprocessorconf=rconf.RawDataProcessorConf( region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx, enable_software_tpg=False, ), requesthandlerconf=rconf.RequestHandlerConf( latency_buffer_size=LATENCY_BUFFER_SIZE, pop_limit_pct=0.8, pop_size_pct=0.1, region_id=RU_CONFIG[RUIDX]["region_id"], element_id=total_link_count + idx, # output_file = f"output_{idx + MIN_LINK}.out", stream_buffer_size=100 if FRONTEND_TYPE == 'pacman' else 8388608, enable_raw_recording=False, ))) for idx in range(MIN_LINK, MAX_LINK) ] if SOFTWARE_TPG_ENABLED: conf_list.extend([(f"tpset_publisher", qton.Conf(msg_type="dunedaq::trigger::TPSet", msg_module_name="TPSetNQ", sender_config=nos.Conf( name=f"{PARTITION}.tpsets_{RUIDX}", topic="TPSets", stype="msgpack")))]) if USE_FAKE_DATA_PRODUCERS: conf_list.extend([ (f"fakedataprod_{idx}", fdp.ConfParams( system_type=SYSTEM_TYPE, apa_number=RU_CONFIG[RUIDX]["region_id"], link_number=idx, time_tick_diff=25, frame_size=464, response_delay=0, timesync_connection_name=f"{PARTITION}.timesync_{RUIDX}", timesync_topic_name="Timesync", fragment_type="FakeData")) for idx in range(MIN_LINK, MAX_LINK) ]) conf_list.extend([("fragment_sender", None)]) cmd_data['conf'] = acmd(conf_list) startpars = rccmd.StartParams(run=RUN_NUMBER) cmd_data['start'] = acmd([("datahandler_.*", startpars), ("fake_source", startpars), ("pacman_source", startpars), ("flxcard.*", startpars), ("request_receiver", startpars), ("ssp.*", startpars), ("ntoq_trigdec", startpars), (f"tp_datahandler_.*", startpars), (f"tpset_publisher", startpars), ("fakedataprod_.*", startpars), ("fragment_sender", startpars), ("errored_frame_consumer", startpars)]) cmd_data['stop'] = acmd([("request_receiver", None), ("flxcard.*", None), ("ssp.*", None), ("fake_source", None), ("pacman_source", None), ("datahandler_.*", None), (f"tp_datahandler_.*", None), (f"tpset_publisher", None), ("fakedataprod_.*", None), 
                             ("fragment_sender", None),
                             ("errored_frame_consumer", None)])

    cmd_data['pause'] = acmd([("", None)])
    cmd_data['resume'] = acmd([("", None)])
    cmd_data['scrap'] = acmd([("", None)])
    cmd_data['record'] = acmd([("", None)])

    return cmd_data
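# --- Usage sketch (illustrative) ----------------------------------------------
# The returned cmd_data maps each run-control transition to a command object.
# Assuming (as for the Init/RCCommand objects elsewhere in this file) that
# these objects expose a .pod() method, they can be written straight to JSON.
# nw_specs and ru_config are placeholders for structures the caller prepares;
# the file-name pattern is an assumption.
def _example_dump_readout_commands(nw_specs, ru_config):
    cmd_data = generate(nw_specs,
                        RU_CONFIG=ru_config,
                        RUIDX=0,
                        PARTITION="test_partition")
    for cmd_name in ("init", "conf", "start", "stop",
                     "pause", "resume", "scrap", "record"):
        with open(f"readout_{cmd_name}.json", "w") as outfile:
            json.dump(cmd_data[cmd_name].pod(), outfile, indent=4, sort_keys=True)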