Example #1
    def __init__(self,
                 RUN_NUMBER: int,
                 NW_SPECS: list,
                 TIMING_CMD_NETWORK_ENDPOINTS: set,
                 GATHER_INTERVAL=1e6,
                 GATHER_INTERVAL_DEBUG=10e6,
                 HSI_DEVICE_NAME="",
                 CONNECTIONS_FILE="${TIMING_SHARE}/config/etc/connections.xml",
                 UHAL_LOG_LEVEL="notice",
                 PARTITION="UNKNOWN"):
        """
        { item_description }
        """
        cmd_data = {}
        ## TODO: Everything?
        required_eps = TIMING_CMD_NETWORK_ENDPOINTS
        if not required_eps.issubset([nw.name for nw in NW_SPECS]):
            raise RuntimeError(f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}")
    
        # Define modules and queues
        queue_bare_specs = [app.QueueSpec(inst="ntoq_timing_cmds", kind='FollyMPMCQueue', capacity=100),]

        # Only needed to reproduce the same order as when using jsonnet
        queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst))
        thi_init_data = thi.InitParams(qinfos=app.QueueInfos([app.QueueInfo(name="hardware_commands_in", inst="ntoq_timing_cmds", dir="input")]),
                                       connections_file=CONNECTIONS_FILE,
                                       gather_interval=GATHER_INTERVAL,
                                       gather_interval_debug=GATHER_INTERVAL_DEBUG,
                                       monitored_device_name_master="",
                                       monitored_device_names_fanout=[],
                                       monitored_device_name_endpoint="",
                                       monitored_device_name_hsi=HSI_DEVICE_NAME,
                                       uhal_log_level=UHAL_LOG_LEVEL)

        modules = {}
        modules["thi"] = Module("TimingHardwareManagerPDI")
        mod_specs = [app.ModSpec(inst="thi", plugin="TimingHardwareManagerPDI", data=thi_init_data),]
        for cmd_nw_endpoint in TIMING_CMD_NETWORK_ENDPOINTS:
            mod_specs.extend([mspec(f'ntoq_{cmd_nw_endpoint}', "NetworkToQueue", [app.QueueInfo(name="output", inst="ntoq_timing_cmds", dir="output")]),])
                
        cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs, nwconnections=NW_SPECS)
        
    
        conf_cmds = []
        for cmd_nw_endpoint in TIMING_CMD_NETWORK_ENDPOINTS:
            conf_cmds.extend([(f'ntoq_{cmd_nw_endpoint}', ntoq.Conf(msg_type="dunedaq::timinglibs::timingcmd::TimingHwCmd",
                                                   msg_module_name="TimingHwCmdNQ",
                                                   receiver_config=nor.Conf(name=cmd_nw_endpoint))),])
        mgraph = ModuleGraph(modules)
        super().__init__(modulegraph=mgraph, host=HOST)
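The endpoint-presence check in this example is plain set logic and can be exercised standalone; a minimal sketch with a stand-in spec type (NwSpec here is hypothetical, mimicking only the .name attribute the check reads):

from collections import namedtuple

NwSpec = namedtuple("NwSpec", "name")  # stand-in for the real network spec objects

nw_specs = [NwSpec("timing_cmds"), NwSpec("hsievents")]
required_eps = {"timing_cmds"}
# issubset accepts any iterable, so a generator of names works directly:
assert required_eps.issubset(nw.name for nw in nw_specs)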
Example #2
def get_wib_app(nickname, endpoint, version, host="localhost"):
    '''
    Generate an entire application consisting of a single (Proto)WIBConfigurator module.
    '''

    # Define modules

    modules = []

    if version == 1:
        modules += [
            DAQModule(name=nickname,
                      plugin='ProtoWIBConfigurator',
                      conf=protowib.WIBConf(
                          wib_addr=endpoint,
                          settings=protowib.WIBSettings(
                              femb1=protowib.FEMBSettings(),
                              femb2=protowib.FEMBSettings(),
                              femb3=protowib.FEMBSettings(),
                              femb4=protowib.FEMBSettings())))
        ]
    else:
        modules += [
            DAQModule(name=nickname,
                      plugin='WIBConfigurator',
                      conf=wib.WIBConf(wib_addr=endpoint,
                                       settings=wib.WIBSettings(
                                           femb0=wib.FEMBSettings(),
                                           femb1=wib.FEMBSettings(),
                                           femb2=wib.FEMBSettings(),
                                           femb3=wib.FEMBSettings())))
        ]

    mgraph = ModuleGraph(modules)
    wib_app = App(modulegraph=mgraph, host=host, name=nickname)

    return wib_app
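A minimal usage sketch (DAQModule, ModuleGraph, App, protowib and wib are assumed to be imported from the dunedaq configuration packages, as in the snippet above; the endpoints and host are illustrative values only):

wib1_app = get_wib_app("wib101", "tcp://192.168.0.1:1234", version=1)  # ProtoWIBConfigurator
wib2_app = get_wib_app("wib201", "tcp://192.168.0.2:1234", version=2,  # WIBConfigurator
                       host="np04-srv-001")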
Example #3
    def __init__(self,
                 # NW_SPECS: list,
                 RUN_NUMBER=333,
                 CLOCK_SPEED_HZ: int=50000000,
                 DATA_RATE_SLOWDOWN_FACTOR: int=1,
                 TRIGGER_RATE_HZ: int=1,
                 HSI_DEVICE_ID: int=0,
                 MEAN_SIGNAL_MULTIPLICITY: int=0,
                 SIGNAL_EMULATION_MODE: int=0,
                 ENABLED_SIGNALS: int=0b00000001,
                 PARTITION="UNKNOWN",
                 HOST="localhost"):
        
        trigger_interval_ticks = 0
        required_eps = {PARTITION + '.hsievent'}
        # if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        #     raise RuntimeError(f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}")

        if TRIGGER_RATE_HZ > 0:
            trigger_interval_ticks = math.floor((1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR)

        modules = []
        modules += [DAQModule(name = 'fhsig',
                           plugin = "FakeHSIEventGenerator",
                           conf =  fhsig.Conf(clock_frequency=CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR,
                                              # timestamp_offset=???,
                                              # hsi_device_id=???,
                                              trigger_interval_ticks=trigger_interval_ticks,
                                              mean_signal_multiplicity=MEAN_SIGNAL_MULTIPLICITY,
                                              signal_emulation_mode=SIGNAL_EMULATION_MODE,
                                              enabled_signals=ENABLED_SIGNALS,
                                              hsievent_connection_name=PARTITION+".hsievent"))]
    
        mgraph = ModuleGraph(modules)
        mgraph.add_endpoint("time_sync", "fhsig.time_sync_source", Direction.IN)
        mgraph.add_endpoint("hsievents", "fhsig.hsievent_sink",    Direction.OUT)
        super().__init__(modulegraph=mgraph, host=HOST, name="FakeHSIApp")
        self.export("fake_hsi_app.dot")
Example #4
    def __init__(
            self,
            # NW_SPECS,
            FRAGMENT_PRODUCERS,
            RU_CONFIG=[],
            RUN_NUMBER=333,
            OUTPUT_PATH=".",
            TOKEN_COUNT=0,
            SYSTEM_TYPE="TPC",
            SOFTWARE_TPG_ENABLED=False,
            TPSET_WRITING_ENABLED=False,
            PARTITION="UNKNOWN",
            OPERATIONAL_ENVIRONMENT="swtest",
            TPC_REGION_NAME_PREFIX="APA",
            HOST="localhost",
            MAX_FILE_SIZE=4 * 1024 * 1024 * 1024):
        """Generate the json configuration for the readout and DF process"""

        required_eps = {PARTITION + '.trigdec', PARTITION + '.triginh'}
        # if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        #     raise RuntimeError(f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}")

        modules = []
        total_link_count = sum(ru["channel_count"] for ru in RU_CONFIG)

        modules += [
            DAQModule(
                name='trigdec_receiver',
                plugin='TriggerDecisionReceiver',
                connections={'output': Connection('trb.trigger_decision_q')},
                conf=tdrcv.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS,
                                      connection_name=PARTITION + ".trigdec")),
            DAQModule(
                name='fragment_receiver',
                plugin='FragmentReceiver',
                connections={'output': Connection('trb.data_fragments_q')},
                conf=frcv.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS,
                                     connection_name=PARTITION + ".frags_0")),
            DAQModule(
                name='trb',
                plugin='TriggerRecordBuilder',
                connections={
                    'trigger_record_output_queue':
                    Connection('datawriter.trigger_record_q')
                },
                conf=trb.ConfParams(
                    general_queue_timeout=QUEUE_POP_WAIT_MS,
                    reply_connection_name=PARTITION + ".frags_0",
                    map=trb.mapgeoidconnections([
                        trb.geoidinst(
                            region=RU_CONFIG[ru]["region_id"],
                            element=idx + RU_CONFIG[ru]["start_channel"],
                            system=SYSTEM_TYPE,
                            connection_name=f"{PARTITION}.datareq_{ru}")
                        for ru in range(len(RU_CONFIG))
                        for idx in range(RU_CONFIG[ru]["channel_count"])
                    ] + ([
                        trb.geoidinst(
                            region=RU_CONFIG[ru]["region_id"],
                            element=idx +
                            RU_CONFIG[ru]["start_channel"] + total_link_count,
                            system=SYSTEM_TYPE,
                            connection_name=f"{PARTITION}.datareq_{ru}")
                        for ru in range(len(RU_CONFIG))
                        for idx in range(RU_CONFIG[ru]["channel_count"])
                    ] if SOFTWARE_TPG_ENABLED else []) + ([
                        trb.geoidinst(
                            region=RU_CONFIG[ru]["region_id"],
                            element=idx +
                            RU_CONFIG[ru]["start_channel"],
                            system="DataSelection",
                            connection_name=f"{PARTITION}.ds_tp_datareq_0")
                        for ru in range(len(RU_CONFIG))
                        for idx in range(RU_CONFIG[ru]["channel_count"])
                    ] if SOFTWARE_TPG_ENABLED else [])))),
            DAQModule(
                name='datawriter',
                plugin='DataWriter',
                connections={},  # {'trigger_record_input_queue': Connection('datawriter.trigger_record_q')},
                conf=dw.ConfParams(
                    initial_token_count=TOKEN_COUNT,
                    token_connection=PARTITION + ".triginh",
                    data_store_parameters=hdf5ds.ConfParams(
                        name="data_store",
                        version=3,
                        operational_environment=OPERATIONAL_ENVIRONMENT,
                        directory_path=OUTPUT_PATH,
                        max_file_size_bytes=MAX_FILE_SIZE,
                        disable_unique_filename_suffix=False,
                        filename_parameters=hdf5ds.FileNameParams(
                            overall_prefix=OPERATIONAL_ENVIRONMENT,
                            digits_for_run_number=6,
                            file_index_prefix="",
                            digits_for_file_index=4),
                        file_layout_parameters=hdf5ds.FileLayoutParams(
                            trigger_record_name_prefix="TriggerRecord",
                            digits_for_trigger_number=5,
                            path_param_list=hdf5ds.PathParamList([
                                hdf5ds.PathParams(
                                    detector_group_type="TPC",
                                    detector_group_name="TPC",
                                    region_name_prefix=TPC_REGION_NAME_PREFIX,
                                    element_name_prefix="Link"),
                                hdf5ds.PathParams(detector_group_type="PDS",
                                                  detector_group_name="PDS"),
                                hdf5ds.PathParams(
                                    detector_group_type="NDLArTPC",
                                    detector_group_name="NDLArTPC"),
                                hdf5ds.PathParams(
                                    detector_group_type="Trigger",
                                    detector_group_name="Trigger"),
                                hdf5ds.PathParams(detector_group_type="TPC_TP",
                                                  detector_group_name="TPC",
                                                  region_name_prefix="TP_APA",
                                                  element_name_prefix="Link")
                            ])))))
        ]

        if TPSET_WRITING_ENABLED:
            for idx in range(len(RU_CONFIG)):
                modules += [
                    DAQModule(name=f'tpset_subscriber_{idx}',
                              plugin="NetworkToQueue",
                              connections={
                                  'output':
                                  Connection(f"tpswriter.tpsets_from_netq")
                              },
                              conf=nor.Conf(name=f'{PARTITION}.tpsets_{idx}',
                                            subscriptions=["TPSets"]))
                ]

            modules += [
                DAQModule(name='tpswriter',
                          plugin="TPSetWriter",
                          connections={
                              'tpset_source': Connection("tpsets_from_netq")
                          },
                          conf=tpsw.ConfParams(max_file_size_bytes=1000000000))
            ]

        if SOFTWARE_TPG_ENABLED:
            modules += [
                DAQModule(
                    name='tp_fragment_receiver',
                    plugin="FragmentReceiver",
                    connections={'output': Connection("trb.data_fragments_q")},
                    conf=frcv.ConfParams(
                        general_queue_timeout=QUEUE_POP_WAIT_MS,
                        connection_name=PARTITION + ".tp_frags_0")),
                DAQModule(
                    name='ds_tpset_fragment_receiver',
                    plugin="FragmentReceiver",
                    connections={"output": Connection("trb.data_fragments_q")},
                    conf=frcv.ConfParams(
                        general_queue_timeout=QUEUE_POP_WAIT_MS,
                        connection_name=PARTITION + ".frags_tpset_ds_0"))
            ]

        mgraph = ModuleGraph(modules)
        # PAR 2021-12-10 All of the dataflow app's sending and
        # receiving is done via NetworkManager, so there are no
        # endpoints for the moment

        # mgraph.add_endpoint("fragments",         "trb.data_fragment_input_queue",    Direction.IN)
        # mgraph.add_endpoint("trigger_decisions", "trb.trigger_decision_input_queue", Direction.IN)
        # mgraph.add_endpoint("tokens",            "datawriter.token_output_queue",    Direction.OUT)

        # for i, producer in enumerate(FRAGMENT_PRODUCERS):
        #     queue_name=f"data_request_{i}_output_queue"
        #     mgraph.add_endpoint(data_request_endpoint_name(producer), f"trb.{queue_name}", Direction.OUT)

        super().__init__(modulegraph=mgraph, host=HOST)
        self.export("dataflow_app.dot")
Example #5
    def __init__(
            self,
            # NW_SPECS,
            RU_CONFIG=[],
            EMULATOR_MODE=False,
            DATA_RATE_SLOWDOWN_FACTOR=1,
            RUN_NUMBER=333,
            DATA_FILE="./frames.bin",
            FLX_INPUT=False,
            SSP_INPUT=True,
            CLOCK_SPEED_HZ=50000000,
            RUIDX=0,
            RAW_RECORDING_ENABLED=False,
            RAW_RECORDING_OUTPUT_DIR=".",
            FRONTEND_TYPE='wib',
            SYSTEM_TYPE='TPC',
            SOFTWARE_TPG_ENABLED=False,
            USE_FAKE_DATA_PRODUCERS=False,
            PARTITION="UNKNOWN",
            LATENCY_BUFFER_SIZE=499968,
            HOST="localhost"):
        """Generate the json configuration for the readout and DF process"""
        NUMBER_OF_DATA_PRODUCERS = len(RU_CONFIG)
        cmd_data = {}

        required_eps = {f'{PARTITION}.timesync_{RUIDX}'}
        # if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        #     raise RuntimeError(f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}")

        RATE_KHZ = CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000)

        MIN_LINK = RU_CONFIG[RUIDX]["start_channel"]
        MAX_LINK = MIN_LINK + RU_CONFIG[RUIDX]["channel_count"]

        print(
            f"ReadoutApp.__init__ with RUIDX={RUIDX}, MIN_LINK={MIN_LINK}, MAX_LINK={MAX_LINK}"
        )
        modules = []

        total_link_count = sum(ru["channel_count"] for ru in RU_CONFIG)

        if SOFTWARE_TPG_ENABLED:
            connections = {}

            request_receiver_geoid_map = []
            for idx in range(MIN_LINK, MAX_LINK):
                queue_inst = f"data_requests_{idx}"
                request_receiver_geoid_map.append(
                    rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"],
                                   element=idx,
                                   system=SYSTEM_TYPE,
                                   queueinstance=queue_inst))
                connections[f'output_{idx}'] = Connection(
                    f"datahandler_{idx}.data_requests_0",
                    queue_name=queue_inst)

                # This block is already guarded by SOFTWARE_TPG_ENABLED above,
                # so the TP request entries are added unconditionally here.
                queue_inst = f"tp_requests_{idx}"
                request_receiver_geoid_map.append(
                    rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"],
                                   element=idx + total_link_count,
                                   system=SYSTEM_TYPE,
                                   queueinstance=queue_inst))
                connections[f'tp_output_{idx}'] = Connection(
                    f"tp_datahandler_{idx}.data_requests_0",
                    queue_name=queue_inst)

            modules += [
                DAQModule(name="request_receiver",
                          plugin="RequestReceiver",
                          connections=connections,
                          conf=rrcv.ConfParams(
                              map=request_receiver_geoid_map,
                              general_queue_timeout=QUEUE_POP_WAIT_MS,
                              connection_name=f"{PARTITION}.datareq_{RUIDX}"))
            ]
            for idx in range(MIN_LINK, MAX_LINK):
                modules += [
                    DAQModule(
                        name=f"tp_datahandler_{idx}",
                        plugin="DataLinkHandler",
                        connections={},
                        conf=rconf.Conf(
                            readoutmodelconf=rconf.ReadoutModelConf(
                                source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                                region_id=RU_CONFIG[RUIDX]["region_id"],
                                element_id=total_link_count + idx),
                            latencybufferconf=rconf.LatencyBufferConf(
                                latency_buffer_size=LATENCY_BUFFER_SIZE,
                                region_id=RU_CONFIG[RUIDX]["region_id"],
                                element_id=total_link_count + idx),
                            rawdataprocessorconf=rconf.RawDataProcessorConf(
                                region_id=RU_CONFIG[RUIDX]["region_id"],
                                element_id=total_link_count + idx,
                                enable_software_tpg=False),
                            requesthandlerconf=rconf.RequestHandlerConf(
                                latency_buffer_size=LATENCY_BUFFER_SIZE,
                                pop_limit_pct=0.8,
                                pop_size_pct=0.1,
                                region_id=RU_CONFIG[RUIDX]["region_id"],
                                element_id=total_link_count + idx,
                                # output_file = f"output_{idx + MIN_LINK}.out",
                                stream_buffer_size=100
                                if FRONTEND_TYPE == 'pacman' else 8388608,
                                enable_raw_recording=False)))
                ]
            # modules += [DAQModule(name = f"tpset_publisher",
            #                    plugin = "QueueToNetwork",
            #                    # connections = {'input': Connection('tpset_queue', Direction.IN)},
            #                    conf = qton.Conf(msg_type="dunedaq::trigger::TPSet",
            #                                     msg_module_name="TPSetNQ",
            #                                     sender_config=nos.Conf(name=f"{PARTITION}.tpsets_{RUIDX}",
            #                                                            topic="TPSets",
            #                                                            stype="msgpack")))]
        if FRONTEND_TYPE == 'wib':
            modules += [
                DAQModule(name="errored_frame_consumer",
                          plugin="ErroredFrameConsumer",
                          connections={})
            ]

        # Several flags (USE_FAKE_DATA_PRODUCERS, SOFTWARE_TPG_ENABLED,
        # FRONTEND_TYPE) affect each link, so build the modules link by link
        # and branch inside the loop.

        for idx in range(MIN_LINK, MAX_LINK):
            if USE_FAKE_DATA_PRODUCERS:
                modules += [
                    DAQModule(name=f"fakedataprod_{idx}",
                              plugin='FakeDataProd',
                              connections={
                                  'input': Connection(f'data_request_{idx}')
                              })
                ]
            else:
                connections = {}
                # connections['raw_input']      = Connection(f"{FRONTEND_TYPE}_link_{idx}", Direction.IN)
                # connections['data_request_0'] = Connection(f'data_requests_{idx}',        Direction.IN)
                connections['fragment_queue'] = Connection(
                    'fragment_sender.input_queue')
                if SOFTWARE_TPG_ENABLED:
                    connections['tp_out'] = Connection(
                        f"tp_datahandler_{idx}.raw_input",
                        queue_name=f"sw_tp_link_{idx}",
                        queue_kind="FollySPSCQueue",
                        queue_capacity=100000)
                    # connections['tpset_out'] = Connection('tpset_queue',       Direction.OUT)

                if FRONTEND_TYPE == 'wib':
                    connections['errored_frames'] = Connection(
                        'errored_frame_consumer.input_queue')

                modules += [
                    DAQModule(name=f"datahandler_{idx}",
                              plugin="DataLinkHandler",
                              connections=connections)
                ]

        if not USE_FAKE_DATA_PRODUCERS:
            if FLX_INPUT:
                connections = {}
                for idx in range(
                        MIN_LINK,
                        MIN_LINK + min(5, RU_CONFIG[RUIDX]["channel_count"])):
                    connections[f'output_{idx}'] = Connection(
                        f"datahandler_{idx}.raw_input",
                        queue_name=f'{FRONTEND_TYPE}_link_{idx}',
                        queue_kind="FollySPSCQueue",
                        queue_capacity=100000)

                modules += [
                    DAQModule(name='flxcard_0',
                              plugin='FelixCardReader',
                              connections=connections,
                              conf=flxcr.Conf(
                                  card_id=RU_CONFIG[RUIDX]["card_id"],
                                  logical_unit=0,
                                  dma_id=0,
                                  chunk_trailer_size=32,
                                  dma_block_size_kb=4,
                                  dma_memory_size_gb=4,
                                  numa_id=0,
                                  num_links=min(
                                      5, RU_CONFIG[RUIDX]["channel_count"])))
                ]

                if RU_CONFIG[RUIDX]["channel_count"] > 5:
                    connections = {}
                    for idx in range(MIN_LINK + 5, MAX_LINK):
                        connections[f'output_{idx}'] = Connection(
                            f"datahandler_{idx}.raw_input",
                            queue_name=f'{FRONTEND_TYPE}_link_{idx}',
                            queue_kind="FollySPSCQueue",
                            queue_capacity=100000)

                    modules += [
                        DAQModule(
                            name="flxcard_1",
                            plugin="FelixCardReader",
                            connections=connections,
                            conf=flxcr.Conf(
                                card_id=RU_CONFIG[RUIDX]["card_id"],
                                logical_unit=1,
                                dma_id=0,
                                chunk_trailer_size=32,
                                dma_block_size_kb=4,
                                dma_memory_size_gb=4,
                                numa_id=0,
                                num_links=max(
                                    0, RU_CONFIG[RUIDX]["channel_count"] - 5)))
                    ]

            elif SSP_INPUT:
                modules += [
                    DAQModule(name="ssp_0",
                              plugin="SSPCardReader",
                              connections={
                                  f'output_{idx}': Connection(
                                      f"datahandler_{idx}.raw_input",
                                      queue_name=f'{FRONTEND_TYPE}_link_{idx}',
                                      queue_kind="FollySPSCQueue",
                                      queue_capacity=100000)
                                  for idx in range(MIN_LINK, MAX_LINK)
                              },
                              conf=flxcr.Conf(
                                  card_id=RU_CONFIG[RUIDX]["card_id"],
                                  logical_unit=0,
                                  dma_id=0,
                                  chunk_trailer_size=32,
                                  dma_block_size_kb=4,
                                  dma_memory_size_gb=4,
                                  numa_id=0,
                                  num_links=RU_CONFIG[RUIDX]["channel_count"]))
                ]

            else:
                fake_source = "fake_source"
                card_reader = "FakeCardReader"
                conf = sec.Conf(
                    link_confs=[
                        sec.LinkConfiguration(
                            geoid=sec.GeoID(
                                system=SYSTEM_TYPE,
                                region=RU_CONFIG[RUIDX]["region_id"],
                                element=idx),
                            slowdown=DATA_RATE_SLOWDOWN_FACTOR,
                            queue_name=f"output_{idx}",
                            data_filename=DATA_FILE,
                            emu_frame_error_rate=0)
                        for idx in range(MIN_LINK, MAX_LINK)
                    ],
                    # input_limit=10485100, # default
                    queue_timeout_ms=QUEUE_POP_WAIT_MS)

                if FRONTEND_TYPE == 'pacman':
                    fake_source = "pacman_source"
                    card_reader = "PacmanCardReader"
                    conf = pcr.Conf(
                        link_confs=[
                            pcr.LinkConfiguration(geoid=pcr.GeoID(
                                system=SYSTEM_TYPE,
                                region=RU_CONFIG[RUIDX]["region_id"],
                                element=idx))
                            for idx in range(MIN_LINK, MAX_LINK)
                        ],
                        zmq_receiver_timeout=10000)
                modules += [
                    DAQModule(name=fake_source,
                              plugin=card_reader,
                              connections={
                                  f'output_{idx}': Connection(
                                      f"datahandler_{idx}.raw_input",
                                      queue_name=f'{FRONTEND_TYPE}_link_{idx}',
                                      queue_kind="FollySPSCQueue",
                                      queue_capacity=100000)
                                  for idx in range(MIN_LINK, MAX_LINK)
                              },
                              conf=conf)
                ]

        modules += [
            DAQModule(name="fragment_sender",
                      plugin="FragmentSender",
                      conf=None)
        ]

        mgraph = ModuleGraph(modules)

        for idx in range(MIN_LINK, MAX_LINK):
            # TODO: Should we just have one timesync outgoing endpoint?
            mgraph.add_endpoint(f"timesync_{idx}",
                                f"datahandler_{idx}.timesync", Direction.OUT)
            if SOFTWARE_TPG_ENABLED:
                mgraph.add_endpoint(f"tpsets_{idx}",
                                    f"datahandler_{idx}.tpset_out",
                                    Direction.OUT)
                mgraph.add_endpoint(
                    f"timesync_{idx+RU_CONFIG[RUIDX]['channel_count']}",
                    f"tp_datahandler_{idx}.timesync", Direction.OUT)

            # Add fragment producers for raw data
            mgraph.add_fragment_producer(
                region=RU_CONFIG[RUIDX]["region_id"],
                element=idx,
                system=SYSTEM_TYPE,
                requests_in=f"datahandler_{idx}.data_requests_0",
                fragments_out=f"datahandler_{idx}.data_response_0")

            # Add fragment producers for TPC TPs. Make sure the element index doesn't overlap with the ones for raw data
            if SOFTWARE_TPG_ENABLED:
                mgraph.add_fragment_producer(
                    region=RU_CONFIG[RUIDX]["region_id"],
                    element=idx + RU_CONFIG[RUIDX]["channel_count"],
                    system=SYSTEM_TYPE,
                    requests_in=f"tp_datahandler_{idx}.data_requests_0",
                    fragments_out=f"tp_datahandler_{idx}.data_response_0")

        super().__init__(mgraph, host=HOST)
        self.export("readout_app.dot")
Example #6
    def __init__(self, INPUT_FILES: list[str], SLOWDOWN_FACTOR: float,
                 CHANNEL_MAP_NAME: str, KEEP_COLLECTION: bool,
                 KEEP_INDUCTION: bool):

        clock_frequency_hz = 50_000_000 / SLOWDOWN_FACTOR
        modules = []

        n_streams = len(INPUT_FILES)

        tp_streams = [
            tpm.TPStream(filename=input_file,
                         region_id=0,
                         element_id=istream,
                         output_sink_name=f"output{istream}")
            for istream, input_file in enumerate(INPUT_FILES)
        ]

        tpm_connections = {
            f"output{istream}":
            Connection(f"chan_filter{istream}.tpset_source")
            for istream in range(n_streams)
        }
        modules.append(
            DAQModule(
                name="tpm",
                plugin="TriggerPrimitiveMaker",
                conf=tpm.ConfParams(
                    tp_streams=tp_streams,
                    number_of_loops=-1,  # Infinite
                    tpset_time_offset=0,
                    tpset_time_width=10000,
                    clock_frequency_hz=clock_frequency_hz,
                    maximum_wait_time_us=1000,
                ),
                connections=tpm_connections))

        for istream in range(n_streams):
            modules.append(
                DAQModule(name=f"chan_filter{istream}",
                          plugin="TPChannelFilter",
                          conf=chan_filter.Conf(
                              channel_map_name=CHANNEL_MAP_NAME,
                              keep_collection=KEEP_COLLECTION,
                              keep_induction=KEEP_INDUCTION),
                          connections={
                              "tpset_sink":
                              Connection(f"ftpchm{istream}.tpset_source")
                          }))

            modules.append(
                DAQModule(name=f"ftpchm{istream}",
                          plugin="FakeTPCreatorHeartbeatMaker",
                          conf=ftpchm.Conf(heartbeat_interval=500_000),
                          connections={"tpset_sink": Connection("zip.input")}))

        modules.append(
            DAQModule(
                name="zip",
                plugin="TPZipper",
                conf=tzip.ConfParams(
                    cardinality=n_streams,
                    max_latency_ms=10,
                    region_id=0,
                    element_id=0,
                ),
                connections={"output": Connection("tps_sink.tpset_source")}))

        modules.append(DAQModule(name="tps_sink", plugin="TPSetSink"))

        mgraph = ModuleGraph(modules)
        super().__init__(modulegraph=mgraph,
                         host="localhost",
                         name='FakeTPToSinkApp')
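The resulting chain, as wired by the connections above (one chan_filter/ftpchm pair per input stream):

# tpm --output{i}--> chan_filter{i} --tpset_sink--> ftpchm{i} --tpset_sink--> zip --output--> tps_sink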
Example #7
    def __init__(self,
                 # NW_SPECS: list,
                 
                 SOFTWARE_TPG_ENABLED: bool = False,
                 RU_CONFIG: list = [],

                 ACTIVITY_PLUGIN: str = 'TriggerActivityMakerPrescalePlugin',
                 ACTIVITY_CONFIG: dict = dict(prescale=10000),

                 CANDIDATE_PLUGIN: str = 'TriggerCandidateMakerPrescalePlugin',
                 CANDIDATE_CONFIG: dict = dict(prescale=10),

                 TOKEN_COUNT: int = 10,
                 SYSTEM_TYPE = 'wib',
                 TTCM_S1: int = 1,
                 TTCM_S2: int = 2,
                 TRIGGER_WINDOW_BEFORE_TICKS: int = 1000,
                 TRIGGER_WINDOW_AFTER_TICKS: int = 1000,
                 PARTITION="UNKNOWN",
                 HOST="localhost"
                 ):
        """
        { item_description }
        """
        
        # Generate schema for the maker plugins on the fly in the temptypes module
        make_moo_record(ACTIVITY_CONFIG , 'ActivityConf' , 'temptypes')
        make_moo_record(CANDIDATE_CONFIG, 'CandidateConf', 'temptypes')
        import temptypes

        modules = []
    
        if SOFTWARE_TPG_ENABLED:
            connections_request_receiver = {}
            connections_tpset_receiver = {}
            for ru in range(len(RU_CONFIG)):
                for idy in range(RU_CONFIG[ru]["channel_count"]):
                    connections_request_receiver[f'output_{ru}_{idy}'] = Connection(f'buf{ru}_{idy}.data_request_q{ru}_{idy}')
                    connections_tpset_receiver  [f'output_{ru}_{idy}'] = Connection(f'buf{ru}_{idy}.tpset_q_for_buf{ru}_{idy}')

            config_request_receiver = rrcv.ConfParams(map = [rrcv.geoidinst(region=RU_CONFIG[ru]["region_id"],
                                                                            element=idy+RU_CONFIG[ru]["start_channel"],
                                                                            system="DataSelection",
                                                                            queueinstance=f"data_request_q{ru}_{idy}")
                                                             for ru in range(len(RU_CONFIG)) for idy in range(RU_CONFIG[ru]["channel_count"])],
                                                      general_queue_timeout = 100,
                                                      connection_name = f"{PARTITION}.ds_tp_datareq_0")
            
            config_tpset_receiver = tpsrcv.ConfParams(map = [tpsrcv.geoidinst(region=RU_CONFIG[ru]["region_id"],
                                                                              element=idy+RU_CONFIG[ru]["start_channel"],
                                                                              system=SYSTEM_TYPE,
                                                                              queueinstance=f"tpset_q_for_buf{ru}_{idy}")
                                                             for ru in range(len(RU_CONFIG)) for idy in range(RU_CONFIG[ru]["channel_count"])],
                                                      general_queue_timeout = 100,
                                                      topic = f"TPSets")
    
            config_qton_fragment = qton.Conf(msg_type="std::unique_ptr<dunedaq::daqdataformats::Fragment>",
                                             msg_module_name="FragmentNQ",
                                             sender_config=nos.Conf(name=f"{PARTITION}.frags_tpset_ds_0",stype="msgpack"))

            config_tcm =  tcm.Conf(candidate_maker=CANDIDATE_PLUGIN,
                                   candidate_maker_config=temptypes.CandidateConf(**CANDIDATE_CONFIG))
            
            modules += [DAQModule(name = 'request_receiver',
                               plugin = 'RequestReceiver',
                               connections = connections_request_receiver,
                               conf = config_request_receiver),
                        
                        DAQModule(name = 'tpset_receiver',
                               plugin = 'TPSetReceiver',
                               connections = connections_tpset_receiver,
                               conf = config_tpset_receiver),
                        
                        DAQModule(name = 'qton_fragments',
                               plugin = 'QueueToNetwork',
                               connections = {}, # all the incoming links in TPSetBufferCreators
                               conf = config_qton_fragment),
                        
                        DAQModule(name = 'tcm',
                               plugin = 'TriggerCandidateMaker',
                               connections = {#'input' : Connection(f'tcm.taset_q'),
                                   'output': Connection(f'mlt.trigger_candidate_q')},
                               conf = config_tcm)]
            
            for ru in range(len(RU_CONFIG)):
                
                modules += [DAQModule(name = f'tpset_subscriber_{ru}',
                                   plugin = 'NetworkToQueue',
                                   connections = {'output': Connection(f'zip_{ru}.tpsets_from_netq_{ru}')},
                                   conf = ntoq.Conf(msg_type="dunedaq::trigger::TPSet",
                                                    msg_module_name="TPSetNQ",
                                                    receiver_config=nor.Conf(name=f'{PARTITION}.tpsets_{ru}',
                                                                             subscriptions=["TPSets"]))),
                            
                            DAQModule(name = f'zip_{ru}',
                                   plugin = 'TPZipper',
                                   connections = {# 'input' are App.network_endpoints, from RU
                                       'output': Connection(f'tam_{ru}.input')},
                                   conf = tzip.ConfParams(cardinality=RU_CONFIG[ru]['channel_count'],
                                                          max_latency_ms=1000,
                                                          region_id=0,
                                                          element_id=0)),
                            
                            DAQModule(name = f'tam_{ru}',
                                   plugin = 'TriggerActivityMaker',
                                   connections = {'output': Connection('tcm.taset_q')},
                                   conf = tam.Conf(activity_maker=ACTIVITY_PLUGIN,
                                                   geoid_region=0,  # Fake placeholder
                                                   geoid_element=0,  # Fake placeholder
                                                   window_time=10000,  # should match whatever makes TPSets, in principle
                                                   buffer_time=625000,  # 10ms in 62.5 MHz ticks
                                                   activity_maker_config=temptypes.ActivityConf(**ACTIVITY_CONFIG)))]

                for idy in range(RU_CONFIG[ru]["channel_count"]):
                    modules += [DAQModule(name = f'buf{ru}_{idy}',
                                       plugin = 'TPSetBufferCreator',
                                       connections = {#'tpset_source': Connection(f"tpset_q_for_buf{ru}_{idy}"),#already in request_receiver
                                                      #'data_request_source': Connection(f"data_request_q{ru}_{idy}"), #ditto
                                                      'fragment_sink': Connection('qton_fragments.fragment_q')},
                                       conf = buf.Conf(tpset_buffer_size=10000, region=RU_CONFIG[ru]["region_id"], element=idy + RU_CONFIG[ru]["start_channel"]))]

        modules += [DAQModule(name = 'ttcm',
                           plugin = 'TimingTriggerCandidateMaker',
                           connections={"output": Connection("mlt.trigger_candidate_q")},
                           conf=ttcm.Conf(s1=ttcm.map_t(signal_type=TTCM_S1,
                                                        time_before=TRIGGER_WINDOW_BEFORE_TICKS,
                                                        time_after=TRIGGER_WINDOW_AFTER_TICKS),
                                          s2=ttcm.map_t(signal_type=TTCM_S2,
                                                        time_before=TRIGGER_WINDOW_BEFORE_TICKS,
                                                        time_after=TRIGGER_WINDOW_AFTER_TICKS),
                                          hsievent_connection_name = PARTITION+".hsievent"))]
                    
        # We need to populate the list of links based on the fragment
        # producers available in the system. This is a bit of a
        # chicken-and-egg problem, because the trigger app itself creates
        # fragment producers (see below). Eventually when the MLT is its
        # own process, this problem will probably go away, but for now, we
        # leave the list of links here blank, and replace it in
        # util.connect_fragment_producers
        modules += [DAQModule(name = 'mlt',
                              plugin = 'ModuleLevelTrigger',
                              conf=mlt.ConfParams(links=[]))] # To be updated later - see comment above
        
        mgraph = ModuleGraph(modules)
        mgraph.add_endpoint("hsievents",  "ttcm.input", Direction.IN)
        if SOFTWARE_TPG_ENABLED:
            for idx in range(len(RU_CONFIG)):
                mgraph.add_endpoint(f"tpsets_into_chain_link{idx}", f"tpset_receiver.input", Direction.IN)
                mgraph.add_endpoint(f"tpsets_into_buffer_link{idx}", f"tpset_subscriber_{idx}.tpset_source", Direction.IN)

                mgraph.add_fragment_producer(region=0, element=idx, system="DataSelection",
                                             requests_in=f"request_receiver.data_request_source",
                                             fragments_out=f"qton_fragments.fragment_sink")


        mgraph.add_endpoint("trigger_decisions", "mlt.trigger_decision_sink", Direction.OUT)
        mgraph.add_endpoint("tokens", "mlt.token_source", Direction.IN)

        super().__init__(modulegraph=mgraph, host=HOST, name='TriggerApp')
        self.export("trigger_app.dot")