Example #1
    def __init__(
            self,
            # NW_SPECS,
            FRAGMENT_PRODUCERS,
            RU_CONFIG=[],
            RUN_NUMBER=333,
            OUTPUT_PATH=".",
            TOKEN_COUNT=0,
            SYSTEM_TYPE="TPC",
            SOFTWARE_TPG_ENABLED=False,
            TPSET_WRITING_ENABLED=False,
            PARTITION="UNKNOWN",
            OPERATIONAL_ENVIRONMENT="swtest",
            TPC_REGION_NAME_PREFIX="APA",
            HOST="localhost",
            MAX_FILE_SIZE=4 * 1024 * 1024 * 1024):
        """Generate the json configuration for the readout and DF process"""

        required_eps = {PARTITION + '.trigdec', PARTITION + '.triginh'}
        # if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        #     raise RuntimeError(f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}")

        modules = []
        total_link_count = 0
        for ru in range(len(RU_CONFIG)):
            total_link_count += RU_CONFIG[ru]["channel_count"]
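        # Note: when software TPG is enabled, TP fragments reuse the raw-link
        # GeoID numbering offset by total_link_count (see the second geoidinst
        # block in the trb map below).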

        modules += [
            DAQModule(
                name='trigdec_receiver',
                plugin='TriggerDecisionReceiver',
                connections={'output': Connection('trb.trigger_decision_q')},
                conf=tdrcv.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS,
                                      connection_name=PARTITION + ".trigdec")),
            DAQModule(
                name='fragment_receiver',
                plugin='FragmentReceiver',
                connections={'output': Connection('trb.data_fragments_q')},
                conf=frcv.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS,
                                     connection_name=PARTITION + ".frags_0")),
            DAQModule(
                name='trb',
                plugin='TriggerRecordBuilder',
                connections={
                    'trigger_record_output_queue':
                    Connection('datawriter.trigger_record_q')
                },
                conf=trb.ConfParams(
                    general_queue_timeout=QUEUE_POP_WAIT_MS,
                    reply_connection_name=PARTITION + ".frags_0",
                    map=trb.mapgeoidconnections([
                        trb.geoidinst(
                            region=RU_CONFIG[ru]["region_id"],
                            element=idx + RU_CONFIG[ru]["start_channel"],
                            system=SYSTEM_TYPE,
                            connection_name=f"{PARTITION}.datareq_{ru}")
                        for ru in range(len(RU_CONFIG))
                        for idx in range(RU_CONFIG[ru]["channel_count"])
                    ] + ([
                        trb.geoidinst(
                            region=RU_CONFIG[ru]["region_id"],
                            element=idx +
                            RU_CONFIG[ru]["start_channel"] + total_link_count,
                            system=SYSTEM_TYPE,
                            connection_name=f"{PARTITION}.datareq_{ru}")
                        for ru in range(len(RU_CONFIG))
                        for idx in range(RU_CONFIG[ru]["channel_count"])
                    ] if SOFTWARE_TPG_ENABLED else []) + ([
                        trb.geoidinst(
                            region=RU_CONFIG[ru]["region_id"],
                            element=idx +
                            RU_CONFIG[ru]["start_channel"],
                            system="DataSelection",
                            connection_name=f"{PARTITION}.ds_tp_datareq_0")
                        for ru in range(len(RU_CONFIG))
                        for idx in range(RU_CONFIG[ru]["channel_count"])
                    ] if SOFTWARE_TPG_ENABLED else [])))),
            DAQModule(
                name='datawriter',
                plugin='DataWriter',
                connections={},  # {'trigger_record_input_queue': Connection('datawriter.trigger_record_q')}
                conf=dw.ConfParams(
                    initial_token_count=TOKEN_COUNT,
                    token_connection=PARTITION + ".triginh",
                    data_store_parameters=hdf5ds.ConfParams(
                        name="data_store",
                        version=3,
                        operational_environment=OPERATIONAL_ENVIRONMENT,
                        directory_path=OUTPUT_PATH,
                        max_file_size_bytes=MAX_FILE_SIZE,
                        disable_unique_filename_suffix=False,
                        filename_parameters=hdf5ds.FileNameParams(
                            overall_prefix=OPERATIONAL_ENVIRONMENT,
                            digits_for_run_number=6,
                            file_index_prefix="",
                            digits_for_file_index=4),
                        file_layout_parameters=hdf5ds.FileLayoutParams(
                            trigger_record_name_prefix="TriggerRecord",
                            digits_for_trigger_number=5,
                            path_param_list=hdf5ds.PathParamList([
                                hdf5ds.PathParams(
                                    detector_group_type="TPC",
                                    detector_group_name="TPC",
                                    region_name_prefix=TPC_REGION_NAME_PREFIX,
                                    element_name_prefix="Link"),
                                hdf5ds.PathParams(detector_group_type="PDS",
                                                  detector_group_name="PDS"),
                                hdf5ds.PathParams(
                                    detector_group_type="NDLArTPC",
                                    detector_group_name="NDLArTPC"),
                                hdf5ds.PathParams(
                                    detector_group_type="Trigger",
                                    detector_group_name="Trigger"),
                                hdf5ds.PathParams(detector_group_type="TPC_TP",
                                                  detector_group_name="TPC",
                                                  region_name_prefix="TP_APA",
                                                  element_name_prefix="Link")
                            ])))))
        ]

        if TPSET_WRITING_ENABLED:
            for idx in range(len(RU_CONFIG)):
                modules += [
                    DAQModule(name=f'tpset_subscriber_{idx}',
                              plugin="NetworkToQueue",
                              connections={
                                  'output':
                                  Connection(f"tpswriter.tpsets_from_netq")
                              },
                              conf=nor.Conf(name=f'{PARTITION}.tpsets_{idx}',
                                            subscriptions=["TPSets"]))
                ]

            modules += [
                DAQModule(name='tpswriter',
                          plugin="TPSetWriter",
                          connections={
                              'tpset_source': Connection("tpsets_from_netq")
                          },
                          conf=tpsw.ConfParams(max_file_size_bytes=1000000000))
            ]

        if SOFTWARE_TPG_ENABLED:
            modules += [
                DAQModule(
                    name='tp_fragment_receiver',
                    plugin="FragmentReceiver",
                    connections={'output': Connection("trb.data_fragments_q")},
                    conf=frcv.ConfParams(
                        general_queue_timeout=QUEUE_POP_WAIT_MS,
                        connection_name=PARTITION + ".tp_frags_0")),
                DAQModule(
                    name='ds_tpset_fragment_receiver',
                    plugin="FragmentReceiver",
                    connections={"output": Connection("trb.data_fragments_q")},
                    conf=frcv.ConfParams(
                        general_queue_timeout=QUEUE_POP_WAIT_MS,
                        connection_name=PARTITION + ".frags_tpset_ds_0"))
            ]

        mgraph = ModuleGraph(modules)
        # PAR 2021-12-10 All of the dataflow app's sending and
        # receiving is done via NetworkManager, so there are no
        # endpoints for the moment

        # mgraph.add_endpoint("fragments",         "trb.data_fragment_input_queue",    Direction.IN)
        # mgraph.add_endpoint("trigger_decisions", "trb.trigger_decision_input_queue", Direction.IN)
        # mgraph.add_endpoint("tokens",            "datawriter.token_output_queue",    Direction.OUT)

        # for i, producer in enumerate(FRAGMENT_PRODUCERS):
        #     queue_name=f"data_request_{i}_output_queue"
        #     mgraph.add_endpoint(data_request_endpoint_name(producer), f"trb.{queue_name}", Direction.OUT)

        super().__init__(modulegraph=mgraph, host=HOST)
        self.export("dataflow_app.dot")
Example #2
def generate(NETWORK_ENDPOINTS,
             NUMBER_OF_DATA_PRODUCERS=2,
             EMULATOR_MODE=False,
             DATA_RATE_SLOWDOWN_FACTOR=1,
             RUN_NUMBER=333,
             DATA_FILE="./frames.bin",
             OUTPUT_PATH=".",
             DISABLE_OUTPUT=False,
             FLX_INPUT=True,
             TOKEN_COUNT=0,
             CLOCK_SPEED_HZ=50000000):
    """Generate the json configuration for the readout and DF process"""

    cmd_data = {}

    required_eps = {'trigdec', 'triginh', 'timesync'}
    if not required_eps.issubset(NETWORK_ENDPOINTS):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join(NETWORK_ENDPOINTS.keys())}"
        )

    LATENCY_BUFFER_SIZE = 3 * CLOCK_SPEED_HZ / (25 * 12 *
                                                DATA_RATE_SLOWDOWN_FACTOR)
    RATE_KHZ = CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000)
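    # The 25 * 12 divisor is not explained here: a WIB frame spans 25 ticks of
    # the front-end clock, and readout presumably batches 12 frames per
    # superchunk, making RATE_KHZ the per-link superchunk rate in kHz.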

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100),
        app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=100),
        app.QueueSpec(
            inst="trigger_decision_q", kind='FollySPSCQueue', capacity=100),
        app.QueueSpec(inst="trigger_decision_from_netq",
                      kind='FollySPSCQueue',
                      capacity=100),
        app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping",
                      kind='FollySPSCQueue',
                      capacity=100),
        app.QueueSpec(
            inst="trigger_record_q", kind='FollySPSCQueue', capacity=100),
        app.QueueSpec(
            inst="data_fragments_q", kind='FollyMPMCQueue', capacity=1000),
    ] + [
        app.QueueSpec(
            inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=100)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"wib_link_{idx}", kind='FollySPSCQueue', capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("ntoq_trigdec", "NetworkToQueue", [
            app.QueueInfo(
                name="output", inst="trigger_decision_from_netq", dir="output")
        ]),
        mspec("qton_token", "QueueToNetwork",
              [app.QueueInfo(name="input", inst="token_q", dir="input")]),
        mspec("qton_timesync", "QueueToNetwork",
              [app.QueueInfo(name="input", inst="time_sync_q", dir="input")]),
        mspec("rqg", "RequestGenerator", [
            app.QueueInfo(name="trigger_decision_input_queue",
                          inst="trigger_decision_from_netq",
                          dir="input"),
            app.QueueInfo(name="trigger_decision_for_event_building",
                          inst="trigger_decision_copy_for_bookkeeping",
                          dir="output"),
        ] + [
            app.QueueInfo(name=f"data_request_{idx}_output_queue",
                          inst=f"data_requests_{idx}",
                          dir="output")
            for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),
        mspec("ffr", "FragmentReceiver", [
            app.QueueInfo(name="trigger_decision_input_queue",
                          inst="trigger_decision_copy_for_bookkeeping",
                          dir="input"),
            app.QueueInfo(name="trigger_record_output_queue",
                          inst="trigger_record_q",
                          dir="output"),
            app.QueueInfo(name="data_fragment_input_queue",
                          inst="data_fragments_q",
                          dir="input"),
        ]),
        mspec("datawriter", "DataWriter", [
            app.QueueInfo(name="trigger_record_input_queue",
                          inst="trigger_record_q",
                          dir="input"),
            app.QueueInfo(
                name="token_output_queue", inst="token_q", dir="output"),
        ]),
    ] + [
        mspec(f"datahandler_{idx}", "DataLinkHandler", [
            app.QueueInfo(
                name="raw_input", inst=f"wib_link_{idx}", dir="input"),
            app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"),
            app.QueueInfo(
                name="requests", inst=f"data_requests_{idx}", dir="input"),
            app.QueueInfo(
                name="fragments", inst="data_fragments_q", dir="output"),
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]

    if FLX_INPUT:
        mod_specs.append(
            mspec("flxcard_0", "FelixCardReader", [
                app.QueueInfo(
                    name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output")
                for idx in range(0, min(5, NUMBER_OF_DATA_PRODUCERS))
            ]))
        if NUMBER_OF_DATA_PRODUCERS > 5:
            mod_specs.append(
                mspec("flxcard_1", "FelixCardReader", [
                    app.QueueInfo(name=f"output_{idx}",
                                  inst=f"wib_link_{idx}",
                                  dir="output")
                    for idx in range(5, NUMBER_OF_DATA_PRODUCERS)
                ]))
    else:
        mod_specs.append(
            mspec("fake_source", "FakeCardReader", [
                app.QueueInfo(
                    name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output")
                for idx in range(NUMBER_OF_DATA_PRODUCERS)
            ]))

    cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs)

    cmd_data['conf'] = acmd([
        ("ntoq_trigdec",
         ntoq.Conf(msg_type="dunedaq::dfmessages::TriggerDecision",
                   msg_module_name="TriggerDecisionNQ",
                   receiver_config=nor.Conf(
                       ipm_plugin_type="ZmqReceiver",
                       address=NETWORK_ENDPOINTS["trigdec"]))),
        ("qton_token",
         qton.Conf(msg_type="dunedaq::dfmessages::TriggerDecisionToken",
                   msg_module_name="TriggerDecisionTokenNQ",
                   sender_config=nos.Conf(ipm_plugin_type="ZmqSender",
                                          address=NETWORK_ENDPOINTS["triginh"],
                                          stype="msgpack"))),
        ("qton_timesync",
         qton.Conf(msg_type="dunedaq::dfmessages::TimeSync",
                   msg_module_name="TimeSyncNQ",
                   sender_config=nos.Conf(
                       ipm_plugin_type="ZmqSender",
                       address=NETWORK_ENDPOINTS["timesync"],
                       stype="msgpack"))),
        ("rqg",
         rqg.ConfParams(map=rqg.mapgeoidqueue([
             rqg.geoidinst(
                 apa=0, link=idx, queueinstance=f"data_requests_{idx}")
             for idx in range(NUMBER_OF_DATA_PRODUCERS)
         ]))),
        ("ffr", ffr.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS)),
        (
            "datawriter",
            dw.ConfParams(
                initial_token_count=TOKEN_COUNT,
                data_store_parameters=hdf5ds.ConfParams(
                    name="data_store",
                    # type = "HDF5DataStore", # default
                    directory_path=OUTPUT_PATH,  # default
                    # mode = "all-per-file", # default
                    max_file_size_bytes=1073741824,
                    disable_unique_filename_suffix=False,
                    filename_parameters=hdf5ds.HDF5DataStoreFileNameParams(
                        overall_prefix="swtest",
                        digits_for_run_number=6,
                        file_index_prefix="",
                        digits_for_file_index=4,
                    ),
                    file_layout_parameters=hdf5ds.HDF5DataStoreFileLayoutParams(
                        trigger_record_name_prefix="TriggerRecord",
                        digits_for_trigger_number=5,
                        digits_for_apa_number=3,
                        digits_for_link_number=2,
                    )))),
        (
            "fake_source",
            fakecr.Conf(
                link_ids=list(range(NUMBER_OF_DATA_PRODUCERS)),
                # input_limit=10485100, # default
                rate_khz=RATE_KHZ,
                raw_type="wib",
                data_filename=DATA_FILE,
                queue_timeout_ms=QUEUE_POP_WAIT_MS)),
        ("flxcard_0",
         flxcr.Conf(card_id=0,
                    logical_unit=0,
                    dma_id=0,
                    chunk_trailer_size=32,
                    dma_block_size_kb=4,
                    dma_memory_size_gb=4,
                    numa_id=0,
                    num_links=min(5, NUMBER_OF_DATA_PRODUCERS))),
        ("flxcard_1",
         flxcr.Conf(card_id=0,
                    logical_unit=1,
                    dma_id=0,
                    chunk_trailer_size=32,
                    dma_block_size_kb=4,
                    dma_memory_size_gb=4,
                    numa_id=0,
                    num_links=max(0, NUMBER_OF_DATA_PRODUCERS - 5))),
    ] + [
        (
            f"datahandler_{idx}",
            dlh.Conf(
                raw_type="wib",
                emulator_mode=EMULATOR_MODE,
                # fake_trigger_flag=0, # default
                source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                latency_buffer_size=LATENCY_BUFFER_SIZE,
                pop_limit_pct=0.8,
                pop_size_pct=0.1,
                apa_number=0,
                link_number=idx)) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ])

    startpars = rccmd.StartParams(run=RUN_NUMBER,
                                  disable_data_storage=DISABLE_OUTPUT)
    cmd_data['start'] = acmd([
        ("qton_token", startpars),
        ("datawriter", startpars),
        ("ffr", startpars),
        ("qton_timesync", startpars),
        ("datahandler_.*", startpars),
        ("fake_source", startpars),
        ("flxcard.*", startpars),
        ("rqg", startpars),
        ("ntoq_trigdec", startpars),
    ])

    cmd_data['stop'] = acmd([
        ("ntoq_trigdec", None),
        ("rqg", None),
        ("flxcard.*", None),
        ("fake_source", None),
        ("datahandler_.*", None),
        ("qton_timesync", None),
        ("ffr", None),
        ("datawriter", None),
        ("qton_token", None),
    ])

    cmd_data['pause'] = acmd([("", None)])

    cmd_data['resume'] = acmd([("", None)])

    cmd_data['scrap'] = acmd([("", None)])

    return cmd_data
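
# A minimal, hypothetical driver for generate(); the endpoint addresses are
# illustrative placeholders, and the surrounding module is assumed to provide
# everything generate() relies on (app, acmd, mspec, schema modules, ...):
#
#   endpoints = {"trigdec": "tcp://127.0.0.1:12345",
#                "triginh": "tcp://127.0.0.1:12346",
#                "timesync": "tcp://127.0.0.1:12347"}
#   cmd_data = generate(endpoints, NUMBER_OF_DATA_PRODUCERS=2, FLX_INPUT=False)
#   print(sorted(cmd_data))  # ['conf', 'init', 'pause', 'resume', 'scrap', 'start', 'stop']
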
def generate_df(
        network_endpoints,
        NUMBER_OF_DATA_PRODUCERS=2,
        EMULATOR_MODE=False,
        DATA_RATE_SLOWDOWN_FACTOR=1,
        RUN_NUMBER=333,
        TRIGGER_RATE_HZ=1.0,
        DATA_FILE="./frames.bin",
        OUTPUT_PATH=".",
        DISABLE_OUTPUT=False,
        FLX_INPUT=True,
        TOKEN_COUNT=0):
    """Generate the json configuration for the readout and DF process"""

    trg_interval_ticks = math.floor((1/TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ/DATA_RATE_SLOWDOWN_FACTOR)
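    # i.e. one trigger per (1 / TRIGGER_RATE_HZ) wall-clock seconds, expressed
    # in ticks of the slowed-down clock.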

    # Define modules and queues
    queue_bare_specs = [
            app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100),
            app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=100),
            app.QueueSpec(inst="trigger_decision_q", kind='FollySPSCQueue', capacity=100),
            app.QueueSpec(inst="trigger_decision_from_netq", kind='FollySPSCQueue', capacity=100),
            app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping", kind='FollySPSCQueue', capacity=100),
            app.QueueSpec(inst="trigger_record_q", kind='FollySPSCQueue', capacity=100),
            app.QueueSpec(inst="data_fragments_q", kind='FollyMPMCQueue', capacity=1000),
        ] + [
            app.QueueSpec(inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=100)
                for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ] + [
            app.QueueSpec(inst=f"wib_link_{idx}", kind='FollySPSCQueue', capacity=100000)
                for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst))

    mod_specs = [
        mspec("ntoq_trigdec", "NetworkToQueue", [
                        app.QueueInfo(name="output", inst="trigger_decision_from_netq", dir="output")
                    ]),

        mspec("qton_token", "QueueToNetwork", [
                        app.QueueInfo(name="input", inst="token_q", dir="input")
                    ]),

        mspec("qton_timesync", "QueueToNetwork", [
                        app.QueueInfo(name="input", inst="time_sync_q", dir="input")
                    ]),

        mspec("rqg", "RequestGenerator", [
                        app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_from_netq", dir="input"),
                        app.QueueInfo(name="trigger_decision_for_event_building", inst="trigger_decision_copy_for_bookkeeping", dir="output"),
                    ] + [
                        app.QueueInfo(name=f"data_request_{idx}_output_queue", inst=f"data_requests_{idx}", dir="output")
                            for idx in range(NUMBER_OF_DATA_PRODUCERS)
                    ]),

        mspec("ffr", "FragmentReceiver", [
                        app.QueueInfo(name="trigger_decision_input_queue", inst="trigger_decision_copy_for_bookkeeping", dir="input"),
                        app.QueueInfo(name="trigger_record_output_queue", inst="trigger_record_q", dir="output"),
                        app.QueueInfo(name="data_fragment_input_queue", inst="data_fragments_q", dir="input"),
                    ]),

        mspec("datawriter", "DataWriter", [
                        app.QueueInfo(name="trigger_record_input_queue", inst="trigger_record_q", dir="input"),
                        app.QueueInfo(name="token_output_queue", inst="token_q", dir="output"),
                    ]),

        ] + [
                mspec(f"datahandler_{idx}", "DataLinkHandler", [

                            app.QueueInfo(name="raw_input", inst=f"wib_link_{idx}", dir="input"),
                            app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"),
                            app.QueueInfo(name="requests", inst=f"data_requests_{idx}", dir="input"),
                            app.QueueInfo(name="fragments", inst="data_fragments_q", dir="output"),
                            ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]

    if FLX_INPUT:
        mod_specs.append(mspec("flxcard_0", "FelixCardReader", [
                        app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output")
                            for idx in range(0,min(5, NUMBER_OF_DATA_PRODUCERS))
                        ]))
        if NUMBER_OF_DATA_PRODUCERS > 5:
            mod_specs.append(mspec("flxcard_1", "FelixCardReader", [
                            app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output")
                                for idx in range(5, NUMBER_OF_DATA_PRODUCERS)
                            ]))
    else:
        mod_specs.append(mspec("fake_source", "FakeCardReader", [
                        app.QueueInfo(name=f"output_{idx}", inst=f"wib_link_{idx}", dir="output")
                            for idx in range(NUMBER_OF_DATA_PRODUCERS)
                        ]))

    init_specs = app.Init(queues=queue_specs, modules=mod_specs)

    initcmd = rccmd.RCCommand(
        id=basecmd.CmdId("init"),
        entry_state="NONE",
        exit_state="INITIAL",
        data=init_specs
    )

    confcmd = mrccmd("conf", "INITIAL", "CONFIGURED",[
                ("ntoq_trigdec", ntoq.Conf(msg_type="dunedaq::dfmessages::TriggerDecision",
                                           msg_module_name="TriggerDecisionNQ",
                                           receiver_config=nor.Conf(ipm_plugin_type="ZmqReceiver",
                                                                    address=network_endpoints["trigdec"])
                                           )
                 ),

                ("qton_token", qton.Conf(msg_type="dunedaq::dfmessages::TriggerDecisionToken",
                                           msg_module_name="TriggerDecisionTokenNQ",
                                           sender_config=nos.Conf(ipm_plugin_type="ZmqSender",
                                                                  address=network_endpoints["triginh"],
                                                                  stype="msgpack")
                                           )
                 ),

                ("qton_timesync", qton.Conf(msg_type="dunedaq::dfmessages::TimeSync",
                                            msg_module_name="TimeSyncNQ",
                                            sender_config=nos.Conf(ipm_plugin_type="ZmqSender",
                                                                   address=network_endpoints["timesync"],
                                                                   stype="msgpack")
                                           )
                ),

                ("rqg", rqg.ConfParams(
                        map=rqg.mapgeoidqueue([
                                rqg.geoidinst(apa=0, link=idx, queueinstance=f"data_requests_{idx}") for idx in range(NUMBER_OF_DATA_PRODUCERS)
                            ])
                        )),
                ("ffr", ffr.ConfParams(
                            general_queue_timeout=QUEUE_POP_WAIT_MS
                        )),
                ("datawriter", dw.ConfParams(
                            initial_token_count=TOKEN_COUNT,
                            data_store_parameters=hdf5ds.ConfParams(
                                name="data_store",
                                # type = "HDF5DataStore", # default
                                directory_path=OUTPUT_PATH,  # default
                                # mode = "all-per-file", # default
                                max_file_size_bytes=1073741824,
                                disable_unique_filename_suffix=False,
                                filename_parameters=hdf5ds.HDF5DataStoreFileNameParams(
                                    overall_prefix="swtest",
                                    digits_for_run_number=6,
                                    file_index_prefix="",
                                    digits_for_file_index=4,
                                ),
                                file_layout_parameters=hdf5ds.HDF5DataStoreFileLayoutParams(
                                    trigger_record_name_prefix="TriggerRecord",
                                    digits_for_trigger_number=5,
                                    digits_for_apa_number=3,
                                    digits_for_link_number=2,
                                )
                            )
                        )),
                ("fake_source", fakecr.Conf(
                            link_ids=list(range(NUMBER_OF_DATA_PRODUCERS)),
                            # input_limit=10485100, # default
                            rate_khz=CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000),
                            raw_type="wib",
                            data_filename=DATA_FILE,
                            queue_timeout_ms=QUEUE_POP_WAIT_MS
                        )),
                ("flxcard_0", flxcr.Conf(
                            card_id=0,
                            logical_unit=0,
                            dma_id=0,
                            chunk_trailer_size=32,
                            dma_block_size_kb=4,
                            dma_memory_size_gb=4,
                            numa_id=0,
                            num_links=min(5, NUMBER_OF_DATA_PRODUCERS)
                        )),
                ("flxcard_1", flxcr.Conf(
                            card_id=0,
                            logical_unit=1,
                            dma_id=0,
                            chunk_trailer_size=32,
                            dma_block_size_kb=4,
                            dma_memory_size_gb=4,
                            numa_id=0,
                            num_links=max(0, NUMBER_OF_DATA_PRODUCERS - 5)
                        )),
            ] + [
                (f"datahandler_{idx}", dlh.Conf(
                        raw_type="wib",
                        emulator_mode=EMULATOR_MODE,
                        # fake_trigger_flag=0, # default
                        source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                        latency_buffer_size=3 * CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                        pop_limit_pct=0.8,
                        pop_size_pct=0.1,
                        apa_number=0,
                        link_number=idx
                        )) for idx in range(NUMBER_OF_DATA_PRODUCERS)
            ])

    startpars = rccmd.StartParams(run=RUN_NUMBER, trigger_interval_ticks=trg_interval_ticks, disable_data_storage=DISABLE_OUTPUT)
    startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [
            ("qton_token", startpars),
            ("datawriter", startpars),
            ("ffr", startpars),
            ("qton_timesync", startpars),
            ("datahandler_.*", startpars),
            ("fake_source", startpars),
            ("flxcard.*", startpars),
            ("rqg", startpars),
            ("ntoq_trigdec", startpars),
        ])

    stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [
            ("ntoq_trigdec", None),
            ("rqg", None),
            ("flxcard.*", None),
            ("fake_source", None),
            ("datahandler_.*", None),
            ("qton_timesync", None),
            ("ffr", None),
            ("datawriter", None),
            ("qton_token", None),
        ])

    pausecmd = mrccmd("pause", "RUNNING", "RUNNING", [
            ("", None)
        ])

    resumecmd = mrccmd("resume", "RUNNING", "RUNNING", [
            ("tde", tde.ResumeParams(
                            trigger_interval_ticks=trg_interval_ticks
                        ))
        ])

    scrapcmd = mrccmd("scrap", "CONFIGURED", "INITIAL", [
            ("", None)
        ])

    # Create a list of commands
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd, pausecmd, resumecmd, scrapcmd]

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr
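
# A hypothetical driver for generate_df(), reusing the endpoint map shape from
# the sketch after generate() above; the return value is already a JSON string:
#
#   json_blob = generate_df(endpoints, NUMBER_OF_DATA_PRODUCERS=2, FLX_INPUT=False)
#   with open("df_app.json", "w") as f:
#       f.write(json_blob)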
Example #4
def generate(NW_SPECS,
             RU_CONFIG=[],
             HOSTIDX=0,
             RUN_NUMBER=333,
             OUTPUT_PATH=".",
             TOKEN_COUNT=0,
             SYSTEM_TYPE="TPC",
             SOFTWARE_TPG_ENABLED=False,
             TPSET_WRITING_ENABLED=False,
             PARTITION="UNKNOWN",
             OPERATIONAL_ENVIRONMENT="swtest",
             TPC_REGION_NAME_PREFIX="APA",
             MAX_FILE_SIZE=4 * 1024 * 1024 * 1024):
    """Generate the json configuration for the readout and DF process"""

    cmd_data = {}

    required_eps = {PARTITION + f'.trigdec_{HOSTIDX}', PARTITION + '.triginh'}
    if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}"
        )

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(
            inst="trigger_decision_q", kind='FollySPSCQueue', capacity=100),
        app.QueueSpec(
            inst="trigger_record_q", kind='FollySPSCQueue', capacity=100),
        app.QueueSpec(
            inst="data_fragments_q", kind='FollyMPMCQueue', capacity=1000),
    ] + ([
        app.QueueSpec(
            inst="tpsets_from_netq", kind='FollyMPMCQueue', capacity=1000),
    ] if TPSET_WRITING_ENABLED else [])

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("trigdec_receiver", "TriggerDecisionReceiver", [
            app.QueueInfo(
                name="output", inst="trigger_decision_q", dir="output")
        ]),
        mspec("fragment_receiver", "FragmentReceiver", [
            app.QueueInfo(name="output", inst="data_fragments_q", dir="output")
        ]),
        mspec("trb", "TriggerRecordBuilder", [
            app.QueueInfo(name="trigger_decision_input_queue",
                          inst="trigger_decision_q",
                          dir="input"),
            app.QueueInfo(name="trigger_record_output_queue",
                          inst="trigger_record_q",
                          dir="output"),
            app.QueueInfo(name="data_fragment_input_queue",
                          inst="data_fragments_q",
                          dir="input")
        ]),
        mspec("datawriter", "DataWriter", [
            app.QueueInfo(name="trigger_record_input_queue",
                          inst="trigger_record_q",
                          dir="input")
        ]),
    ] + ([
        mspec(f"tpset_subscriber_{idx}", "NetworkToQueue", [
            app.QueueInfo(
                name="output", inst="tpsets_from_netq", dir="output")
        ]) for idx in range(len(RU_CONFIG))
    ] if TPSET_WRITING_ENABLED else []) + ([
        mspec("tpswriter", "TPSetWriter", [
            app.QueueInfo(
                name="tpset_source", inst="tpsets_from_netq", dir="input")
        ])
    ] if TPSET_WRITING_ENABLED else [])

    cmd_data['init'] = app.Init(queues=queue_specs,
                                modules=mod_specs,
                                nwconnections=NW_SPECS)

    total_link_count = 0
    for ru in range(len(RU_CONFIG)):
        total_link_count += RU_CONFIG[ru]["channel_count"]

    cmd_data['conf'] = acmd([
        ("trigdec_receiver",
         tdrcv.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS,
                          connection_name=f"{PARTITION}.trigdec_{HOSTIDX}")),
        ("trb",
         trb.ConfParams(
             general_queue_timeout=QUEUE_POP_WAIT_MS,
             reply_connection_name=f"{PARTITION}.frags_{HOSTIDX}",
             map=trb.mapgeoidconnections([
                 trb.geoidinst(region=RU_CONFIG[ru]["region_id"],
                               element=idx + RU_CONFIG[ru]["start_channel"],
                               system=SYSTEM_TYPE,
                               connection_name=f"{PARTITION}.datareq_{ru}")
                 for ru in range(len(RU_CONFIG))
                 for idx in range(RU_CONFIG[ru]["channel_count"])
             ] + ([
                 trb.geoidinst(region=RU_CONFIG[ru]["region_id"],
                               element=idx + RU_CONFIG[ru]["start_channel"] +
                               total_link_count,
                               system=SYSTEM_TYPE,
                               connection_name=f"{PARTITION}.datareq_{ru}")
                 for ru in range(len(RU_CONFIG))
                 for idx in range(RU_CONFIG[ru]["channel_count"])
             ] if SOFTWARE_TPG_ENABLED else []) + ([
                 trb.geoidinst(region=RU_CONFIG[ru]["region_id"],
                               element=idx + RU_CONFIG[ru]["start_channel"],
                               system="DataSelection",
                               connection_name=f"{PARTITION}.ds_tp_datareq_0")
                 for ru in range(len(RU_CONFIG))
                 for idx in range(RU_CONFIG[ru]["channel_count"])
             ] if SOFTWARE_TPG_ENABLED else [])))),
        ("datawriter",
         dw.ConfParams(
             decision_connection=f"{PARTITION}.trigdec_{HOSTIDX}",
             token_connection=PARTITION + ".triginh",
             data_store_parameters=hdf5ds.ConfParams(
                 name="data_store",
                 version=3,
                 operational_environment=OPERATIONAL_ENVIRONMENT,
                 directory_path=OUTPUT_PATH,
                 max_file_size_bytes=MAX_FILE_SIZE,
                 disable_unique_filename_suffix=False,
                 filename_parameters=hdf5ds.FileNameParams(
                     overall_prefix=OPERATIONAL_ENVIRONMENT,
                     digits_for_run_number=6,
                     file_index_prefix="",
                     digits_for_file_index=4,
                 ),
                 file_layout_parameters=hdf5ds.FileLayoutParams(
                     trigger_record_name_prefix="TriggerRecord",
                     digits_for_trigger_number=5,
                     path_param_list=hdf5ds.PathParamList([
                         hdf5ds.PathParams(
                             detector_group_type="TPC",
                             detector_group_name="TPC",
                             region_name_prefix=TPC_REGION_NAME_PREFIX,
                             element_name_prefix="Link"),
                         hdf5ds.PathParams(detector_group_type="PDS",
                                           detector_group_name="PDS"),
                         hdf5ds.PathParams(detector_group_type="NDLArTPC",
                                           detector_group_name="NDLArTPC"),
                         hdf5ds.PathParams(detector_group_type="Trigger",
                                           detector_group_name="Trigger"),
                         hdf5ds.PathParams(detector_group_type="TPC_TP",
                                           detector_group_name="TPC",
                                           region_name_prefix="TP_APA",
                                           element_name_prefix="Link")
                     ]))))),
    ] + [
        ("fragment_receiver",
         frcv.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS,
                         connection_name=f"{PARTITION}.frags_{HOSTIDX}")),
    ] + [(f"tpset_subscriber_{idx}",
          ntoq.Conf(msg_type="dunedaq::trigger::TPSet",
                    msg_module_name="TPSetNQ",
                    receiver_config=nor.Conf(name=f'{PARTITION}.tpsets_{idx}',
                                             subscriptions=["TPSets"])))
         for idx in range(len(RU_CONFIG))] + (
             [("tpswriter", tpsw.ConfParams(max_file_size_bytes=1000000000, )
               )] if TPSET_WRITING_ENABLED else []))

    startpars = rccmd.StartParams(run=RUN_NUMBER)
    cmd_data['start'] = acmd(
        [] + ([("tpswriter",
                startpars), ("tpset_subscriber_.*",
                             startpars)] if TPSET_WRITING_ENABLED else []) +
        [("datawriter", startpars), ("fragment_receiver", startpars),
         ("trb", startpars), ("trigdec_receiver", startpars)])

    cmd_data['stop'] = acmd([
        ("trigdec_receiver", None),
        ("trb", None),
        ("fragment_receiver", None),
        ("datawriter", None),
    ] + ([("tpset_subscriber_.*",
           None), ("tpswriter", None)] if TPSET_WRITING_ENABLED else []))

    cmd_data['pause'] = acmd([("", None)])

    cmd_data['resume'] = acmd([("", None)])

    cmd_data['scrap'] = acmd([("fragment_receiver", None),
                              ("trigdec_receiver", None),
                              ("qton_token", None)])

    cmd_data['record'] = acmd([("", None)])

    return cmd_data
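
# A hypothetical invocation sketch for the NW_SPECS-based generate() above.
# The real network-spec type is not shown in the snippet, so a stand-in with
# just the required .name attribute is used; all values are illustrative.
#
#   from collections import namedtuple
#   NwSpec = namedtuple("NwSpec", "name")
#   nw_specs = [NwSpec("mypart.trigdec_0"), NwSpec("mypart.triginh")]
#   ru_config = [{"region_id": 0, "start_channel": 0, "channel_count": 2}]
#   cmd_data = generate(nw_specs, RU_CONFIG=ru_config, HOSTIDX=0, PARTITION="mypart")
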
def generate(NUMBER_OF_DATA_PRODUCERS=2,
             EMULATOR_MODE=False,
             DATA_RATE_SLOWDOWN_FACTOR=10,
             RUN_NUMBER=333,
             TRIGGER_RATE_HZ=1.0,
             DATA_FILE="./frames.bin",
             OUTPUT_PATH=".",
             DISABLE_OUTPUT=False,
             TOKEN_COUNT=10):
    """Generate the json configuration for the readout and DF process"""

    trigger_interval_ticks = math.floor(
        (1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR)

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100),
        app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(
            inst="trigger_decision_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(inst="trigger_decision_copy_for_bookkeeping",
                      kind='FollySPSCQueue',
                      capacity=20),
        app.QueueSpec(
            inst="trigger_record_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(
            inst="data_fragments_q", kind='FollyMPMCQueue', capacity=100),
    ] + [
        app.QueueSpec(
            inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=20)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(inst=f"wib_fake_link_{idx}",
                      kind='FollySPSCQueue',
                      capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("tde", "TriggerDecisionEmulator", [
            app.QueueInfo(
                name="time_sync_source", inst="time_sync_q", dir="input"),
            app.QueueInfo(name="token_source", inst="token_q", dir="input"),
            app.QueueInfo(name="trigger_decision_sink",
                          inst="trigger_decision_q",
                          dir="output"),
        ]),
        mspec("rqg", "RequestGenerator", [
            app.QueueInfo(name="trigger_decision_input_queue",
                          inst="trigger_decision_q",
                          dir="input"),
            app.QueueInfo(name="trigger_decision_for_event_building",
                          inst="trigger_decision_copy_for_bookkeeping",
                          dir="output"),
        ] + [
            app.QueueInfo(name=f"data_request_{idx}_output_queue",
                          inst=f"data_requests_{idx}",
                          dir="output")
            for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),
        mspec("ffr", "FragmentReceiver", [
            app.QueueInfo(name="trigger_decision_input_queue",
                          inst="trigger_decision_copy_for_bookkeeping",
                          dir="input"),
            app.QueueInfo(name="trigger_record_output_queue",
                          inst="trigger_record_q",
                          dir="output"),
            app.QueueInfo(name="data_fragment_input_queue",
                          inst="data_fragments_q",
                          dir="input"),
        ]),
        mspec("datawriter", "DataWriter", [
            app.QueueInfo(name="trigger_record_input_queue",
                          inst="trigger_record_q",
                          dir="input"),
            app.QueueInfo(
                name="token_output_queue", inst="token_q", dir="output"),
        ]),
        mspec("fake_source", "FakeCardReader", [
            app.QueueInfo(name=f"output_{idx}",
                          inst=f"wib_fake_link_{idx}",
                          dir="output")
            for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),
    ] + [
        mspec(f"datahandler_{idx}", "DataLinkHandler", [
            app.QueueInfo(
                name="raw_input", inst=f"wib_fake_link_{idx}", dir="input"),
            app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"),
            app.QueueInfo(
                name="requests", inst=f"data_requests_{idx}", dir="input"),
            app.QueueInfo(
                name="fragments", inst="data_fragments_q", dir="output"),
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]

    init_specs = app.Init(queues=queue_specs, modules=mod_specs)

    jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
    print(jstr)

    initcmd = rccmd.RCCommand(id=basecmd.CmdId("init"),
                              entry_state="NONE",
                              exit_state="INITIAL",
                              data=init_specs)

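    # Token bookkeeping convention, as implied by the branch below: a positive
    # TOKEN_COUNT seeds the trigger emulator with initial tokens, while a
    # negative value gives |TOKEN_COUNT| initial tokens to the dataflow writer
    # instead.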
    if TOKEN_COUNT > 0:
        df_token_count = 0
        trigemu_token_count = TOKEN_COUNT
    else:
        df_token_count = -1 * TOKEN_COUNT
        trigemu_token_count = 0

    confcmd = mrccmd(
        "conf",
        "INITIAL",
        "CONFIGURED",
        [
            (
                "tde",
                tde.ConfParams(
                    links=[idx for idx in range(NUMBER_OF_DATA_PRODUCERS)],
                    min_links_in_request=NUMBER_OF_DATA_PRODUCERS,
                    max_links_in_request=NUMBER_OF_DATA_PRODUCERS,
                    min_readout_window_ticks=1200,
                    max_readout_window_ticks=1200,
                    trigger_window_offset=1000,
                    # The delay is set to put the trigger well within the latency buffer
                    trigger_delay_ticks=math.floor(
                        2 * CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR),
                    # We divide the trigger interval by
                    # DATA_RATE_SLOWDOWN_FACTOR so the triggers are still
                    # emitted per (wall-clock) second, rather than being
                    # spaced out further
                    trigger_interval_ticks=trigger_interval_ticks,
                    clock_frequency_hz=CLOCK_SPEED_HZ /
                    DATA_RATE_SLOWDOWN_FACTOR,
                    initial_token_count=trigemu_token_count)),
            ("rqg",
             rqg.ConfParams(map=rqg.mapgeoidqueue([
                 rqg.geoidinst(
                     apa=0, link=idx, queueinstance=f"data_requests_{idx}")
                 for idx in range(NUMBER_OF_DATA_PRODUCERS)
             ]))),
            ("ffr", ffr.ConfParams(general_queue_timeout=QUEUE_POP_WAIT_MS)),
            (
                "datawriter",
                dw.ConfParams(
                    initial_token_count=df_token_count,
                    data_store_parameters=hdf5ds.ConfParams(
                        name="data_store",
                        # type = "HDF5DataStore", # default
                        directory_path=OUTPUT_PATH,  # default
                        # mode = "all-per-file", # default
                        max_file_size_bytes=1073741824,
                        filename_parameters=hdf5ds.HDF5DataStoreFileNameParams(
                            overall_prefix="swtest",
                            digits_for_run_number=6,
                            file_index_prefix="",
                            digits_for_file_index=4,
                        ),
                        file_layout_parameters=hdf5ds.HDF5DataStoreFileLayoutParams(
                            trigger_record_name_prefix="TriggerRecord",
                            digits_for_trigger_number=5,
                            digits_for_apa_number=3,
                            digits_for_link_number=2,
                        )))),
            (
                "fake_source",
                fcr.Conf(
                    link_ids=list(range(NUMBER_OF_DATA_PRODUCERS)),
                    # input_limit=10485100, # default
                    rate_khz=CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000),
                    raw_type="wib",
                    data_filename=DATA_FILE,
                    queue_timeout_ms=QUEUE_POP_WAIT_MS)),
        ] + [
            (
                f"datahandler_{idx}",
                dlh.Conf(
                    raw_type="wib",
                    emulator_mode=EMULATOR_MODE,
                    # fake_trigger_flag=0, # default
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    apa_number=0,
                    link_number=idx))
            for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ])

    jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
    print(jstr)

    startpars = rccmd.StartParams(
        run=RUN_NUMBER,
        trigger_interval_ticks=trigger_interval_ticks,
        disable_data_storage=DISABLE_OUTPUT)
    startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [
        ("datawriter", startpars),
        ("ffr", startpars),
        ("datahandler_.*", startpars),
        ("fake_source", startpars),
        ("rqg", startpars),
        ("tde", startpars),
    ])

    jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStart\n\n", jstr)

    stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [
        ("tde", None),
        ("rqg", None),
        ("fake_source", None),
        ("datahandler_.*", None),
        ("ffr", None),
        ("datawriter", None),
    ])

    jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStop\n\n", jstr)

    pausecmd = mrccmd("pause", "RUNNING", "RUNNING", [("", None)])

    jstr = json.dumps(pausecmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nPause\n\n", jstr)

    resumecmd = mrccmd(
        "resume", "RUNNING", "RUNNING",
        [("tde",
          tde.ResumeParams(trigger_interval_ticks=trigger_interval_ticks))])

    jstr = json.dumps(resumecmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nResume\n\n", jstr)

    scrapcmd = mrccmd("scrap", "CONFIGURED", "INITIAL", [("", None)])

    jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nScrap\n\n", jstr)

    # Create a list of commands
    cmd_seq = [
        initcmd, confcmd, startcmd, stopcmd, pausecmd, resumecmd, scrapcmd
    ]

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr
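
# A hypothetical end-to-end driver for this standalone variant (no network
# endpoints are required; all connections are in-process queues):
#
#   blob = generate(NUMBER_OF_DATA_PRODUCERS=2, TOKEN_COUNT=10, OUTPUT_PATH="/tmp")
#   with open("standalone_readout_df.json", "w") as f:
#       f.write(blob)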