Example #1
0
def generate(INPUT_FILE: str, SLOWDOWN_FACTOR: float, NETWORK_ENDPOINTS: dict):
    """Build the command data for a TriggerPrimitiveMaker -> QueueToNetwork app.

    Args:
        INPUT_FILE: path to the file the TriggerPrimitiveMaker replays.
        SLOWDOWN_FACTOR: divisor applied to the nominal 50 MHz clock.
        NETWORK_ENDPOINTS: endpoint-name -> address mapping; the "tpset"
            entry is used as the publisher address.

    Returns:
        dict mapping command names ("init", "conf", ...) to command objects.
    """
    # Effective clock frequency once the slowdown factor is applied.
    clock_frequency_hz = 50000000 / SLOWDOWN_FACTOR

    # Single queue carrying TPSets from the maker to the network sender.
    queues = [
        app.QueueSpec(inst="tpset_q", kind="FollySPSCQueue", capacity=1000),
    ]

    modules = [
        mspec("tpm", "TriggerPrimitiveMaker",
              [app.QueueInfo(name="tpset_sink", inst="tpset_q", dir="output")]),
        mspec("qton", "QueueToNetwork",
              [app.QueueInfo(name="input", inst="tpset_q", dir="input")]),
    ]

    tpm_conf = tpm.ConfParams(
        filename=INPUT_FILE,
        number_of_loops=-1,  # Infinite
        tpset_time_offset=0,
        tpset_time_width=10000,  # 0.2 ms
        clock_frequency_hz=clock_frequency_hz)

    qton_conf = qton.Conf(
        msg_type="dunedaq::trigger::TPSet",
        msg_module_name="TPSetNQ",
        sender_config=nos.Conf(ipm_plugin_type="ZmqPublisher",
                               address=NETWORK_ENDPOINTS["tpset"],
                               topic="foo",
                               stype="msgpack"))

    startpars = rccmd.StartParams(run=1, disable_data_storage=False)

    return {
        'init': app.Init(queues=queues, modules=modules),
        'conf': acmd([("tpm", tpm_conf), ("qton", qton_conf)]),
        'start': acmd([("qton", startpars), ("tpm", startpars)]),
        'pause': acmd([]),
        'resume': acmd([]),
        'stop': acmd([("tpm", None), ("qton", None)]),
        'scrap': acmd([]),
    }
Example #2
0
def add_network(app_name, the_system, verbose=False):
    """Add the necessary QueueToNetwork and NetworkToQueue objects to the
       application named `app_name`, based on the inter-application
       connections specified in `the_system`. NB `the_system` is modified
       in-place.

       For each app connection: an nwmgr Connection is created on demand,
       a QueueToNetwork module is added when this app is the sender side,
       and a NetworkToQueue module is added for each receiver endpoint that
       belongs to this app."""

    # if the_system.network_endpoints is None:
    #     the_system.network_endpoints=assign_network_endpoints(the_system)

    if verbose:
        console.log(f"---- add_network for {app_name} ----")
    app = the_system.apps[app_name]

    # Work on a copy of the module list; it is swapped back into the
    # modulegraph only at the very end.
    modules_with_network = deepcopy(app.modulegraph.modules)

    # Endpoints of this app not yet matched to any connection; whatever is
    # left over at the end is reported with a warning.
    unconnected_endpoints = set(app.modulegraph.endpoints.keys())

    if verbose:
        console.log(f"Endpoints to connect are: {unconnected_endpoints}")

    for conn_name, app_connection in the_system.app_connections.items():
        if verbose:
            console.log(
                f"conn_name {conn_name}, app_connection {app_connection}")

        # Create the nwmgr connection if it doesn't already exist
        if not the_system.has_network_endpoint(
                app_connection.nwmgr_connection):
            # IPM connections have the following confusing behaviour:
            # whether the connection is pub/sub or direct connection
            # is determined by whether the list of topics is empty;
            # and the end that binds is upstream for pub/sub
            # connections and downstream for direct connections
            is_pubsub = app_connection.topics != []
            # Host placeholder: the binding end's app name (sender for
            # pub/sub, first receiver's app for direct connections).
            bind_host = app_name if is_pubsub else app_connection.receivers[
                0].split(".")[0]
            port = the_system.next_unassigned_port()
            # {host_<app>} is left as a template to be filled in later.
            address = f"tcp://{{host_{bind_host}}}:{port}"
            if verbose:
                console.log(
                    f"Assigning address {address} for connection {app_connection.nwmgr_connection}"
                )
            the_system.network_endpoints.append(
                nwmgr.Connection(name=app_connection.nwmgr_connection,
                                 topics=app_connection.topics,
                                 address=address))
        # Connection names are "<app>.<endpoint>"; split off the app part.
        from_app, from_endpoint = conn_name.split(".", maxsplit=1)

        if from_app == app_name:
            # This app is the sending side of the connection.
            if from_endpoint in unconnected_endpoints:
                unconnected_endpoints.remove(from_endpoint)
            from_endpoint_internal = resolve_endpoint(app, from_endpoint,
                                                      Direction.OUT)
            if from_endpoint_internal is None:
                # The module.endpoint for this external endpoint was
                # specified as None, so we assume it was a direct
                # nwmgr sender, and don't make a qton for it
                if verbose:
                    console.log(
                        f"{conn_name} specifies its internal endpoint as None, so not creating a QtoN for it"
                    )
                continue
            # Internal endpoints are "<module>.<sink>".
            from_endpoint_module_name, from_endpoint_sink = from_endpoint_internal.split(
                ".")
            # We're a publisher or sender. Make the queue to network
            qton_name = conn_name.replace(".", "_")
            qton_name = make_unique_name(qton_name, modules_with_network)

            if verbose:
                console.log(
                    f"Adding QueueToNetwork named {qton_name} connected to {from_endpoint_internal} in app {app_name}"
                )
            nwmgr_connection_name = app_connection.nwmgr_connection
            nwmgr_connection = the_system.get_network_endpoint(
                nwmgr_connection_name)
            # Only the first topic (if any) is forwarded by the sender.
            topic = nwmgr_connection.topics[
                0] if nwmgr_connection.topics else ""
            modules_with_network.append(
                DAQModule(
                    name=qton_name,
                    plugin="QueueToNetwork",
                    connections={},  # No outgoing connections
                    conf=qton.Conf(
                        msg_type=app_connection.msg_type,
                        msg_module_name=app_connection.msg_module_name,
                        sender_config=nos.Conf(name=nwmgr_connection_name,
                                               topic=topic))))
            # Connect the module to the QueueToNetwork
            from_endpoint_module = None
            for mod in modules_with_network:
                if mod.name == from_endpoint_module_name:
                    from_endpoint_module = mod
                    break
            # NOTE(review): if no module matched from_endpoint_module_name,
            # from_endpoint_module is still None and the next line raises
            # AttributeError — confirm this case is meant to be impossible.
            mod_connections = from_endpoint_module.connections
            mod_connections[from_endpoint_sink] = Connection(
                f"{qton_name}.input")

        if verbose:
            console.log(
                f"app_connection.receivers is {app_connection.receivers}")
        # Now handle any receiving endpoints that belong to this app.
        for receiver in app_connection.receivers:
            to_app, to_endpoint = receiver.split(".", maxsplit=1)
            if to_app == app_name:
                if to_endpoint in unconnected_endpoints:
                    unconnected_endpoints.remove(to_endpoint)
                to_endpoint_internal = resolve_endpoint(
                    app, to_endpoint, Direction.IN)
                if to_endpoint_internal is None:
                    # The module.endpoint for this external endpoint was
                    # specified as None, so we assume it was a direct
                    # nwmgr sender, and don't make a ntoq for it
                    if verbose:
                        console.log(
                            f"{to_endpoint} specifies its internal endpoint as None, so not creating a NtoQ for it"
                        )
                    continue

                ntoq_name = receiver.replace(".", "_")
                ntoq_name = make_unique_name(ntoq_name, modules_with_network)

                if verbose:
                    console.log(
                        f"Adding NetworkToQueue named {ntoq_name} connected to {to_endpoint_internal} in app {app_name}"
                    )

                nwmgr_connection_name = app_connection.nwmgr_connection
                nwmgr_connection = the_system.get_network_endpoint(
                    nwmgr_connection_name)

                # Receiver subscribes to every topic on the connection.
                modules_with_network.append(
                    DAQModule(
                        name=ntoq_name,
                        plugin="NetworkToQueue",
                        connections={
                            "output": Connection(to_endpoint_internal)
                        },
                        conf=ntoq.Conf(
                            msg_type=app_connection.msg_type,
                            msg_module_name=app_connection.msg_module_name,
                            receiver_config=nor.Conf(
                                name=nwmgr_connection_name,
                                subscriptions=nwmgr_connection.topics))))

    if unconnected_endpoints:
        # TODO: Use proper logging
        console.log(
            f"Warning: the following endpoints of {app_name} were not connected to anything: {unconnected_endpoints}"
        )

    # Swap the augmented module list back into the app (in-place update).
    app.modulegraph.modules = modules_with_network
Example #3
0
def generate(NW_SPECS: list,
             RUN_NUMBER=333,
             CLOCK_SPEED_HZ: int = 50000000,
             TRIGGER_RATE_HZ: int = 1,
             CONTROL_HSI_HARDWARE=False,
             READOUT_PERIOD_US: float = 1e3,
             HSI_ENDPOINT_ADDRESS=1,
             HSI_ENDPOINT_PARTITION=0,
             HSI_RE_MASK=0x20000,
             HSI_FE_MASK=0,
             HSI_INV_MASK=0,
             HSI_SOURCE=1,
             CONNECTIONS_FILE="${TIMING_SHARE}/config/etc/connections.xml",
             HSI_DEVICE_NAME="BOREAS_TLU",
             UHAL_LOG_LEVEL="notice",
             PARTITION="UNKNOWN"):
    """Generate command data for an HSI readout application.

    Builds the init/conf/start/stop/pause/resume/scrap/record commands for
    an HSIReadout module and, when CONTROL_HSI_HARDWARE is True, for an
    HSIController module plus the QueueToNetwork module that forwards
    timing hardware commands.

    Args:
        NW_SPECS: list of nwmgr connection objects available to the app.
        RUN_NUMBER: run number placed in the start command.
        CLOCK_SPEED_HZ: clock frequency used to convert the trigger rate
            into an interval in ticks.
        TRIGGER_RATE_HZ: requested (emulated) trigger rate; 0 disables the
            interval computation.
        CONTROL_HSI_HARDWARE: if True, also instantiate the HSIController
            and its hardware-command queue/sender.
        READOUT_PERIOD_US: HSIReadout polling period in microseconds.
        HSI_ENDPOINT_ADDRESS, HSI_ENDPOINT_PARTITION, HSI_RE_MASK,
        HSI_FE_MASK, HSI_INV_MASK, HSI_SOURCE: HSIController parameters,
            passed through to hsic.ConfParams.
        CONNECTIONS_FILE: uHAL connections file for the timing hardware.
        HSI_DEVICE_NAME: uHAL device name of the HSI board.
        UHAL_LOG_LEVEL: log level for the uHAL library.
        PARTITION: partition name used to derive network endpoint names.

    Raises:
        RuntimeError: if the required endpoints are missing from NW_SPECS.

    Returns:
        dict mapping command names to command objects.
    """
    cmd_data = {}

    # Network endpoints this app requires to exist in the system spec.
    required_eps = {PARTITION + '.hsievent'}
    if CONTROL_HSI_HARDWARE:
        required_eps.add(PARTITION + '.hsicmds')

    available_eps = [nw.name for nw in NW_SPECS]
    if not required_eps.issubset(available_eps):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join(available_eps)}"
        )

    # Define modules and queues
    queue_bare_specs = []

    if CONTROL_HSI_HARDWARE:
        # Queue carrying timing hardware commands to the QueueToNetwork.
        queue_bare_specs.extend([
            app.QueueSpec(inst="hw_cmds_q_to_net",
                          kind='FollySPSCQueue',
                          capacity=100)
        ])

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("hsir", "HSIReadout", []),
    ]

    if CONTROL_HSI_HARDWARE:
        # Init data for the HSIController, wired to the hardware-command
        # queue. (Previously this was also computed unconditionally above,
        # where it was dead code; it is only needed in this branch.)
        hsi_controller_init_data = hsic.InitParams(
            qinfos=app.QueueInfos([
                app.QueueInfo(name="hardware_commands_out",
                              inst="hw_cmds_q_to_net",
                              dir="output")
            ]),
            device=HSI_DEVICE_NAME,
        )
        mod_specs.extend([
            mspec("qton_hw_cmds", "QueueToNetwork", [
                app.QueueInfo(
                    name="input", inst="hw_cmds_q_to_net", dir="input")
            ]),
            app.ModSpec(inst="hsic",
                        plugin="HSIController",
                        data=hsi_controller_init_data)
        ])

    cmd_data['init'] = app.Init(queues=queue_specs,
                                modules=mod_specs,
                                nwconnections=NW_SPECS)

    conf_cmds = [
        ("hsir",
         hsi.ConfParams(
             connections_file=CONNECTIONS_FILE,
             readout_period=READOUT_PERIOD_US,
             hsi_device_name=HSI_DEVICE_NAME,
             uhal_log_level=UHAL_LOG_LEVEL,
             hsievent_connection_name=f"{PARTITION}.hsievent",
         )),
    ]

    # Convert the requested trigger rate into an interval in clock ticks.
    trigger_interval_ticks = 0
    if TRIGGER_RATE_HZ > 0:
        trigger_interval_ticks = math.floor(
            (1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ)
    elif CONTROL_HSI_HARDWARE:
        console.log(
            'WARNING! Emulated trigger rate of 0 will not disable signal emulation in real HSI hardware! To disable emulated HSI triggers, use  option: "--hsi-source 0" or mask all signal bits',
            style="bold red")

    if CONTROL_HSI_HARDWARE:
        conf_cmds.extend([
            ("qton_hw_cmds",
             qton.Conf(msg_type="dunedaq::timinglibs::timingcmd::TimingHwCmd",
                       msg_module_name="TimingHwCmdNQ",
                       sender_config=nos.Conf(name=PARTITION + ".hsicmds",
                                              stype="msgpack"))),
            ("hsic",
             hsic.ConfParams(
                 clock_frequency=CLOCK_SPEED_HZ,
                 trigger_interval_ticks=trigger_interval_ticks,
                 address=HSI_ENDPOINT_ADDRESS,
                 partition=HSI_ENDPOINT_PARTITION,
                 rising_edge_mask=HSI_RE_MASK,
                 falling_edge_mask=HSI_FE_MASK,
                 invert_edge_mask=HSI_INV_MASK,
                 data_source=HSI_SOURCE,
             )),
        ])
    cmd_data['conf'] = acmd(conf_cmds)

    startpars = rccmd.StartParams(
        run=RUN_NUMBER, trigger_interval_ticks=trigger_interval_ticks)
    resumepars = rccmd.ResumeParams(
        trigger_interval_ticks=trigger_interval_ticks)

    cmd_data['start'] = acmd([("hsi.*", startpars), ("qton_.*", startpars)])

    cmd_data['stop'] = acmd([("hsi.*", None), ("qton.*", None)])

    cmd_data['pause'] = acmd([("", None)])

    # Only the real HSIController reacts to resume with new parameters.
    if CONTROL_HSI_HARDWARE:
        cmd_data['resume'] = acmd([("hsic", resumepars)])
    else:
        cmd_data['resume'] = acmd([("", None)])

    cmd_data['scrap'] = acmd([("", None)])

    cmd_data['record'] = acmd([("", None)])

    return cmd_data
Example #4
0
def generate(FRONTEND_TYPE='wib',
             NUMBER_OF_DATA_PRODUCERS=1,
             NUMBER_OF_TP_PRODUCERS=1,
             DATA_RATE_SLOWDOWN_FACTOR=1,
             RUN_NUMBER=333,
             DATA_FILE="./frames.bin"):
    """Generate command data for a fake-source readout app with TP handling.

    Wires a FakeCardReader to per-link DataLinkHandlers, per-link
    DataRecorders, timesync/fragment consumer modules, per-link TP
    handlers, and per-link QueueToNetwork TPSet publishers.

    Args:
        FRONTEND_TYPE: frontend name used in queue instance names.
        NUMBER_OF_DATA_PRODUCERS: number of data links to instantiate.
        NUMBER_OF_TP_PRODUCERS: number of additional TP links.
        DATA_RATE_SLOWDOWN_FACTOR: slowdown divisor for the fake source.
        RUN_NUMBER: run number placed in the start command.
        DATA_FILE: data file the FakeCardReader replays.

    Returns:
        dict mapping command names ("init", "conf", ...) to command objects.

    NOTE(review): CLOCK_SPEED_HZ and QUEUE_POP_WAIT_MS are read from module
    scope — confirm they are defined among this module's constants.
    """

    cmd_data = {}

    # Define modules and queues
    # Shared MPMC queues for timesync/fragments, plus per-link SPSC queues
    # for requests, raw input, recording, TPs and TPSets.
    queue_specs = [
        app.QueueSpec(inst="time_sync_q", kind='FollyMPMCQueue', capacity=100),
        app.QueueSpec(
            inst="data_fragments_q", kind='FollyMPMCQueue', capacity=100),
    ] + [
        app.QueueSpec(
            inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=1000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(inst=f"{FRONTEND_TYPE}_link_{idx}",
                      kind='FollySPSCQueue',
                      capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"tp_link_{idx}", kind='FollySPSCQueue', capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS, NUMBER_OF_DATA_PRODUCERS +
                         NUMBER_OF_TP_PRODUCERS)
    ] + [
        app.QueueSpec(inst=f"{FRONTEND_TYPE}_recording_link_{idx}",
                      kind='FollySPSCQueue',
                      capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"tp_queue_{idx}", kind='FollySPSCQueue', capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"tp_data_requests", kind='FollySPSCQueue', capacity=1000)
    ] + [
        app.QueueSpec(
            inst="tp_recording_link", kind='FollySPSCQueue', capacity=1000)
    ] + [
        app.QueueSpec(
            inst=f"tpset_link_{idx}", kind='FollySPSCQueue', capacity=10000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]

    # One FakeCardReader feeding all links, then per-link handler/recorder
    # modules, shared consumers, and per-link TP handlers and publishers.
    mod_specs = [
        mspec("fake_source", "FakeCardReader", [
            app.QueueInfo(name=f"output_{idx}",
                          inst=f"{FRONTEND_TYPE}_link_{idx}",
                          dir="output")
            for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),
    ] + [
        mspec(f"datahandler_{idx}", "DataLinkHandler", [
            app.QueueInfo(name="raw_input",
                          inst=f"{FRONTEND_TYPE}_link_{idx}",
                          dir="input"),
            app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"),
            app.QueueInfo(
                name="requests", inst=f"data_requests_{idx}", dir="input"),
            app.QueueInfo(
                name="fragments", inst="data_fragments_q", dir="output"),
            app.QueueInfo(name="raw_recording",
                          inst=f"{FRONTEND_TYPE}_recording_link_{idx}",
                          dir="output"),
            app.QueueInfo(name="tp_out", inst=f"tp_queue_{idx}", dir="output"),
            app.QueueInfo(
                name="tpset_out", inst=f"tpset_link_{idx}", dir="output")
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(f"data_recorder_{idx}", "DataRecorder", [
            app.QueueInfo(name="raw_recording",
                          inst=f"{FRONTEND_TYPE}_recording_link_{idx}",
                          dir="input")
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(f"timesync_consumer", "TimeSyncConsumer", [
            app.QueueInfo(name="input_queue", inst=f"time_sync_q", dir="input")
        ])
    ] + [
        mspec(f"fragment_consumer", "FragmentConsumer", [
            app.QueueInfo(
                name="input_queue", inst=f"data_fragments_q", dir="input")
        ])
    ] + [
        mspec(f"tp_handler_{idx}", "DataLinkHandler", [
            app.QueueInfo(
                name="raw_input", inst=f"tp_queue_{idx}", dir="input"),
            app.QueueInfo(name="timesync", inst="time_sync_q", dir="output"),
            app.QueueInfo(
                name="requests", inst="tp_data_requests", dir="input"),
            app.QueueInfo(
                name="fragments", inst="data_fragments_q", dir="output"),
            app.QueueInfo(
                name="raw_recording", inst="tp_recording_link", dir="output")
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(f"tpset_publisher_{idx}", "QueueToNetwork", [
            app.QueueInfo(name="input", inst=f"tpset_link_{idx}", dir="input")
        ]) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]

    cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs)

    # Per-module configuration; publishers bind to sequential local ports.
    cmd_data['conf'] = acmd([
        (
            "fake_source",
            fcr.Conf(
                link_confs=[
                    fcr.LinkConfiguration(
                        geoid=fcr.GeoID(system="TPC", region=0, element=idx),
                        slowdown=DATA_RATE_SLOWDOWN_FACTOR,
                        queue_name=f"output_{idx}",
                        data_filename=DATA_FILE,
                        input_limit=10000000000,
                    ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
                ],
                # input_limit=10485100, # default
                queue_timeout_ms=QUEUE_POP_WAIT_MS,
                set_t0_to=0)),
    ] + [(f"datahandler_{idx}",
          dlh.Conf(source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                   fake_trigger_flag=1,
                   latency_buffer_size=3 * CLOCK_SPEED_HZ /
                   (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                   pop_limit_pct=0.8,
                   pop_size_pct=0.1,
                   apa_number=0,
                   link_number=idx))
         for idx in range(NUMBER_OF_DATA_PRODUCERS)] +
                            [(f"data_recorder_{idx}",
                              bfs.Conf(output_file=f"output_{idx}.out",
                                       stream_buffer_size=8388608))
                             for idx in range(NUMBER_OF_DATA_PRODUCERS)] +
                            [(f"tp_handler_{idx}",
                              dlh.Conf(
                                  source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                                  fake_trigger_flag=1,
                                  latency_buffer_size=3 * CLOCK_SPEED_HZ /
                                  (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                                  pop_limit_pct=0.8,
                                  pop_size_pct=0.1,
                                  apa_number=0,
                                  link_number=0))
                             for idx in range(NUMBER_OF_DATA_PRODUCERS)] +
                            [(f"tpset_publisher_{idx}",
                              qton.Conf(msg_type="dunedaq::trigger::TPSet",
                                        msg_module_name="TPSetNQ",
                                        sender_config=nos.Conf(
                                            ipm_plugin_type="ZmqPublisher",
                                            address='tcp://127.0.0.1:' +
                                            str(5000 + idx),
                                            topic="foo",
                                            stype="msgpack")))
                             for idx in range(NUMBER_OF_DATA_PRODUCERS)])

    startpars = rccmd.StartParams(run=RUN_NUMBER)
    cmd_data['start'] = acmd([("datahandler_.*", startpars),
                              ("fake_source", startpars),
                              ("data_recorder_.*", startpars),
                              ("timesync_consumer", startpars),
                              ("fragment_consumer", startpars),
                              ("tp_handler_.*", startpars),
                              ("tpset_publisher_.*", startpars)])

    cmd_data['pause'] = acmd([])

    cmd_data['resume'] = acmd([])

    cmd_data['stop'] = acmd([("fake_source", None), ("datahandler_.*", None),
                             ("data_recorder_.*", None),
                             ("timesync_consumer", None),
                             ("fragment_consumer", None),
                             ("tp_handler_.*", None),
                             ("tpset_publisher_.*", None)])

    cmd_data['scrap'] = acmd([("fake_source", None), ("datahandler_.*", None),
                              ("data_recorder_.*", None),
                              ("timesync_consumer", None),
                              ("fragment_consumer", None),
                              ("tp_handler_.*", None),
                              ("tpset_publisher_.*", None)])

    return cmd_data
Example #5
0
def generate(
    FRONTEND_TYPE="pacman",
    NUMBER_OF_DATA_PRODUCERS=1,
    NUMBER_OF_TP_PRODUCERS=1,
    DATA_RATE_SLOWDOWN_FACTOR=1,
    ENABLE_SOFTWARE_TPG=False,
    RUN_NUMBER=333,
    DATA_FILE="./frames.bin",
    TP_DATA_FILE="./tp_frames.bin",
):

    # Define modules and queues
    queue_bare_specs = ([
        app.QueueSpec(inst="time_sync_q", kind="FollyMPMCQueue", capacity=100),
        app.QueueSpec(
            inst="data_fragments_q", kind="FollyMPMCQueue", capacity=100),
        app.QueueSpec(
            inst="errored_frames_q", kind="FollyMPMCQueue", capacity=10000),
    ] + [
        app.QueueSpec(
            inst=f"data_requests_{idx}", kind="FollySPSCQueue", capacity=1000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"{FRONTEND_TYPE}_link_{idx}",
            kind="FollySPSCQueue",
            capacity=100000,
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"raw_tp_link_{idx}", kind="FollySPSCQueue", capacity=100000)
        for idx in range(
            NUMBER_OF_DATA_PRODUCERS,
            NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
        )
    ] + [
        app.QueueSpec(
            inst=f"sw_tp_queue_{idx}", kind="FollySPSCQueue", capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"tp_data_requests", kind="FollySPSCQueue", capacity=1000)
    ] + [
        app.QueueSpec(
            inst=f"tpset_link_{idx}", kind="FollySPSCQueue", capacity=10000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ])

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = ([
        mspec("fake_source", "PacmanCardReader", [
            app.QueueInfo(
                name=f"output_{idx}",
                inst=f"{FRONTEND_TYPE}_link_{idx}",
                dir="output",
            ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),
    ] + [
        mspec(
            f"datahandler_{idx}",
            "DataLinkHandler",
            [
                app.QueueInfo(
                    name="raw_input",
                    inst=f"{FRONTEND_TYPE}_link_{idx}",
                    dir="input",
                ),
                app.QueueInfo(
                    name="timesync", inst="time_sync_q", dir="output"),
                app.QueueInfo(name="data_requests_0",
                              inst=f"data_requests_{idx}",
                              dir="input"),
                app.QueueInfo(name="fragment_queue",
                              inst="data_fragments_q",
                              dir="output"),
                app.QueueInfo(
                    name="tp_out", inst=f"sw_tp_queue_{idx}", dir="output"),
                app.QueueInfo(
                    name="tpset_out", inst=f"tpset_link_{idx}", dir="output"),
                app.QueueInfo(name="errored_frames",
                              inst="errored_frames_q",
                              dir="output"),
            ],
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(
            f"timesync_consumer",
            "TimeSyncConsumer",
            [
                app.QueueInfo(
                    name="input_queue", inst=f"time_sync_q", dir="input")
            ],
        )
    ] + [
        mspec(
            f"fragment_consumer",
            "FragmentConsumer",
            [
                app.QueueInfo(
                    name="input_queue", inst=f"data_fragments_q", dir="input")
            ],
        )
    ] + [
        mspec(
            f"sw_tp_handler_{idx}",
            "DataLinkHandler",
            [
                app.QueueInfo(
                    name="raw_input", inst=f"sw_tp_queue_{idx}", dir="input"),
                app.QueueInfo(
                    name="timesync", inst="time_sync_q", dir="output"),
                app.QueueInfo(
                    name="requests", inst="tp_data_requests", dir="input"),
                app.QueueInfo(name="fragment_queue",
                              inst="data_fragments_q",
                              dir="output"),
            ],
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(
            f"tpset_publisher_{idx}",
            "QueueToNetwork",
            [
                app.QueueInfo(
                    name="input", inst=f"tpset_link_{idx}", dir="input")
            ],
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(
            f"raw_tp_handler_{idx}",
            "DataLinkHandler",
            [
                app.QueueInfo(
                    name="raw_input", inst=f"raw_tp_link_{idx}", dir="input"),
                app.QueueInfo(
                    name="timesync", inst="time_sync_q", dir="output"),
            ],
        ) for idx in range(
            NUMBER_OF_DATA_PRODUCERS,
            NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
        )
    ] + [
        mspec(
            "errored_frame_consumer",
            "ErroredFrameConsumer",
            [
                app.QueueInfo(
                    name="input_queue", inst="errored_frames_q", dir="input")
            ],
        )
    ])

    nw_specs = [
        nwmgr.Connection(name=f"tpsets_{idx}",
                         topics=["foo"],
                         address="tcp://127.0.0.1:" + str(5000 + idx))
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]
    nw_specs.append(
        nwmgr.Connection(name="timesync",
                         topics=["Timesync"],
                         address="tcp://127.0.0.1:6000"))

    init_specs = app.Init(queues=queue_specs,
                          modules=mod_specs,
                          nwconnections=nw_specs)

    jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
    print(jstr)

    initcmd = rccmd.RCCommand(
        id=basecmd.CmdId("init"),
        entry_state="NONE",
        exit_state="INITIAL",
        data=init_specs,
    )

    confcmd = mrccmd(
        "conf",
        "INITIAL",
        "CONFIGURED",
        [
            (
                "fake_source",
                pcr.Conf(link_confs=[
                    pcr.LinkConfiguration(geoid=pcr.GeoID(
                        system="kNDLarTPC", region=0, element=idx), )
                    for idx in range(NUMBER_OF_DATA_PRODUCERS)
                ] + [
                    pcr.LinkConfiguration(geoid=sec.GeoID(
                        system="TPC", region=0, element=idx), )
                    for idx in range(
                        NUMBER_OF_DATA_PRODUCERS,
                        NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
                    )
                ],
                         # input_limit=10485100, # default
                         ),
            ),
        ] + [(
            f"datahandler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    fake_trigger_flag=1,
                    region_id=0,
                    element_id=idx,
                    timesync_connection_name=f"timesync",
                    timesync_topic_name="Timesync",
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    region_id=0,
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=0,
                    element_id=idx,
                    enable_software_tpg=ENABLE_SOFTWARE_TPG,
                    error_counter_threshold=100,
                    error_reset_freq=10000,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=0,
                    element_id=idx,
                    output_file=f"output_{idx}.out",
                    stream_buffer_size=8388608,
                    enable_raw_recording=True,
                    use_o_direct=False,
                ),
            ),
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)] + [(
            f"sw_tp_handler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    fake_trigger_flag=1,
                    region_id=0,
                    element_id=idx,
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    region_id=0,
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=0,
                    element_id=idx,
                    enable_software_tpg=ENABLE_SOFTWARE_TPG,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=0,
                    element_id=idx,
                    output_file=f"output_{idx}.out",
                    stream_buffer_size=8388608,
                    enable_raw_recording=False,
                    use_o_direct=False,
                ),
            ),
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)] + [(
            f"raw_tp_handler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    fake_trigger_flag=1,
                    region_id=0,
                    element_id=idx,
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    region_id=0,
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=0,
                    element_id=idx,
                    enable_software_tpg=ENABLE_SOFTWARE_TPG,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=0,
                    element_id=idx,
                    output_file=f"output_{idx}.out",
                    stream_buffer_size=8388608,
                    enable_raw_recording=False,
                    use_o_direct=False,
                ),
            ),
        ) for idx in range(
            NUMBER_OF_DATA_PRODUCERS,
            NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
        )] + [(
            f"tpset_publisher_{idx}",
            qton.Conf(
                msg_type="dunedaq::trigger::TPSet",
                msg_module_name="TPSetNQ",
                sender_config=nos.Conf(
                    name=f"tpsets_{idx}",
                    topic="foo",
                    stype="msgpack",
                ),
            ),
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)],
    )

    jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
    print(jstr)

    startpars = rccmd.StartParams(run=RUN_NUMBER)
    startcmd = mrccmd(
        "start",
        "CONFIGURED",
        "RUNNING",
        [
            ("datahandler_.*", startpars),
            ("fake_source", startpars),
            ("data_recorder_.*", startpars),
            ("timesync_consumer", startpars),
            ("fragment_consumer", startpars),
            ("sw_tp_handler_.*", startpars),
            ("raw_tp_handler_.*", startpars),
            ("tpset_publisher_.*", startpars),
            ("errored_frame_consumer", startpars),
        ],
    )

    jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStart\n\n", jstr)

    stopcmd = mrccmd(
        "stop",
        "RUNNING",
        "CONFIGURED",
        [
            ("fake_source", None),
            ("datahandler_.*", None),
            ("data_recorder_.*", None),
            ("timesync_consumer", None),
            ("fragment_consumer", None),
            ("sw_tp_handler_.*", None),
            ("raw_tp_handler_.*", None),
            ("tpset_publisher_.*", None),
            ("errored_frame_consumer", None),
        ],
    )

    jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStop\n\n", jstr)

    scrapcmd = mrccmd("scrap", "CONFIGURED", "INITIAL", [("", None)])

    jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nScrap\n\n", jstr)

    # Create a list of commands
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd, scrapcmd]

    record_cmd = mrccmd(
        "record",
        "RUNNING",
        "RUNNING",
        [("datahandler_.*", rconf.RecordingParams(duration=10))],
    )

    jstr = json.dumps(record_cmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nRecord\n\n", jstr)

    cmd_seq.append(record_cmd)

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr
# ---- Example 6 ----
    def __init__(self,
                 # NW_SPECS: list,

                 SOFTWARE_TPG_ENABLED: bool = False,
                 # NOTE(review): mutable default argument — shared across calls.
                 # Read-only here, but prefer `None` + in-body default.
                 RU_CONFIG: list = [],

                 ACTIVITY_PLUGIN: str = 'TriggerActivityMakerPrescalePlugin',
                 ACTIVITY_CONFIG: dict = dict(prescale=10000),

                 CANDIDATE_PLUGIN: str = 'TriggerCandidateMakerPrescalePlugin',
                 CANDIDATE_CONFIG: dict = dict(prescale=10),

                 # NOTE(review): TOKEN_COUNT is accepted but never referenced
                 # in this constructor — confirm whether it is still needed.
                 TOKEN_COUNT: int = 10,
                 SYSTEM_TYPE: str = 'wib',
                 TTCM_S1: int = 1,
                 TTCM_S2: int = 2,
                 TRIGGER_WINDOW_BEFORE_TICKS: int = 1000,
                 TRIGGER_WINDOW_AFTER_TICKS: int = 1000,
                 PARTITION: str = "UNKNOWN",
                 HOST: str = "localhost"
                 ):
        """
        Build the 'TriggerApp' module graph.

        Always creates a TimingTriggerCandidateMaker ('ttcm') and a
        ModuleLevelTrigger ('mlt').  When SOFTWARE_TPG_ENABLED is True it
        additionally wires the software-TPG data-selection chain: a
        request receiver, a TPSet receiver, a fragment QueueToNetwork,
        a TriggerCandidateMaker, and — per readout unit in RU_CONFIG —
        a TPSet subscriber, a TPZipper, a TriggerActivityMaker, and one
        TPSetBufferCreator per channel.

        The resulting ModuleGraph (plus endpoints and fragment producers)
        is handed to the parent class constructor; the graph is also
        exported to 'trigger_app.dot'.
        """

        # Generate schema for the maker plugins on the fly in the temptypes module
        make_moo_record(ACTIVITY_CONFIG , 'ActivityConf' , 'temptypes')
        make_moo_record(CANDIDATE_CONFIG, 'CandidateConf', 'temptypes')
        import temptypes

        modules = []

        if SOFTWARE_TPG_ENABLED:
            # Per-(RU, channel) connections: data requests go to the matching
            # buffer's request queue, TPSets to the matching buffer's TPSet queue.
            connections_request_receiver = {}
            connections_tpset_receiver = {}
            for ru in range(len(RU_CONFIG)):
                for idy in range(RU_CONFIG[ru]["channel_count"]):
                    connections_request_receiver[f'output_{ru}_{idy}'] = Connection(f'buf{ru}_{idy}.data_request_q{ru}_{idy}')
                    connections_tpset_receiver  [f'output_{ru}_{idy}'] = Connection(f'buf{ru}_{idy}.tpset_q_for_buf{ru}_{idy}')

            # Map each (region, element) GeoID to the per-channel request queue.
            # Element ids are offset by each RU's start_channel.
            config_request_receiver = rrcv.ConfParams(map = [rrcv.geoidinst(region=RU_CONFIG[ru]["region_id"],
                                                                            element=idy+RU_CONFIG[ru]["start_channel"],
                                                                            system="DataSelection",
                                                                            queueinstance=f"data_request_q{ru}_{idy}")
                                                             for ru in range(len(RU_CONFIG)) for idy in range(RU_CONFIG[ru]["channel_count"])],
                                                      general_queue_timeout = 100,
                                                      connection_name = f"{PARTITION}.ds_tp_datareq_0")

            # Same GeoID map for the incoming TPSet stream (topic "TPSets").
            config_tpset_receiver = tpsrcv.ConfParams(map = [tpsrcv.geoidinst(region=RU_CONFIG[ru]["region_id"],
                                                                              element=idy+RU_CONFIG[ru]["start_channel"],
                                                                              system=SYSTEM_TYPE,
                                                                              queueinstance=f"tpset_q_for_buf{ru}_{idy}")
                                                             for ru in range(len(RU_CONFIG)) for idy in range(RU_CONFIG[ru]["channel_count"])],
                                                      general_queue_timeout = 100,
                                                      topic = f"TPSets")

            # Fragments produced by the TPSet buffers are shipped out over the network.
            config_qton_fragment = qton.Conf(msg_type="std::unique_ptr<dunedaq::daqdataformats::Fragment>",
                                             msg_module_name="FragmentNQ",
                                             sender_config=nos.Conf(name=f"{PARTITION}.frags_tpset_ds_0",stype="msgpack"))

            config_tcm =  tcm.Conf(candidate_maker=CANDIDATE_PLUGIN,
                                   candidate_maker_config=temptypes.CandidateConf(**CANDIDATE_CONFIG))

            modules += [DAQModule(name = 'request_receiver',
                               plugin = 'RequestReceiver',
                               connections = connections_request_receiver,
                               conf = config_request_receiver),

                        DAQModule(name = 'tpset_receiver',
                               plugin = 'TPSetReceiver',
                               connections = connections_tpset_receiver,
                               conf = config_tpset_receiver),

                        DAQModule(name = 'qton_fragments',
                               plugin = 'QueueToNetwork',
                               connections = {}, # all the incoming links in TPSetBufferCreators
                               conf = config_qton_fragment),

                        DAQModule(name = 'tcm',
                               plugin = 'TriggerCandidateMaker',
                               connections = {#'input' : Connection(f'tcm.taset_q'),
                                   'output': Connection(f'mlt.trigger_candidate_q')},
                               conf = config_tcm)]

            # Per-RU chain: network subscriber -> TPZipper -> TriggerActivityMaker.
            for ru in range(len(RU_CONFIG)):

                modules += [DAQModule(name = f'tpset_subscriber_{ru}',
                                   plugin = 'NetworkToQueue',
                                   connections = {'output': Connection(f'zip_{ru}.tpsets_from_netq_{ru}')},
                                   conf = ntoq.Conf(msg_type="dunedaq::trigger::TPSet",
                                                    msg_module_name="TPSetNQ",
                                                    receiver_config=nor.Conf(name=f'{PARTITION}.tpsets_{ru}',
                                                                             subscriptions=["TPSets"]))),

                            DAQModule(name = f'zip_{ru}',
                                   plugin = 'TPZipper',
                                   connections = {# 'input' are App.network_endpoints, from RU
                                       'output': Connection(f'tam_{ru}.input')},
                                   conf = tzip.ConfParams(cardinality=RU_CONFIG[ru]['channel_count'],
                                                          max_latency_ms=1000,
                                                          region_id=0,
                                                          element_id=0)),

                            DAQModule(name = f'tam_{ru}',
                                   plugin = 'TriggerActivityMaker',
                                   connections = {'output': Connection('tcm.taset_q')},
                                   conf = tam.Conf(activity_maker=ACTIVITY_PLUGIN,
                                                   geoid_region=0,  # Fake placeholder
                                                   geoid_element=0,  # Fake placeholder
                                                   window_time=10000,  # should match whatever makes TPSets, in principle
                                                   buffer_time=625000,  # 10ms in 62.5 MHz ticks
                                                   activity_maker_config=temptypes.ActivityConf(**ACTIVITY_CONFIG)))]

                # One TPSet buffer per channel of this RU; element ids offset
                # by the RU's start_channel, matching the receiver maps above.
                for idy in range(RU_CONFIG[ru]["channel_count"]):
                    modules += [DAQModule(name = f'buf{ru}_{idy}',
                                       plugin = 'TPSetBufferCreator',
                                       connections = {#'tpset_source': Connection(f"tpset_q_for_buf{ru}_{idy}"),#already in request_receiver
                                                      #'data_request_source': Connection(f"data_request_q{ru}_{idy}"), #ditto
                                                      'fragment_sink': Connection('qton_fragments.fragment_q')},
                                       conf = buf.Conf(tpset_buffer_size=10000, region=RU_CONFIG[ru]["region_id"], element=idy + RU_CONFIG[ru]["start_channel"]))]

        # Timing-based trigger candidates are always produced, independent of
        # the software-TPG chain.
        modules += [DAQModule(name = 'ttcm',
                           plugin = 'TimingTriggerCandidateMaker',
                           connections={"output": Connection("mlt.trigger_candidate_q")},
                           conf=ttcm.Conf(s1=ttcm.map_t(signal_type=TTCM_S1,
                                                        time_before=TRIGGER_WINDOW_BEFORE_TICKS,
                                                        time_after=TRIGGER_WINDOW_AFTER_TICKS),
                                          s2=ttcm.map_t(signal_type=TTCM_S2,
                                                        time_before=TRIGGER_WINDOW_BEFORE_TICKS,
                                                        time_after=TRIGGER_WINDOW_AFTER_TICKS),
                                          hsievent_connection_name = PARTITION+".hsievent"))]

        # We need to populate the list of links based on the fragment
        # producers available in the system. This is a bit of a
        # chicken-and-egg problem, because the trigger app itself creates
        # fragment producers (see below). Eventually when the MLT is its
        # own process, this problem will probably go away, but for now, we
        # leave the list of links here blank, and replace it in
        # util.connect_fragment_producers
        modules += [DAQModule(name = 'mlt',
                              plugin = 'ModuleLevelTrigger',
                              conf=mlt.ConfParams(links=[]))] # To be updated later - see comment above

        mgraph = ModuleGraph(modules)
        mgraph.add_endpoint("hsievents",  "ttcm.input", Direction.IN)
        if SOFTWARE_TPG_ENABLED:
            # One inbound endpoint pair per RU, plus a DataSelection fragment
            # producer per RU (element=idx).
            for idx in range(len(RU_CONFIG)):
                mgraph.add_endpoint(f"tpsets_into_chain_link{idx}", f"tpset_receiver.input", Direction.IN)
                mgraph.add_endpoint(f"tpsets_into_buffer_link{idx}", f"tpset_subscriber_{idx}.tpset_source", Direction.IN)

                mgraph.add_fragment_producer(region=0, element=idx, system="DataSelection",
                                             requests_in=f"request_receiver.data_request_source",
                                             fragments_out=f"qton_fragments.fragment_sink")


        mgraph.add_endpoint("trigger_decisions", "mlt.trigger_decision_sink", Direction.OUT)
        mgraph.add_endpoint("tokens", "mlt.token_source", Direction.IN)

        super().__init__(modulegraph=mgraph, host=HOST, name='TriggerApp')
        self.export("trigger_app.dot")
# ---- Example 7 ----
def generate(NW_SPECS,
             # NOTE(review): mutable default argument — shared across calls.
             # Read-only here, but prefer `None` + in-body default.
             RU_CONFIG=[],
             EMULATOR_MODE=False,
             DATA_RATE_SLOWDOWN_FACTOR=1,
             RUN_NUMBER=333,
             DATA_FILE="./frames.bin",
             FLX_INPUT=False,
             SSP_INPUT=True,
             CLOCK_SPEED_HZ=50000000,
             RUIDX=0,
             RAW_RECORDING_ENABLED=False,
             RAW_RECORDING_OUTPUT_DIR=".",
             FRONTEND_TYPE='wib',
             SYSTEM_TYPE='TPC',
             SOFTWARE_TPG_ENABLED=False,
             USE_FAKE_DATA_PRODUCERS=False,
             PARTITION="UNKNOWN",
             LATENCY_BUFFER_SIZE=499968):
    """Generate the json configuration for the readout and DF process.

    Builds the per-command payloads (init/conf/start/stop/pause/resume/
    scrap/record) for one readout unit (selected by RUIDX within
    RU_CONFIG) and returns them as a dict keyed by command name.

    Raises RuntimeError if the required timesync network endpoint for
    this RU is not present in NW_SPECS.
    """

    cmd_data = {}

    # This RU's timesync endpoint must exist among the declared network
    # connections; fail fast with the full endpoint list otherwise.
    required_eps = {f'{PARTITION}.timesync_{RUIDX}'}
    if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}"
        )

    # NOTE(review): RATE_KHZ is computed but not referenced anywhere below —
    # confirm whether it is still needed or can be removed.
    RATE_KHZ = CLOCK_SPEED_HZ / (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR * 1000)

    # Channel (link) range handled by this readout unit.
    MIN_LINK = RU_CONFIG[RUIDX]["start_channel"]
    MAX_LINK = MIN_LINK + RU_CONFIG[RUIDX]["channel_count"]
    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(
            inst=f"data_requests_{idx}", kind='FollySPSCQueue', capacity=100)
        for idx in range(MIN_LINK, MAX_LINK)
    ] + [
        app.QueueSpec(inst="fragment_q", kind="FollyMPMCQueue", capacity=100)
    ]

    # Raw-input queues are only needed when real (non-fake) producers feed them.
    if not USE_FAKE_DATA_PRODUCERS:
        queue_bare_specs += [
            app.QueueSpec(inst=f"{FRONTEND_TYPE}_link_{idx}",
                          kind='FollySPSCQueue',
                          capacity=100000)
            for idx in range(MIN_LINK, MAX_LINK)
        ]
    # Extra queues for the software TPG chain: per-link TP queues, a shared
    # TPSet queue, and per-link TP request queues.
    if SOFTWARE_TPG_ENABLED:
        queue_bare_specs += [
            app.QueueSpec(inst=f"sw_tp_link_{idx}",
                          kind='FollySPSCQueue',
                          capacity=100000)
            for idx in range(MIN_LINK, MAX_LINK)
        ] + [
            app.QueueSpec(
                inst=f"tpset_queue", kind='FollyMPMCQueue', capacity=10000)
        ] + [
            app.QueueSpec(
                inst=f"tp_requests_{idx}", kind='FollySPSCQueue', capacity=100)
            for idx in range(MIN_LINK, MAX_LINK)
        ]

    # WIB frontends additionally report errored frames on a dedicated queue.
    if FRONTEND_TYPE == 'wib':
        queue_bare_specs += [
            app.QueueSpec(inst="errored_frames_q",
                          kind="FollyMPMCQueue",
                          capacity=10000)
        ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec(f"fragment_sender", "FragmentSender", [
            app.QueueInfo(name="input_queue", inst="fragment_q", dir="input")
        ])
    ]

    # The request receiver fans data requests out to per-link queues; with
    # software TPG it also serves TP requests and a TPSet publisher plus one
    # DataLinkHandler per TP link is added.
    if SOFTWARE_TPG_ENABLED:
        mod_specs += [
            mspec(f"request_receiver", "RequestReceiver", [
                app.QueueInfo(
                    name="output", inst=f"data_requests_{idx}", dir="output")
                for idx in range(MIN_LINK, MAX_LINK)
            ] + [
                app.QueueInfo(
                    name="output", inst=f"tp_requests_{idx}", dir="output")
                for idx in range(MIN_LINK, MAX_LINK)
            ])
        ] + [
            mspec(f"tp_datahandler_{idx}", "DataLinkHandler", [
                app.QueueInfo(
                    name="raw_input", inst=f"sw_tp_link_{idx}", dir="input"),
                app.QueueInfo(name="data_requests_0",
                              inst=f"tp_requests_{idx}",
                              dir="input"),
                app.QueueInfo(
                    name="fragment_queue", inst="fragment_q", dir="output")
            ]) for idx in range(MIN_LINK, MAX_LINK)
        ] + [
            mspec(f"tpset_publisher", "QueueToNetwork", [
                app.QueueInfo(name="input", inst=f"tpset_queue", dir="input")
            ])
        ]
    else:
        mod_specs += [
            mspec(f"request_receiver", "RequestReceiver", [
                app.QueueInfo(
                    name="output", inst=f"data_requests_{idx}", dir="output")
                for idx in range(MIN_LINK, MAX_LINK)
            ])
        ]

    if FRONTEND_TYPE == 'wib':
        mod_specs += [
            mspec("errored_frame_consumer", "ErroredFrameConsumer", [
                app.QueueInfo(
                    name="input_queue", inst="errored_frames_q", dir="input")
            ])
        ]

    # There are two flags to be checked so I think a for loop
    # is the closest way to the blocks that are being used here

    for idx in range(MIN_LINK, MAX_LINK):
        if USE_FAKE_DATA_PRODUCERS:
            mod_specs = mod_specs + [
                mspec(f"fakedataprod_{idx}", "FakeDataProd", [
                    app.QueueInfo(name="data_request_input_queue",
                                  inst=f"data_requests_{idx}",
                                  dir="input"),
                ])
            ]
        else:
            # Base queue wiring for a real DataLinkHandler; extended below
            # depending on the TPG and frontend flags.
            ls = [
                app.QueueInfo(name="raw_input",
                              inst=f"{FRONTEND_TYPE}_link_{idx}",
                              dir="input"),
                app.QueueInfo(name="data_requests_0",
                              inst=f"data_requests_{idx}",
                              dir="input"),
                app.QueueInfo(name="fragment_queue",
                              inst="fragment_q",
                              dir="output")
            ]
            if SOFTWARE_TPG_ENABLED:
                ls.extend([
                    app.QueueInfo(name="tp_out",
                                  inst=f"sw_tp_link_{idx}",
                                  dir="output"),
                    app.QueueInfo(name="tpset_out",
                                  inst=f"tpset_queue",
                                  dir="output")
                ])

            if FRONTEND_TYPE == 'wib':
                ls.extend([
                    app.QueueInfo(name="errored_frames",
                                  inst="errored_frames_q",
                                  dir="output")
                ])

            mod_specs += [mspec(f"datahandler_{idx}", "DataLinkHandler", ls)]

    # Choose the card reader: FELIX (one reader per 5 links), SSP, or a
    # fake/pacman source as fallback.
    if not USE_FAKE_DATA_PRODUCERS:
        if FLX_INPUT:
            mod_specs.append(
                mspec("flxcard_0", "FelixCardReader", [
                    app.QueueInfo(name=f"output_{idx}",
                                  inst=f"{FRONTEND_TYPE}_link_{idx}",
                                  dir="output")
                    for idx in range(
                        MIN_LINK, MIN_LINK +
                        min(5, RU_CONFIG[RUIDX]["channel_count"]))
                ]))
            # A FELIX card serves at most 5 links per logical unit; spill the
            # remainder onto a second reader.
            if RU_CONFIG[RUIDX]["channel_count"] > 5:
                mod_specs.append(
                    mspec("flxcard_1", "FelixCardReader", [
                        app.QueueInfo(name=f"output_{idx}",
                                      inst=f"{FRONTEND_TYPE}_link_{idx}",
                                      dir="output")
                        for idx in range(MIN_LINK + 5, MAX_LINK)
                    ]))
        elif SSP_INPUT:
            mod_specs.append(
                mspec("ssp_0", "SSPCardReader", [
                    app.QueueInfo(name=f"output_{idx}",
                                  inst=f"{FRONTEND_TYPE}_link_{idx}",
                                  dir="output")
                    for idx in range(MIN_LINK, MAX_LINK)
                ]))

        else:
            fake_source = "fake_source"
            card_reader = "FakeCardReader"
            if FRONTEND_TYPE == 'pacman':
                fake_source = "pacman_source"
                card_reader = "PacmanCardReader"

            mod_specs.append(
                mspec(fake_source, card_reader, [
                    app.QueueInfo(name=f"output_{idx}",
                                  inst=f"{FRONTEND_TYPE}_link_{idx}",
                                  dir="output")
                    for idx in range(MIN_LINK, MAX_LINK)
                ]))

    cmd_data['init'] = app.Init(queues=queue_specs,
                                modules=mod_specs,
                                nwconnections=NW_SPECS)

    # Total links across all RUs — used to offset TP-handler element ids so
    # they don't collide with the raw-data element ids.
    total_link_count = 0
    for ru in range(len(RU_CONFIG)):
        total_link_count += RU_CONFIG[ru]["channel_count"]

    # Confs for every possible source module are listed unconditionally;
    # only those matching a module instantiated in mod_specs take effect.
    conf_list = [
        (
            "fake_source",
            sec.Conf(
                link_confs=[
                    sec.LinkConfiguration(
                        geoid=sec.GeoID(system=SYSTEM_TYPE,
                                        region=RU_CONFIG[RUIDX]["region_id"],
                                        element=idx),
                        slowdown=DATA_RATE_SLOWDOWN_FACTOR,
                        queue_name=f"output_{idx}",
                        data_filename=DATA_FILE,
                        emu_frame_error_rate=0,
                    ) for idx in range(MIN_LINK, MAX_LINK)
                ],
                # input_limit=10485100, # default
                queue_timeout_ms=QUEUE_POP_WAIT_MS)),
        ("pacman_source",
         pcr.Conf(link_confs=[
             pcr.LinkConfiguration(geoid=pcr.GeoID(
                 system=SYSTEM_TYPE,
                 region=RU_CONFIG[RUIDX]["region_id"],
                 element=idx), ) for idx in range(MIN_LINK, MAX_LINK)
         ],
                  zmq_receiver_timeout=10000)),
        ("flxcard_0",
         flxcr.Conf(card_id=RU_CONFIG[RUIDX]["card_id"],
                    logical_unit=0,
                    dma_id=0,
                    chunk_trailer_size=32,
                    dma_block_size_kb=4,
                    dma_memory_size_gb=4,
                    numa_id=0,
                    num_links=min(5, RU_CONFIG[RUIDX]["channel_count"]))),
        ("flxcard_1",
         flxcr.Conf(card_id=RU_CONFIG[RUIDX]["card_id"],
                    logical_unit=1,
                    dma_id=0,
                    chunk_trailer_size=32,
                    dma_block_size_kb=4,
                    dma_memory_size_gb=4,
                    numa_id=0,
                    num_links=max(0, RU_CONFIG[RUIDX]["channel_count"] - 5))),
        # NOTE(review): ssp_0 is configured with flxcr.Conf (the FELIX conf
        # type) even though the module is an SSPCardReader — confirm this
        # is intentional.
        ("ssp_0",
         flxcr.Conf(card_id=RU_CONFIG[RUIDX]["card_id"],
                    logical_unit=0,
                    dma_id=0,
                    chunk_trailer_size=32,
                    dma_block_size_kb=4,
                    dma_memory_size_gb=4,
                    numa_id=0,
                    num_links=RU_CONFIG[RUIDX]["channel_count"])),
    ] + [
        ("request_receiver",
         rrcv.ConfParams(map=[
             rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"],
                            element=idx,
                            system=SYSTEM_TYPE,
                            queueinstance=f"data_requests_{idx}")
             for idx in range(MIN_LINK, MAX_LINK)
         ] + [
             rrcv.geoidinst(region=RU_CONFIG[RUIDX]["region_id"],
                            element=idx + total_link_count,
                            system=SYSTEM_TYPE,
                            queueinstance=f"tp_requests_{idx}")
             for idx in range(MIN_LINK, MAX_LINK) if SOFTWARE_TPG_ENABLED
         ],
                         general_queue_timeout=QUEUE_POP_WAIT_MS,
                         connection_name=f"{PARTITION}.datareq_{RUIDX}"))
    ] + [
        (
            f"datahandler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    # fake_trigger_flag=0, # default
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=idx,
                    timesync_connection_name=f"{PARTITION}.timesync_{RUIDX}",
                    timesync_topic_name="Timesync",
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_alignment_size=4096,
                    latency_buffer_size=LATENCY_BUFFER_SIZE,
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=idx,
                    enable_software_tpg=SOFTWARE_TPG_ENABLED,
                    emulator_mode=EMULATOR_MODE,
                    error_counter_threshold=100,
                    error_reset_freq=10000),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=LATENCY_BUFFER_SIZE,
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=idx,
                    output_file=path.join(RAW_RECORDING_OUTPUT_DIR,
                                          f"output_{RUIDX}_{idx}.out"),
                    stream_buffer_size=8388608,
                    enable_raw_recording=RAW_RECORDING_ENABLED,
                ))) for idx in range(MIN_LINK, MAX_LINK)
    ] + [
        (
            # TP handlers use element ids offset by total_link_count to
            # stay distinct from the raw-data handlers above.
            f"tp_datahandler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    # fake_trigger_flag=0, default
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=total_link_count + idx,
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=LATENCY_BUFFER_SIZE,
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=total_link_count + idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=total_link_count + idx,
                    enable_software_tpg=False,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=LATENCY_BUFFER_SIZE,
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=RU_CONFIG[RUIDX]["region_id"],
                    element_id=total_link_count + idx,
                    # output_file = f"output_{idx + MIN_LINK}.out",
                    stream_buffer_size=100
                    if FRONTEND_TYPE == 'pacman' else 8388608,
                    enable_raw_recording=False,
                ))) for idx in range(MIN_LINK, MAX_LINK)
    ]

    if SOFTWARE_TPG_ENABLED:

        # The TPSet publisher sends TPSets out on this RU's network endpoint
        # under the "TPSets" topic.
        conf_list.extend([(f"tpset_publisher",
                           qton.Conf(msg_type="dunedaq::trigger::TPSet",
                                     msg_module_name="TPSetNQ",
                                     sender_config=nos.Conf(
                                         name=f"{PARTITION}.tpsets_{RUIDX}",
                                         topic="TPSets",
                                         stype="msgpack")))])

    if USE_FAKE_DATA_PRODUCERS:
        conf_list.extend([
            (f"fakedataprod_{idx}",
             fdp.ConfParams(
                 system_type=SYSTEM_TYPE,
                 apa_number=RU_CONFIG[RUIDX]["region_id"],
                 link_number=idx,
                 time_tick_diff=25,
                 frame_size=464,
                 response_delay=0,
                 timesync_connection_name=f"{PARTITION}.timesync_{RUIDX}",
                 timesync_topic_name="Timesync",
                 fragment_type="FakeData"))
            for idx in range(MIN_LINK, MAX_LINK)
        ])

    conf_list.extend([("fragment_sender", None)])

    cmd_data['conf'] = acmd(conf_list)

    # Start/stop address modules by regex; entries for modules not present
    # in this configuration simply match nothing.
    # NOTE(review): "ntoq_trigdec" is started below but never defined in
    # mod_specs above — confirm whether it is a leftover.
    startpars = rccmd.StartParams(run=RUN_NUMBER)
    cmd_data['start'] = acmd([("datahandler_.*", startpars),
                              ("fake_source", startpars),
                              ("pacman_source", startpars),
                              ("flxcard.*", startpars),
                              ("request_receiver", startpars),
                              ("ssp.*", startpars),
                              ("ntoq_trigdec", startpars),
                              (f"tp_datahandler_.*", startpars),
                              (f"tpset_publisher", startpars),
                              ("fakedataprod_.*", startpars),
                              ("fragment_sender", startpars),
                              ("errored_frame_consumer", startpars)])

    cmd_data['stop'] = acmd([("request_receiver", None), ("flxcard.*", None),
                             ("ssp.*", None), ("fake_source", None),
                             ("pacman_source", None), ("datahandler_.*", None),
                             (f"tp_datahandler_.*", None),
                             (f"tpset_publisher", None),
                             ("fakedataprod_.*", None),
                             ("fragment_sender", None),
                             ("errored_frame_consumer", None)])

    # Remaining commands are no-ops for every module (empty-string regex).
    cmd_data['pause'] = acmd([("", None)])

    cmd_data['resume'] = acmd([("", None)])

    cmd_data['scrap'] = acmd([("", None)])

    cmd_data['record'] = acmd([("", None)])

    return cmd_data