Code Example #1
def generate(TRIGGER_RATE_HZ: float = 1.0,
             OUTPUT_PATH: str = ".",
             TOKEN_COUNT: int = 10,
             CLOCK_SPEED_HZ: int = 50000000,
             FORGET_DECISION_PROB: float = 0.0,
             HOLD_DECISION_PROB: float = 0.0,
             HOLD_MAX_SIZE: int = 0,
             HOLD_MIN_SIZE: int = 0,
             HOLD_MIN_MS: int = 0,
             RELEASE_RANDOMLY_PROB: float = 0.0):
    """
    { item_description }
    """
    cmd_data = {}

    # Derived parameters
    TRIGGER_INTERVAL_NS = math.floor(1e9 / TRIGGER_RATE_HZ)
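    # e.g. TRIGGER_RATE_HZ=1.0 gives floor(1e9 / 1.0) = 1000000000 ns between triggers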

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst="hsievent_q", kind='FollyMPMCQueue', capacity=1000),
        app.QueueSpec(inst="time_sync_q", kind='FollySPSCQueue',
                      capacity=1000),
        app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=2000),
        app.QueueSpec(inst="trigger_decision_q",
                      kind='FollySPSCQueue',
                      capacity=2000),
        app.QueueSpec(inst="trigger_candidate_q",
                      kind='FollyMPMCQueue',
                      capacity=2000),  # No MPSC queue?
    ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("fdf", "FakeDataFlow", [
            app.QueueInfo(name="trigger_decision_source",
                          inst="trigger_decision_q",
                          dir="input"),
            app.QueueInfo(
                name="trigger_complete_sink", inst="token_q", dir="output"),
        ]),
        mspec("mlt", "ModuleLevelTrigger", [
            app.QueueInfo(name="token_source", inst="token_q", dir="input"),
            app.QueueInfo(name="trigger_decision_sink",
                          inst="trigger_decision_q",
                          dir="output"),
            app.QueueInfo(name="trigger_candidate_source",
                          inst="trigger_candidate_q",
                          dir="output"),
        ]),
        mspec("ftsdg", "FakeTimeStampedDataGenerator", [
            app.QueueInfo(
                name="hsievent_sink", inst="hsievent_q", dir="output"),
        ]),
        mspec("ttcm", "TimingTriggerCandidateMaker", [
            app.QueueInfo(name="input", inst="hsievent_q", dir="input"),
            app.QueueInfo(
                name="output", inst="trigger_candidate_q", dir="output"),
        ]),
    ]

    cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs)

    cmd_data['conf'] = acmd([
        ("fdf",
         fdf.ConfParams(hold_max_size=HOLD_MAX_SIZE,
                        hold_min_size=HOLD_MIN_SIZE,
                        hold_min_ms=HOLD_MIN_MS,
                        release_randomly_prob=RELEASE_RANDOMLY_PROB,
                        forget_decision_prob=FORGET_DECISION_PROB,
                        hold_decision_prob=HOLD_DECISION_PROB)),
        ("mlt",
         mlt.ConfParams(links=[idx for idx in range(3)],
                        initial_token_count=TOKEN_COUNT)),
        ("ftsdg",
         ftsdg.Conf(sleep_time=TRIGGER_INTERVAL_NS, frequency=CLOCK_SPEED_HZ)),
        ("ttcm", ttcm.Conf()),
    ])

    startpars = rccmd.StartParams(run=1, disable_data_storage=False)
    cmd_data['start'] = acmd([
        ("fdf", startpars),
        ("mlt", startpars),
        ("ftsdg", startpars),
        ("ttcm", startpars),
    ])

    # We issue stop commands in the order "upstream to downstream" so
    # that each module can drain its input queue at stop, and be
    # guaranteed to get all inputs (at least when everything lives in
    # the same module)
    cmd_data['stop'] = acmd([
        ("ftsdg", None),
        ("ttcm", None),
        ("mlt", None),
        ("fdf", None),
    ])

    cmd_data['pause'] = acmd([("", None)])

    resumepars = rccmd.ResumeParams(trigger_interval_ticks=50000000)
    cmd_data['resume'] = acmd([("mlt", resumepars)])

    cmd_data['scrap'] = acmd([("", None)])

    return cmd_data
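
A minimal usage sketch (hypothetical driver; it assumes the moo-generated
app/fdf/mlt/ftsdg/ttcm/rccmd modules and the acmd/mspec helpers are importable
as in the snippet above, and that the command objects expose moo's pod()
method for JSON serialization):

import json

cmd_data = generate(TRIGGER_RATE_HZ=1.0, TOKEN_COUNT=10)
for cmd, payload in cmd_data.items():
    # One JSON file per run-control command, nanorc-style.
    with open(f"trigger_app_{cmd}.json", "w") as f:
        json.dump(payload.pod() if hasattr(payload, "pod") else payload,
                  f, indent=4)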
Code Example #2
def generate(NW_SPECS: list,
             SOFTWARE_TPG_ENABLED: bool = False,
             RU_CONFIG: list = [],
             ACTIVITY_PLUGIN: str = 'TriggerActivityMakerPrescalePlugin',
             ACTIVITY_CONFIG: dict = dict(prescale=10000),
             CANDIDATE_PLUGIN: str = 'TriggerCandidateMakerPrescalePlugin',
             CANDIDATE_CONFIG: dict = dict(prescale=10),
             TOKEN_COUNT: int = 10,
             DF_COUNT: int = 1,
             SYSTEM_TYPE='wib',
             TTCM_S1: int = 1,
             TTCM_S2: int = 2,
             TRIGGER_WINDOW_BEFORE_TICKS: int = 1000,
             TRIGGER_WINDOW_AFTER_TICKS: int = 1000,
             PARTITION="UNKNOWN"):
    """
    { item_description }
    """
    cmd_data = {}

    required_eps = {PARTITION + '.hsievent'}
    if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}"
        )

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst='trigger_candidate_q',
                      kind='FollyMPMCQueue',
                      capacity=1000),
        app.QueueSpec(inst='trigger_decision_q',
                      kind='FollySPSCQueue',
                      capacity=2)
    ]

    if SOFTWARE_TPG_ENABLED:
        queue_bare_specs.extend([
            app.QueueSpec(inst=f"fragment_q",
                          kind='FollyMPMCQueue',
                          capacity=1000),
            app.QueueSpec(inst=f'taset_q',
                          kind='FollyMPMCQueue',
                          capacity=1000),
        ])
        for ru in range(len(RU_CONFIG)):
            queue_bare_specs.extend([
                app.QueueSpec(inst=f"tpsets_from_netq_{ru}",
                              kind='FollySPSCQueue',
                              capacity=1000),
                app.QueueSpec(inst=f'zipped_tpset_q_{ru}',
                              kind='FollySPSCQueue',
                              capacity=1000),
            ])
            for idx in range(RU_CONFIG[ru]["channel_count"]):
                queue_bare_specs.extend([
                    app.QueueSpec(inst=f"tpset_q_for_buf{ru}_{idx}",
                                  kind='FollySPSCQueue',
                                  capacity=1000),
                    app.QueueSpec(inst=f"data_request_q{ru}_{idx}",
                                  kind='FollySPSCQueue',
                                  capacity=1000),
                ])

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = []

    if SOFTWARE_TPG_ENABLED:
        mod_specs.extend([
            mspec(f"request_receiver", "RequestReceiver", [
                app.QueueInfo(name="output",
                              inst=f"data_request_q{ru}_{idy}",
                              dir="output") for ru in range(len(RU_CONFIG))
                for idy in range(RU_CONFIG[ru]["channel_count"])
            ])
        ] + [
            mspec(f"tpset_receiver", "TPSetReceiver", [
                app.QueueInfo(name="output",
                              inst=f"tpset_q_for_buf{ru}_{idy}",
                              dir="output") for ru in range(len(RU_CONFIG))
                for idy in range(RU_CONFIG[ru]["channel_count"])
            ])
        ] + [
            mspec(f"fragment_sender", "FragmentSender", [
                app.QueueInfo(
                    name="input_queue", inst=f"fragment_q", dir="input")
            ]),
            mspec(
                'tcm',
                'TriggerCandidateMaker',
                [  # TASet -> TC
                    app.QueueInfo(name='input', inst='taset_q', dir='input'),
                    app.QueueInfo(name='output',
                                  inst='trigger_candidate_q',
                                  dir='output'),
                ])
        ])
        for ru in range(len(RU_CONFIG)):
            mod_specs.extend([
                mspec(f"tpset_subscriber_{ru}", "NetworkToQueue", [
                    app.QueueInfo(name="output",
                                  inst=f"tpsets_from_netq_{ru}",
                                  dir="output")
                ]),
                mspec(
                    f"zip_{ru}",
                    "TPZipper",
                    [
                        app.QueueInfo(name="input",
                                      inst=f"tpsets_from_netq_{ru}",
                                      dir="input"),
                        app.QueueInfo(
                            name="output",
                            inst=f"zipped_tpset_q_{ru}",
                            dir="output"
                        ),  # FIXME: need to fan out this zipped_tpset_q if using multiple algorithms
                    ]),

                ### Algorithm(s)
                mspec(
                    f'tam_{ru}',
                    'TriggerActivityMaker',
                    [  # TPSet -> TASet
                        app.QueueInfo(name='input',
                                      inst=f'zipped_tpset_q_{ru}',
                                      dir='input'),
                        app.QueueInfo(
                            name='output', inst='taset_q', dir='output'),
                    ]),
            ])
            for idy in range(RU_CONFIG[ru]["channel_count"]):
                mod_specs.extend([
                    mspec(f"buf{ru}_{idy}", "TPSetBufferCreator", [
                        app.QueueInfo(name="tpset_source",
                                      inst=f"tpset_q_for_buf{ru}_{idy}",
                                      dir="input"),
                        app.QueueInfo(name="data_request_source",
                                      inst=f"data_request_q{ru}_{idy}",
                                      dir="input"),
                        app.QueueInfo(name="fragment_sink",
                                      inst=f"fragment_q",
                                      dir="output"),
                    ])
                ])

    mod_specs += ([

        ### Timing TCs
        mspec("ttcm", "TimingTriggerCandidateMaker", [
            app.QueueInfo(
                name="output", inst="trigger_candidate_q", dir="output"),
        ]),

        ### Module level trigger
        mspec("mlt", "ModuleLevelTrigger", [
            app.QueueInfo(name="trigger_candidate_source",
                          inst="trigger_candidate_q",
                          dir="input"),
            app.QueueInfo(name="trigger_decision_sink",
                          inst="trigger_decision_q",
                          dir="output"),
        ]),

        ### DFO
        mspec("dfo", "DataFlowOrchestrator", [
            app.QueueInfo(name="trigger_decision_queue",
                          inst="trigger_decision_q",
                          dir="input"),
        ]),
    ])

    cmd_data['init'] = app.Init(queues=queue_specs,
                                modules=mod_specs,
                                nwconnections=NW_SPECS)

    # Generate schema for the maker plugins on the fly in the temptypes module
    make_moo_record(ACTIVITY_CONFIG, 'ActivityConf', 'temptypes')
    make_moo_record(CANDIDATE_CONFIG, 'CandidateConf', 'temptypes')
    import temptypes
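    # (Assumed behavior: make_mooo_record is not invented here -- make_moo_record
    # builds a moo record schema from the dict's keys and default values and
    # registers it in the generated temptypes module, so the plugin configs
    # below can be constructed as typed objects rather than raw dicts.)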

    tp_confs = []

    if SOFTWARE_TPG_ENABLED:
        tp_confs.extend([
            ("request_receiver",
             rrcv.ConfParams(map=[
                 rrcv.geoidinst(region=RU_CONFIG[ru]["region_id"],
                                element=idy + RU_CONFIG[ru]["start_channel"],
                                system="DataSelection",
                                queueinstance=f"data_request_q{ru}_{idy}")
                 for ru in range(len(RU_CONFIG))
                 for idy in range(RU_CONFIG[ru]["channel_count"])
             ],
                             general_queue_timeout=100,
                             connection_name=f"{PARTITION}.ds_tp_datareq_0")),
            ("tpset_receiver",
             tpsrcv.ConfParams(map=[
                 tpsrcv.geoidinst(region=RU_CONFIG[ru]["region_id"],
                                  element=idy + RU_CONFIG[ru]["start_channel"],
                                  system=SYSTEM_TYPE,
                                  queueinstance=f"tpset_q_for_buf{ru}_{idy}")
                 for ru in range(len(RU_CONFIG))
                 for idy in range(RU_CONFIG[ru]["channel_count"])
             ],
                               general_queue_timeout=100,
                               topic=f"TPSets")),
            (f"fragment_sender", None),
            (f'tcm',
             tcm.Conf(candidate_maker=CANDIDATE_PLUGIN,
                      candidate_maker_config=temptypes.CandidateConf(
                          **CANDIDATE_CONFIG))),
        ])
        for idx in range(len(RU_CONFIG)):
            tp_confs.extend([
                (f"tpset_subscriber_{idx}",
                 ntoq.Conf(msg_type="dunedaq::trigger::TPSet",
                           msg_module_name="TPSetNQ",
                           receiver_config=nor.Conf(
                               name=f'{PARTITION}.tpsets_{idx}',
                               subscriptions=["TPSets"]))),
                (
                    f"zip_{idx}",
                    tzip.ConfParams(
                        cardinality=RU_CONFIG[idx]["channel_count"],
                        max_latency_ms=1000,
                        region_id=0,  # Fake placeholder
                        element_id=0  # Fake placeholder
                    )),

                ### Algorithms
                (
                    f'tam_{idx}',
                    tam.Conf(
                        activity_maker=ACTIVITY_PLUGIN,
                        geoid_region=0,  # Fake placeholder
                        geoid_element=0,  # Fake placeholder
                        window_time=
                        10000,  # should match whatever makes TPSets, in principle
                        buffer_time=625000,  # 10ms in 62.5 MHz ticks
                        activity_maker_config=temptypes.ActivityConf(
                            **ACTIVITY_CONFIG))),
            ])
            for idy in range(RU_CONFIG[idx]["channel_count"]):
                tp_confs.extend([
                    (f"buf{idx}_{idy}",
                     buf.Conf(tpset_buffer_size=10000,
                              region=RU_CONFIG[idx]["region_id"],
                              element=idy + RU_CONFIG[idx]["start_channel"]))
                ])

    total_link_count = 0
    for ru in range(len(RU_CONFIG)):
        total_link_count += RU_CONFIG[ru]["channel_count"]

    cmd_data['conf'] = acmd(tp_confs + [

        ### Timing TCs
        ("ttcm",
         ttcm.Conf(
             s1=ttcm.map_t(signal_type=TTCM_S1,
                           time_before=TRIGGER_WINDOW_BEFORE_TICKS,
                           time_after=TRIGGER_WINDOW_AFTER_TICKS),
             s2=ttcm.map_t(signal_type=TTCM_S2,
                           time_before=TRIGGER_WINDOW_BEFORE_TICKS,
                           time_after=TRIGGER_WINDOW_AFTER_TICKS),
             hsievent_connection_name=PARTITION + ".hsievent",
         )),

        # Module level trigger
        (
            "mlt",
            mlt.ConfParams(
                # This line requests the raw data from upstream DAQ _and_ the raw TPs from upstream DAQ
                links=[
                    mlt.GeoID(system=SYSTEM_TYPE,
                              region=RU_CONFIG[ru]["region_id"],
                              element=RU_CONFIG[ru]["start_channel"] + idx)
                    for ru in range(len(RU_CONFIG))
                    for idx in range(RU_CONFIG[ru]["channel_count"])
                ] + ([
                    mlt.GeoID(system="DataSelection",
                              region=RU_CONFIG[ru]["region_id"],
                              element=RU_CONFIG[ru]["start_channel"] + idx)
                    for ru in range(len(RU_CONFIG))
                    for idx in range(RU_CONFIG[ru]["channel_count"])
                ] if SOFTWARE_TPG_ENABLED else []) + ([
                    mlt.GeoID(system=SYSTEM_TYPE,
                              region=RU_CONFIG[ru]["region_id"],
                              element=RU_CONFIG[ru]["start_channel"] + idx +
                              total_link_count) for ru in range(len(RU_CONFIG))
                    for idx in range(RU_CONFIG[ru]["channel_count"])
                ] if SOFTWARE_TPG_ENABLED else []), )),
        ("dfo",
         dfo.ConfParams(
             token_connection=PARTITION + ".triginh",
             dataflow_applications=[
                 dfo.app_config(
                     decision_connection=f"{PARTITION}.trigdec_{dfidx}",
                     capacity=TOKEN_COUNT) for dfidx in range(DF_COUNT)
             ])),
    ])

    # We start modules in "downstream-to-upstream" order, so that each
    # module is ready before its input starts sending data. The stop
    # order is the reverse (upstream-to-downstream), so each module
    # can process all of its input then stop, ensuring all data gets
    # processed
    start_order = ["buf.*", "dfo", "mlt", "ttcm", "ntoq_token"]

    if SOFTWARE_TPG_ENABLED:
        start_order += [
            "fragment_sender", "tcm", "tam_.*", "zip_.*",
            "tpset_subscriber_.*", "tpset_receiver", "request_receiver"
        ]

    stop_order = start_order[::-1]

    startpars = rccmd.StartParams(run=1)
    cmd_data['start'] = acmd([(m, startpars) for m in start_order])
    cmd_data['stop'] = acmd([(m, None) for m in stop_order])

    cmd_data['pause'] = acmd([("mlt", None)])

    resumepars = rccmd.ResumeParams(trigger_interval_ticks=50000000)
    cmd_data['resume'] = acmd([("mlt", resumepars)])

    cmd_data['scrap'] = acmd([("dfo", None)])

    cmd_data['record'] = acmd([("", None)])

    return cmd_data
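
A small worked illustration of the mlt links list assembled above
(hypothetical RU_CONFIG values): each readout-unit channel contributes one
detector-system GeoID, and with SOFTWARE_TPG_ENABLED it additionally
contributes one "DataSelection" GeoID plus one detector GeoID whose element
number is offset by total_link_count.

RU_CONFIG = [{"region_id": 0, "start_channel": 0, "channel_count": 2},
             {"region_id": 1, "start_channel": 2, "channel_count": 2}]
total_link_count = sum(ru["channel_count"] for ru in RU_CONFIG)  # 4
detector_elements = [ru["start_channel"] + idx
                     for ru in RU_CONFIG
                     for idx in range(ru["channel_count"])]
# detector_elements == [0, 1, 2, 3]; the DataSelection links reuse these
# element numbers, and the extra TP links use [4, 5, 6, 7] (offset by 4).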
Code Example #3
File: conf_utils.py  Project: DUNE-DAQ/appfwk
def make_app_command_data(system, app, verbose=False):
    """Given an App instance, create the 'command data' suitable for
    feeding to nanorc. The needed queues are inferred from the
    connections between modules, as are the start and stop order of the
    modules

    TODO: This should probably be split up into separate stages of
    inferring/creating the queues (which can be part of validation)
    and actually making the command data objects for nanorc.

    """

    if verbose:
        console.log(f"Making app command data for {app.name}")

    modules = app.modulegraph.modules

    module_deps = make_module_deps(modules)
    if verbose:
        console.log(f"inter-module dependencies are: {module_deps}")

    stop_order = list(nx.algorithms.dag.topological_sort(module_deps))
    start_order = stop_order[::-1]

    if verbose:
        console.log(f"Inferred module start order is {start_order}")
        console.log(f"Inferred module stop order is {stop_order}")

    command_data = {}

    queue_specs = []

    app_qinfos = defaultdict(list)

    # Infer the queues we need based on the connections between modules

    # Terminology: an "endpoint" is "module.name"
    for mod in modules:
        name = mod.name
        for from_name, downstream_connection in mod.connections.items():
            # The name might be prefixed with a "!" to indicate that it doesn't participate in dependencies. Remove that here because "!" is illegal in actual queue names
            from_name = from_name.replace("!", "")
            from_endpoint = ".".join([name, from_name])
            to_endpoint = downstream_connection.to
            if verbose:
                console.log(
                    f"Making connection from {from_endpoint} to {to_endpoint}")
            if to_endpoint is None:
                continue
            to_mod, to_name = to_endpoint.split(".")
            queue_inst = f"{from_endpoint}_to_{to_endpoint}".replace(".", "")
            # Is there already a queue connecting either endpoint? If so, we reuse it

            # TODO: This is a bit complicated. Might be nicer to find
            # the list of necessary queues in a first step, and then
            # actually make the QueueSpec/QueueInfo objects
            found_from = False
            found_to = False
            for k, v in app_qinfos.items():
                for qi in v:
                    test_endpoint = ".".join([k, qi.name])
                    if test_endpoint == from_endpoint:
                        found_from = True
                        queue_inst = qi.inst
                    if test_endpoint == to_endpoint:
                        found_to = True
                        queue_inst = qi.inst

            if not (found_from or found_to):
                queue_inst = queue_inst if downstream_connection.queue_name is None else downstream_connection.queue_name
                if verbose:
                    console.log(
                        f"downstream_connection is {downstream_connection}, its queue_name is {downstream_connection.queue_name}"
                    )
                    console.log(
                        f"Creating {downstream_connection.queue_kind}({downstream_connection.queue_capacity}) queue with name {queue_inst} connecting {from_endpoint} to {to_endpoint}"
                    )
                queue_specs.append(
                    appfwk.QueueSpec(
                        inst=queue_inst,
                        kind=downstream_connection.queue_kind,
                        capacity=downstream_connection.queue_capacity))

            if not found_from:
                if verbose:
                    console.log(
                        f"Adding output queue to module {name}: inst={queue_inst}, name={from_name}"
                    )
                app_qinfos[name].append(
                    appfwk.QueueInfo(name=from_name,
                                     inst=queue_inst,
                                     dir="output"))
            if not found_to:
                if verbose:
                    console.log(
                        f"Adding input queue to module {to_mod}: inst={queue_inst}, name={to_name}"
                    )
                app_qinfos[to_mod].append(
                    appfwk.QueueInfo(name=to_name,
                                     inst=queue_inst,
                                     dir="input"))

    if verbose:
        console.log(
            f"Creating mod_specs for {[ (mod.name, mod.plugin) for mod in modules ]}"
        )
    mod_specs = [
        mspec(mod.name, mod.plugin, app_qinfos[mod.name]) for mod in modules
    ]

    # Fill in the "standard" command entries in the command_data structure

    command_data['init'] = appfwk.Init(queues=queue_specs,
                                       modules=mod_specs,
                                       nwconnections=system.network_endpoints)

    # TODO: Conf ordering
    command_data['conf'] = acmd([(mod.name, mod.conf) for mod in modules])

    startpars = rccmd.StartParams(run=1, disable_data_storage=False)
    resumepars = rccmd.ResumeParams()

    add_one_command_data(command_data, "start", startpars, app, start_order)
    add_one_command_data(command_data, "stop", None, app, stop_order)
    add_one_command_data(command_data, "scrap", None, app, stop_order)
    add_one_command_data(command_data, "resume", resumepars, app, start_order)
    add_one_command_data(command_data, "pause", None, app, stop_order)

    # TODO: handle modules' `extra_commands`, including "record"

    return command_data
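
A minimal sketch of the ordering logic above (hypothetical three-module
dependency graph; assumes networkx imported as nx, as in the function, and
that make_module_deps points edges from upstream to downstream modules, which
is what the upstream-first stop order implies):

import networkx as nx

deps = nx.DiGraph()
deps.add_edges_from([("source", "processor"), ("processor", "sink")])
stop_order = list(nx.algorithms.dag.topological_sort(deps))
start_order = stop_order[::-1]
print(stop_order)   # ['source', 'processor', 'sink']  (upstream first)
print(start_order)  # ['sink', 'processor', 'source']  (downstream first)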
Code Example #4
File: hsi_gen.py  Project: DUNE-DAQ/minidaqapp
def generate(NW_SPECS: list,
             RUN_NUMBER=333,
             CLOCK_SPEED_HZ: int = 50000000,
             TRIGGER_RATE_HZ: int = 1,
             CONTROL_HSI_HARDWARE=False,
             READOUT_PERIOD_US: int = 1000,
             HSI_ENDPOINT_ADDRESS=1,
             HSI_ENDPOINT_PARTITION=0,
             HSI_RE_MASK=0x20000,
             HSI_FE_MASK=0,
             HSI_INV_MASK=0,
             HSI_SOURCE=1,
             CONNECTIONS_FILE="${TIMING_SHARE}/config/etc/connections.xml",
             HSI_DEVICE_NAME="BOREAS_TLU",
             UHAL_LOG_LEVEL="notice",
             PARTITION="UNKNOWN"):
    """
    { item_description }
    """
    cmd_data = {}

    required_eps = {PARTITION + '.hsievent'}
    if CONTROL_HSI_HARDWARE:
        required_eps.add(PARTITION + '.hsicmds')

    if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}"
        )

    # Define modules and queues
    queue_bare_specs = []

    if CONTROL_HSI_HARDWARE:
        queue_bare_specs.extend([
            app.QueueSpec(inst="hw_cmds_q_to_net",
                          kind='FollySPSCQueue',
                          capacity=100)
        ])

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("hsir", "HSIReadout", []),
    ]

    if CONTROL_HSI_HARDWARE:
        hsi_controller_init_data = hsic.InitParams(
            qinfos=app.QueueInfos([
                app.QueueInfo(name="hardware_commands_out",
                              inst="hw_cmds_q_to_net",
                              dir="output")
            ]),
            device=HSI_DEVICE_NAME,
        )
        mod_specs.extend([
            mspec("qton_hw_cmds", "QueueToNetwork", [
                app.QueueInfo(
                    name="input", inst="hw_cmds_q_to_net", dir="input")
            ]),
            app.ModSpec(inst="hsic",
                        plugin="HSIController",
                        data=hsi_controller_init_data)
        ])

    cmd_data['init'] = app.Init(queues=queue_specs,
                                modules=mod_specs,
                                nwconnections=NW_SPECS)

    conf_cmds = [
        ("hsir",
         hsi.ConfParams(
             connections_file=CONNECTIONS_FILE,
             readout_period=READOUT_PERIOD_US,
             hsi_device_name=HSI_DEVICE_NAME,
             uhal_log_level=UHAL_LOG_LEVEL,
             hsievent_connection_name=f"{PARTITION}.hsievent",
         )),
    ]

    trigger_interval_ticks = 0
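    # e.g. TRIGGER_RATE_HZ=1 on a 50 MHz clock: floor((1 / 1) * 50000000) = 50000000 ticks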
    if TRIGGER_RATE_HZ > 0:
        trigger_interval_ticks = math.floor(
            (1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ)
    elif CONTROL_HSI_HARDWARE:
        console.log(
            'WARNING! Emulated trigger rate of 0 will not disable signal emulation in real HSI hardware! To disable emulated HSI triggers, use the option: "--hsi-source 0" or mask all signal bits',
            style="bold red")

    if CONTROL_HSI_HARDWARE:
        conf_cmds.extend([
            ("qton_hw_cmds",
             qton.Conf(msg_type="dunedaq::timinglibs::timingcmd::TimingHwCmd",
                       msg_module_name="TimingHwCmdNQ",
                       sender_config=nos.Conf(name=PARTITION + ".hsicmds",
                                              stype="msgpack"))),
            ("hsic",
             hsic.ConfParams(
                 clock_frequency=CLOCK_SPEED_HZ,
                 trigger_interval_ticks=trigger_interval_ticks,
                 address=HSI_ENDPOINT_ADDRESS,
                 partition=HSI_ENDPOINT_PARTITION,
                 rising_edge_mask=HSI_RE_MASK,
                 falling_edge_mask=HSI_FE_MASK,
                 invert_edge_mask=HSI_INV_MASK,
                 data_source=HSI_SOURCE,
             )),
        ])
    cmd_data['conf'] = acmd(conf_cmds)

    startpars = rccmd.StartParams(
        run=RUN_NUMBER, trigger_interval_ticks=trigger_interval_ticks)
    resumepars = rccmd.ResumeParams(
        trigger_interval_ticks=trigger_interval_ticks)

    cmd_data['start'] = acmd([("hsi.*", startpars), ("qton_.*", startpars)])

    cmd_data['stop'] = acmd([("hsi.*", None), ("qton.*", None)])

    cmd_data['pause'] = acmd([("", None)])

    if CONTROL_HSI_HARDWARE:
        cmd_data['resume'] = acmd([("hsic", resumepars)])
    else:
        cmd_data['resume'] = acmd([("", None)])

    cmd_data['scrap'] = acmd([("", None)])

    cmd_data['record'] = acmd([("", None)])

    return cmd_data
Code Example #5
def generate(TRIGGER_RATE_HZ: float = 1.0,
             OUTPUT_PATH: str = ".",
             TOKEN_COUNT: int = 10,
             CLOCK_SPEED_HZ: int = 50000000,
             FORGET_DECISION_PROB: float = 0.0,
             HOLD_DECISION_PROB: float = 0.0,
             HOLD_MAX_SIZE: int = 0,
             HOLD_MIN_SIZE: int = 0,
             HOLD_MIN_MS: int = 0,
             RELEASE_RANDOMLY_PROB: float = 0.0):
    """
    { item_description }
    """
    cmd_data = {}

    # Derived parameters
    TRG_INTERVAL_TICKS = math.floor((1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ)
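    # e.g. TRIGGER_RATE_HZ=1.0 at CLOCK_SPEED_HZ=50000000 gives 50000000 ticks between candidates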

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst="time_sync_q", kind='FollySPSCQueue', capacity=100),
        app.QueueSpec(inst="token_q", kind='FollySPSCQueue', capacity=20),
        app.QueueSpec(inst="trigger_decision_q",
                      kind='FollySPSCQueue',
                      capacity=20),
        app.QueueSpec(inst="trigger_candidate_q",
                      kind='FollyMPMCQueue',
                      capacity=20),  # No MPSC queue?
    ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("fdf", "FakeDataFlow", [
            app.QueueInfo(name="trigger_decision_source",
                          inst="trigger_decision_q",
                          dir="input"),
            app.QueueInfo(
                name="trigger_complete_sink", inst="token_q", dir="output"),
        ]),
        mspec("mlt", "ModuleLevelTrigger", [
            app.QueueInfo(name="token_source", inst="token_q", dir="input"),
            app.QueueInfo(name="trigger_decision_sink",
                          inst="trigger_decision_q",
                          dir="output"),
            app.QueueInfo(name="trigger_candidate_source",
                          inst="trigger_candidate_q",
                          dir="output"),
        ]),
        mspec("rtcm_poisson", "RandomTriggerCandidateMaker", [
            app.QueueInfo(
                name="time_sync_source", inst="time_sync_q", dir="input"),
            app.QueueInfo(name="trigger_candidate_sink",
                          inst="trigger_candidate_q",
                          dir="output"),
        ]),
        mspec("rtcm_uniform", "RandomTriggerCandidateMaker", [
            app.QueueInfo(
                name="time_sync_source", inst="time_sync_q", dir="input"),
            app.QueueInfo(name="trigger_candidate_sink",
                          inst="trigger_candidate_q",
                          dir="output"),
        ]),
    ]

    cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs)

    cmd_data['conf'] = acmd([
        ("fdf",
         fdf.ConfParams(hold_max_size=HOLD_MAX_SIZE,
                        hold_min_size=HOLD_MIN_SIZE,
                        hold_min_ms=HOLD_MIN_MS,
                        release_randomly_prob=RELEASE_RANDOMLY_PROB,
                        forget_decision_prob=FORGET_DECISION_PROB,
                        hold_decision_prob=HOLD_DECISION_PROB)),
        ("mlt",
         mlt.ConfParams(links=[idx for idx in range(3)],
                        initial_token_count=TOKEN_COUNT)),
        ("rtcm_poisson",
         rtcm.ConfParams(trigger_interval_ticks=TRG_INTERVAL_TICKS,
                         clock_frequency_hz=CLOCK_SPEED_HZ,
                         timestamp_method="kSystemClock",
                         time_distribution="kPoisson")),
        ("rtcm_uniform",
         rtcm.ConfParams(trigger_interval_ticks=TRG_INTERVAL_TICKS,
                         clock_frequency_hz=CLOCK_SPEED_HZ,
                         timestamp_method="kSystemClock",
                         time_distribution="kUniform")),
    ])

    startpars = rccmd.StartParams(run=1, disable_data_storage=False)
    cmd_data['start'] = acmd([
        ("fdf", startpars),
        ("mlt", startpars),
        ("rtcm_poisson", startpars),
        ("rtcm_uniform", startpars),
    ])

    cmd_data['stop'] = acmd([
        ("fdf", None),
        ("mlt", None),
        ("rtcm_poisson", None),
        ("rtcm_uniform", None),
    ])

    cmd_data['pause'] = acmd([("", None)])

    resumepars = rccmd.ResumeParams(trigger_interval_ticks=50000000)
    cmd_data['resume'] = acmd([("mlt", resumepars)])

    cmd_data['scrap'] = acmd([("", None)])

    return cmd_data
Code Example #6
File: fake_hsi_gen.py  Project: DUNE-DAQ/minidaqapp
def generate(NW_SPECS: list,
             RUN_NUMBER=333,
             CLOCK_SPEED_HZ: int = 50000000,
             DATA_RATE_SLOWDOWN_FACTOR: int = 1,
             TRIGGER_RATE_HZ: int = 1,
             HSI_DEVICE_ID: int = 0,
             MEAN_SIGNAL_MULTIPLICITY: int = 0,
             SIGNAL_EMULATION_MODE: int = 0,
             ENABLED_SIGNALS: int = 0b00000001,
             PARTITION="UNKNOWN"):
    """
    { item_description }
    """
    cmd_data = {}

    required_eps = {PARTITION + '.hsievent'}
    if not required_eps.issubset([nw.name for nw in NW_SPECS]):
        raise RuntimeError(
            f"ERROR: not all the required endpoints ({', '.join(required_eps)}) found in list of endpoints {' '.join([nw.name for nw in NW_SPECS])}"
        )

    # Define modules and queues
    queue_bare_specs = []

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("fhsig", "FakeHSIEventGenerator", []),
    ]

    cmd_data['init'] = app.Init(queues=queue_specs,
                                modules=mod_specs,
                                nwconnections=NW_SPECS)

    trigger_interval_ticks = 0
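    # e.g. 1 Hz on a 50 MHz clock with DATA_RATE_SLOWDOWN_FACTOR=10:
    # floor((1 / 1) * 50000000 / 10) = 5000000 ticks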
    if TRIGGER_RATE_HZ > 0:
        trigger_interval_ticks = math.floor(
            (1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR)

    cmd_data['conf'] = acmd([
        ("fhsig",
         fhsig.Conf(
             clock_frequency=CLOCK_SPEED_HZ / DATA_RATE_SLOWDOWN_FACTOR,
             trigger_interval_ticks=trigger_interval_ticks,
             mean_signal_multiplicity=MEAN_SIGNAL_MULTIPLICITY,
             signal_emulation_mode=SIGNAL_EMULATION_MODE,
             enabled_signals=ENABLED_SIGNALS,
             timesync_topic="Timesync",
             hsievent_connection_name=PARTITION + ".hsievent",
         )),
    ])

    startpars = rccmd.StartParams(
        run=RUN_NUMBER, trigger_interval_ticks=trigger_interval_ticks)
    resumepars = rccmd.ResumeParams(
        trigger_interval_ticks=trigger_interval_ticks)

    cmd_data['start'] = acmd([
        ("fhsig", startpars),
    ])

    cmd_data['stop'] = acmd([
        ("fhsig", None),
    ])

    cmd_data['pause'] = acmd([("", None)])

    cmd_data['resume'] = acmd([("fhsig", resumepars)])

    cmd_data['scrap'] = acmd([("", None)])

    cmd_data['record'] = acmd([("", None)])

    return cmd_data
Code Example #7
File: faketp_chain.py  Project: DUNE-DAQ/trigger
def generate(
        INPUT_FILES: list,
        SLOWDOWN_FACTOR: float,
        
        ACTIVITY_PLUGIN: str = 'TriggerActivityMakerPrescalePlugin',
        ACTIVITY_CONFIG: dict = dict(prescale=1000),
        
        CANDIDATE_PLUGIN: str = 'TriggerCandidateMakerPrescalePlugin',
        CANDIDATE_CONFIG: dict = dict(prescale=1000),
        
        TOKEN_COUNT: int = 10,
        
        FORGET_DECISION_PROB: float = 0.0,
        HOLD_DECISION_PROB: float = 0.0,
        HOLD_MAX_SIZE: int = 0,
        HOLD_MIN_SIZE: int = 0,
        HOLD_MIN_MS: int = 0,
        RELEASE_RANDOMLY_PROB: float = 0.0,
        
        CLOCK_SPEED_HZ: int = 50000000
):
    cmd_data = {}

    # Derived parameters
    CLOCK_FREQUENCY_HZ = CLOCK_SPEED_HZ / SLOWDOWN_FACTOR

    # Define modules and queues
    queue_specs = [
        app.QueueSpec(inst=f"tpset_q{i}", kind='FollySPSCQueue', capacity=1000)
        for i in range(len(INPUT_FILES))
    ] +  [
        app.QueueSpec(inst="tpset_plus_hb_q", kind='FollyMPMCQueue', capacity=1000),
        app.QueueSpec(inst='zipped_tpset_q', kind='FollyMPMCQueue', capacity=1000),
        app.QueueSpec(inst='taset_q', kind='FollySPSCQueue', capacity=1000),
        app.QueueSpec(inst='trigger_candidate_q', kind='FollyMPMCQueue', capacity=1000),
        app.QueueSpec(inst='trigger_decision_q', kind='FollySPSCQueue', capacity=1000),
        app.QueueSpec(inst='token_q', kind='FollySPSCQueue', capacity=1000),
    ]

    mod_specs = [
        mspec(f'tpm{i}', 'TriggerPrimitiveMaker', [ # File -> TPSet
            app.QueueInfo(name='tpset_sink', inst=f'tpset_q{i}', dir='output'),
        ])
        for i in range(len(INPUT_FILES))
    ] + [

        mspec(f"ftpchm{i}", "FakeTPCreatorHeartbeatMaker", [
            app.QueueInfo(name="tpset_source", inst=f"tpset_q{i}", dir="input"),
            app.QueueInfo(name="tpset_sink", inst="tpset_plus_hb_q", dir="output"),
        ]) for i in range(len(INPUT_FILES))

    ] +  [

        mspec("zip", "TPZipper", [
            app.QueueInfo(name="input", inst="tpset_plus_hb_q", dir="input"),
            app.QueueInfo(name="output", inst="zipped_tpset_q", dir="output"),
        ]),

        mspec('tam', 'TriggerActivityMaker', [ # TPSet -> TASet
            app.QueueInfo(name='input', inst='zipped_tpset_q', dir='input'),
            app.QueueInfo(name='output', inst='taset_q', dir='output'),
        ]),
        
        mspec('tcm', 'TriggerCandidateMaker', [ # TASet -> TC
            app.QueueInfo(name='input', inst='taset_q', dir='input'),
            app.QueueInfo(name='output', inst='trigger_candidate_q', dir='output'),
        ]),
        
        mspec('mlt', 'ModuleLevelTrigger', [ # TC -> TD (with sufficient tokens)
            app.QueueInfo(name='trigger_candidate_source', inst='trigger_candidate_q', dir='input'),
            app.QueueInfo(name='trigger_decision_sink', inst='trigger_decision_q', dir='output'),
            app.QueueInfo(name='token_source', inst='token_q', dir='input'),
        ]),
        
        mspec('fdf', 'FakeDataFlow', [ # TD -> Token
            app.QueueInfo(name='trigger_decision_source', inst='trigger_decision_q', dir='input'),
            app.QueueInfo(name='trigger_complete_sink', inst='token_q', dir='output'),
        ]),
    ]

    cmd_data['init'] = app.Init(queues=queue_specs, modules=mod_specs)

    make_moo_record(ACTIVITY_CONFIG, 'ActivityConf', 'temptypes')
    make_moo_record(CANDIDATE_CONFIG, 'CandidateConf', 'temptypes')
    import temptypes

    cmd_data['conf'] = acmd([
        (f'tpm{i}', tpm.ConfParams(
            filename=input_file,
            number_of_loops=-1, # Infinite
            tpset_time_offset=0,
            tpset_time_width=10000,
            clock_frequency_hz=CLOCK_FREQUENCY_HZ,
            maximum_wait_time_us=1000
        )) for i,input_file in enumerate(INPUT_FILES)
    ] + [
        (f"ftpchm{i}", ftpchm.Conf(
          heartbeat_interval=100000
        )) for i in range(len(INPUT_FILES))
    ] + [
        ("zip", tzip.ConfParams(
            cardinality=len(INPUT_FILES),
            max_latency_ms=1000,
            region_id=0, # Fake placeholder
            element_id=0 # Fake placeholder
        )),
        ('tam', tam.Conf(
            activity_maker=ACTIVITY_PLUGIN,
            geoid_region=0, # Fake placeholder
            geoid_element=0, # Fake placeholder
            window_time=10000, # should match whatever makes TPSets, in principle
            buffer_time=625000, # 10ms in 62.5 MHz ticks
            activity_maker_config=temptypes.ActivityConf(**ACTIVITY_CONFIG)
        )),
        ('tcm', tcm.Conf(
            candidate_maker=CANDIDATE_PLUGIN,
            candidate_maker_config=temptypes.CandidateConf(**CANDIDATE_CONFIG)
        )),
        ('mlt', mlt.ConfParams(
            links=[],
            initial_token_count=TOKEN_COUNT                    
        )),
        ('fdf', fdf.ConfParams(
          hold_max_size=HOLD_MAX_SIZE,
          hold_min_size=HOLD_MIN_SIZE,
          hold_min_ms=HOLD_MIN_MS,
          release_randomly_prob=RELEASE_RANDOMLY_PROB,
          forget_decision_prob=FORGET_DECISION_PROB,
          hold_decision_prob=HOLD_DECISION_PROB
        )),
    ])

    startpars = rccmd.StartParams(run=1)
    cmd_data['start'] = acmd(
        [
            ('fdf', startpars),
            ('mlt', startpars),
            ('tcm', startpars),
            ('tam', startpars),
            ('zip', startpars),
        ] +
        [ (f'ftpchm{i}', startpars) for i in range(len(INPUT_FILES)) ] +
        [ (f'tpm{i}', startpars) for i in range(len(INPUT_FILES)) ]
    )

    cmd_data['pause'] = acmd([
        ('mlt', None)
    ])
    
    resumepars = rccmd.ResumeParams(trigger_interval_ticks=50000000)
    cmd_data['resume'] = acmd([
        ('mlt', resumepars)
    ])
    
    cmd_data['stop'] = acmd(
        [ (f'tpm{i}', None) for i in range(len(INPUT_FILES)) ] +
        [ (f'ftpchm{i}', None) for i in range(len(INPUT_FILES)) ] +
        [
            ('zip', None),
            ('tam', None),
            ('tcm', None),
            ('mlt', None),
            ('fdf', None)
        ]
    )

    cmd_data['scrap'] = acmd(
        [ (f'tpm{i}', None) for i in range(len(INPUT_FILES)) ] +
        [ (f'ftpchm{i}', None) for i in range(len(INPUT_FILES)) ] +
        [
            ('zip', None),
            ('tam', None),
            ('tcm', None),
            ('mlt', None),
            ('fdf', None)
        ]
    )

    return cmd_data
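
A minimal driver sketch for the chain above (hypothetical input file names;
assumes the same moo-generated modules and helpers as the snippet):

cmd_data = generate(
    INPUT_FILES=["tps_link_0.txt", "tps_link_1.txt"],  # hypothetical inputs
    SLOWDOWN_FACTOR=10.0,  # emulate an effective 5 MHz clock from the 50 MHz default
)
print(sorted(cmd_data.keys()))
# ['conf', 'init', 'pause', 'resume', 'scrap', 'start', 'stop']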