Code example #1
def cli(partition_name, number_of_data_producers, emulator_mode,
        data_rate_slowdown_factor, run_number, trigger_rate_hz,
        trigger_window_before_ticks, trigger_window_after_ticks, token_count,
        data_file, output_path, disable_trace, use_felix, use_ssp, host_df,
        host_ru, host_trigger, host_hsi, host_timing_hw, control_timing_hw,
        timing_hw_connections_file, region_id, latency_buffer_size,
        hsi_device_name, hsi_readout_period, hsi_endpoint_address,
        hsi_endpoint_partition, hsi_re_mask, hsi_fe_mask, hsi_inv_mask,
        hsi_source, use_hsi_hw, hsi_device_id, mean_hsi_signal_multiplicity,
        hsi_signal_emulation_mode, enabled_hsi_signals, ttcm_s1, ttcm_s2,
        trigger_activity_plugin, trigger_activity_config,
        trigger_candidate_plugin, trigger_candidate_config,
        enable_raw_recording, raw_recording_output_dir, frontend_type,
        opmon_impl, enable_dqm, ers_impl, dqm_impl, pocket_url,
        enable_software_tpg, enable_tpset_writing, use_fake_data_producers,
        dqm_cmap, dqm_rawdisplay_params, dqm_meanrms_params,
        dqm_fourier_params, op_env, tpc_region_name_prefix, max_file_size,
        json_dir):

    if exists(json_dir):
        raise RuntimeError(f"Directory {json_dir} already exists")

    console.log("Loading dataflow config generator")
    from .dataflow_gen import DataFlowApp
    # if enable_dqm:
    #     console.log("Loading dqm config generator")
    #     from . import dqm_gen
    console.log("Loading readout config generator")
    from .readout_gen import ReadoutApp
    console.log("Loading trigger config generator")
    from .trigger_gen import TriggerApp
    console.log("Loading hsi config generator")
    from . import hsi_gen
    console.log("Loading fake hsi config generator")
    from .fake_hsi_gen import FakeHSIApp
    console.log("Loading timing hardware config generator")
    from .thi_gen import THIApp

    console.log(
        f"Generating configs for hosts trigger={host_trigger} dataflow={host_df} readout={host_ru} hsi={host_hsi} dqm={host_ru}"
    )

    the_system = System()

    total_number_of_data_producers = number_of_data_producers * len(host_ru)
    channel_kind = "SSP" if use_ssp else "TPC"
    console.log(
        f"Will setup {number_of_data_producers} {channel_kind} channels per host, for a total of {total_number_of_data_producers}"
    )

    if enable_software_tpg and frontend_type != 'wib':
        raise Exception(
            "Software TPG is only available for the wib at the moment!")

    if enable_software_tpg and use_fake_data_producers:
        raise Exception("Fake data producers don't support software tpg")

    if use_fake_data_producers and enable_dqm:
        raise Exception("DQM can't be used with fake data producers")

    if enable_tpset_writing and not enable_software_tpg:
        raise Exception(
            "TPSet writing can only be used when software TPG is enabled")

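    # Sign convention: a positive --token-count gives all tokens to the trigger
    # emulator; a negative value instead gives |token_count| tokens to dataflow.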
    if token_count > 0:
        df_token_count = 0
        trigemu_token_count = token_count
    else:
        df_token_count = -1 * token_count
        trigemu_token_count = 0

    if (len(region_id) != len(host_ru)) and (len(region_id) != 1):
        raise Exception(
            "--region-id should be specified either once only or once for each --host-ru!"
        )

    if frontend_type == 'wib' or frontend_type == 'wib2':
        system_type = 'TPC'
    elif frontend_type == 'pacman':
        system_type = 'NDLArTPC'
    else:
        system_type = 'PDS'

    if opmon_impl == 'cern':
        info_svc_uri = "influx://188.185.88.195:80/write?db=db1"
    elif opmon_impl == 'pocket':
        info_svc_uri = "influx://" + pocket_url + ":31002/write?db=influxdb"
    else:
        info_svc_uri = "file://info_${APP_NAME}_${APP_PORT}.json"

    ers_settings = dict()

    if ers_impl == 'cern':
        use_kafka = True
        ers_settings[
            "INFO"] = "erstrace,throttle,lstdout,erskafka(dqmbroadcast:9092)"
        ers_settings[
            "WARNING"] = "erstrace,throttle,lstdout,erskafka(dqmbroadcast:9092)"
        ers_settings[
            "ERROR"] = "erstrace,throttle,lstdout,erskafka(dqmbroadcast:9092)"
        ers_settings["FATAL"] = "erstrace,lstdout,erskafka(dqmbroadcast:9092)"
    elif ers_impl == 'pocket':
        use_kafka = True
        ers_settings[
            "INFO"] = "erstrace,throttle,lstdout,erskafka(" + pocket_url + ":30092)"
        ers_settings[
            "WARNING"] = "erstrace,throttle,lstdout,erskafka(" + pocket_url + ":30092)"
        ers_settings[
            "ERROR"] = "erstrace,throttle,lstdout,erskafka(" + pocket_url + ":30092)"
        ers_settings[
            "FATAL"] = "erstrace,lstdout,erskafka(" + pocket_url + ":30092)"
    else:
        use_kafka = False
        ers_settings["INFO"] = "erstrace,throttle,lstdout"
        ers_settings["WARNING"] = "erstrace,throttle,lstdout"
        ers_settings["ERROR"] = "erstrace,throttle,lstdout"
        ers_settings["FATAL"] = "erstrace,lstdout"

    dqm_kafka_address = "dqmbroadcast:9092" if dqm_impl == 'cern' else pocket_url + ":30092" if dqm_impl == 'pocket' else ''

    # network connections map
    nw_specs = [
        nwmgr.Connection(name=partition_name + ".hsievent",
                         topics=[],
                         address="tcp://{host_trigger}:12344"),
        nwmgr.Connection(name=partition_name + ".trigdec",
                         topics=[],
                         address="tcp://{host_df}:12345"),
        nwmgr.Connection(name=partition_name + ".triginh",
                         topics=[],
                         address="tcp://{host_trigger}:12346"),
        nwmgr.Connection(name=partition_name + ".frags_0",
                         topics=[],
                         address="tcp://{host_df}:12347")
    ]

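    # Ports 12344-12347 are fixed above; every remaining connection gets the
    # next port in sequence, starting from 12348.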
    port = 12348
    if control_timing_hw:
        nw_specs.append(
            nwmgr.Connection(name=partition_name + ".hsicmds",
                             topics=[],
                             address="tcp://{host_timing_hw}:" + f"{port}"))
        port = port + 1

    if enable_software_tpg:
        nw_specs.append(
            nwmgr.Connection(name=partition_name + ".tp_frags_0",
                             topics=[],
                             address="tcp://{host_df}:" + f"{port}"))
        port = port + 1
        nw_specs.append(
            nwmgr.Connection(name=f'{partition_name}.frags_tpset_ds_0',
                             topics=[],
                             address="tcp://{host_df}:" + f"{port}"))
        port = port + 1
        nw_specs.append(
            nwmgr.Connection(name=f"{partition_name}.ds_tp_datareq_0",
                             topics=[],
                             address="tcp://{host_trigger}:" + f"{port}"))
        port = port + 1

    host_id_dict = {}
    ru_configs = []
    ru_channel_counts = {}
    for region in region_id:
        ru_channel_counts[region] = 0
    regionidx = 0

    for hostidx in range(len(host_ru)):
        if enable_software_tpg:
            nw_specs.append(
                nwmgr.Connection(name=f"{partition_name}.tpsets_{hostidx}",
                                 topics=["TPSets"],
                                 address="tcp://{host_ru" + f"{hostidx}" +
                                 "}:" + f"{port}"))
            port = port + 1

        if enable_dqm:
            nw_specs.append(
                nwmgr.Connection(name=f"{partition_name}.fragx_dqm_{hostidx}",
                                 topics=[],
                                 address="tcp://{host_ru" + f"{hostidx}" +
                                 "}:" + f"{port}"))
            port = port + 1

        nw_specs.append(
            nwmgr.Connection(name=f"{partition_name}.datareq_{hostidx}",
                             topics=[],
                             address="tcp://{host_ru" + f"{hostidx}" + "}:" +
                             f"{port}"))
        port = port + 1

        # Should end up something like 'network_endpoints[timesync_0]:
        # "tcp://{host_ru0}:12347"'
        nw_specs.append(
            nwmgr.Connection(name=f"{partition_name}.timesync_{hostidx}",
                             topics=["Timesync"],
                             address="tcp://{host_ru" + f"{hostidx}" + "}:" +
                             f"{port}"))
        port = port + 1

        cardid = 0
        if host_ru[hostidx] in host_id_dict:
            host_id_dict[host_ru[hostidx]] = host_id_dict[host_ru[hostidx]] + 1
            cardid = host_id_dict[host_ru[hostidx]]
        else:
            host_id_dict[host_ru[hostidx]] = 0
        ru_configs.append({
            "host":
            host_ru[hostidx],
            "card_id":
            cardid,
            "region_id":
            region_id[regionidx],
            "start_channel":
            ru_channel_counts[region_id[regionidx]],
            "channel_count":
            number_of_data_producers
        })
        ru_channel_counts[region_id[regionidx]] += number_of_data_producers
        if len(region_id) != 1:
            regionidx = regionidx + 1

    for nw in nw_specs:
        print(f'{nw.name} {nw.topics} {nw.address}')

    if control_timing_hw:
        pass
        # PL: TODO
        # timing_cmd_network_endpoints = set()
        # if use_hsi_hw:
        #     timing_cmd_network_endpoints.add(partition_name + 'hsicmds')
        # cmd_data_thi = thi_gen.generate(RUN_NUMBER = run_number,
        #     NW_SPECS=nw_specs,
        #     TIMING_CMD_NETWORK_ENDPOINTS=timing_cmd_network_endpoints,
        #     CONNECTIONS_FILE=timing_hw_connections_file,
        #     HSI_DEVICE_NAME=hsi_device_name,
        # )
        # console.log("thi cmd data:", cmd_data_thi)

    if use_hsi_hw:
        pass
        # PL: TODO
        # cmd_data_hsi = hsi_gen.generate(nw_specs,
        #     RUN_NUMBER = run_number,
        #     CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
        #     TRIGGER_RATE_HZ = trigger_rate_hz,
        #     CONTROL_HSI_HARDWARE=control_timing_hw,
        #     CONNECTIONS_FILE=timing_hw_connections_file,
        #     READOUT_PERIOD_US = hsi_readout_period,
        #     HSI_DEVICE_NAME = hsi_device_name,
        #     HSI_ENDPOINT_ADDRESS = hsi_endpoint_address,
        #     HSI_ENDPOINT_PARTITION = hsi_endpoint_partition,
        #     HSI_RE_MASK=hsi_re_mask,
        #     HSI_FE_MASK=hsi_fe_mask,
        #     HSI_INV_MASK=hsi_inv_mask,
        #     HSI_SOURCE=hsi_source,
        #     PARTITION=partition_name)
    else:
        the_system.apps["hsi"] = FakeHSIApp(
            # NW_SPECS=nw_specs,
            RUN_NUMBER=run_number,
            CLOCK_SPEED_HZ=CLOCK_SPEED_HZ,
            DATA_RATE_SLOWDOWN_FACTOR=data_rate_slowdown_factor,
            TRIGGER_RATE_HZ=trigger_rate_hz,
            HSI_DEVICE_ID=hsi_device_id,
            MEAN_SIGNAL_MULTIPLICITY=mean_hsi_signal_multiplicity,
            SIGNAL_EMULATION_MODE=hsi_signal_emulation_mode,
            ENABLED_SIGNALS=enabled_hsi_signals,
            PARTITION=partition_name,
            HOST=host_hsi)

        # the_system.apps["hsi"] = util.App(modulegraph=mgraph_hsi, host=host_hsi)
    console.log("hsi cmd data:", the_system.apps["hsi"])

    the_system.apps['trigger'] = TriggerApp(
        # NW_SPECS = nw_specs,
        SOFTWARE_TPG_ENABLED=enable_software_tpg,
        RU_CONFIG=ru_configs,
        ACTIVITY_PLUGIN=trigger_activity_plugin,
        ACTIVITY_CONFIG=eval(trigger_activity_config),
        CANDIDATE_PLUGIN=trigger_candidate_plugin,
        CANDIDATE_CONFIG=eval(trigger_candidate_config),
        TOKEN_COUNT=trigemu_token_count,
        SYSTEM_TYPE=system_type,
        TTCM_S1=ttcm_s1,
        TTCM_S2=ttcm_s2,
        TRIGGER_WINDOW_BEFORE_TICKS=trigger_window_before_ticks,
        TRIGGER_WINDOW_AFTER_TICKS=trigger_window_after_ticks,
        PARTITION=partition_name,
        HOST=host_trigger)

    # console.log("trigger cmd data:", cmd_data_trigger)

    #-------------------------------------------------------------------
    # Readout apps

    cardid = {}
    host_id_dict = {}

    ru_app_names = [
        f"ruflx{idx}" if use_felix else f"ruemu{idx}"
        for idx in range(len(host_ru))
    ]

    for hostidx in range(len(host_ru)):
        if host_ru[hostidx] in host_id_dict:
            host_id_dict[host_ru[hostidx]] = host_id_dict[host_ru[hostidx]] + 1
            cardid[hostidx] = host_id_dict[host_ru[hostidx]]
        else:
            cardid[hostidx] = 0
            host_id_dict[host_ru[hostidx]] = 0

    mgraphs_readout = []
    for i in range(len(host_ru)):
        ru_name = ru_app_names[i]
        the_system.apps[
            ru_name] = ReadoutApp(  # NUMBER_OF_DATA_PRODUCERS = number_of_data_producers,
                PARTITION=partition_name,
                # NW_SPECS=nw_specs,
                RU_CONFIG=ru_configs,
                # TOTAL_NUMBER_OF_DATA_PRODUCERS=total_number_of_data_producers,
                EMULATOR_MODE=emulator_mode,
                DATA_RATE_SLOWDOWN_FACTOR=data_rate_slowdown_factor,
                DATA_FILE=data_file,
                FLX_INPUT=use_felix,
                SSP_INPUT=use_ssp,
                CLOCK_SPEED_HZ=CLOCK_SPEED_HZ,
                RUIDX=i,
                # CARDID = cardid[i],
                RAW_RECORDING_ENABLED=enable_raw_recording,
                RAW_RECORDING_OUTPUT_DIR=raw_recording_output_dir,
                FRONTEND_TYPE=frontend_type,
                SYSTEM_TYPE=system_type,
                # REGION_ID = region_id,
                # DQM_ENABLED=enable_dqm,
                # DQM_KAFKA_ADDRESS=dqm_kafka_address,
                SOFTWARE_TPG_ENABLED=enable_software_tpg,
                USE_FAKE_DATA_PRODUCERS=use_fake_data_producers,
                HOST=host_ru[i])
        console.log(f"{ru_name} app: {the_system.apps[ru_name]}")

    the_system.apps['dataflow'] = DataFlowApp(
        # NW_SPECS = nw_specs,
        FRAGMENT_PRODUCERS=the_system.get_fragment_producers(),
        RU_CONFIG=ru_configs,
        RUN_NUMBER=run_number,
        OUTPUT_PATH=output_path,
        TOKEN_COUNT=df_token_count,
        SYSTEM_TYPE=system_type,
        SOFTWARE_TPG_ENABLED=enable_software_tpg,
        TPSET_WRITING_ENABLED=enable_tpset_writing,
        PARTITION=partition_name,
        OPERATIONAL_ENVIRONMENT=op_env,
        TPC_REGION_NAME_PREFIX=tpc_region_name_prefix,
        MAX_FILE_SIZE=max_file_size,
        HOST=host_df)

    # exit(0)
    #     console.log("dataflow cmd data:", cmd_data_dataflow)

    # a.append(this_readout_mgraph)
    # for i,ru_name in enumerate(ru_app_names):
    #  = util.App(modulegraph=mgraphs_readout[i], host=host_ru[i])
    # = [ readout_gen.generate(nw_specs,
    #                                              RU_CONFIG = ru_configs,
    #                                              EMULATOR_MODE = emulator_mode,
    #                                              DATA_RATE_SLOWDOWN_FACTOR = data_rate_slowdown_factor,
    #                                              RUN_NUMBER = run_number,
    #                                              DATA_FILE = data_file,
    #                                              FLX_INPUT = use_felix,
    #                                              SSP_INPUT = use_ssp,
    #                                              CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
    #                                              RUIDX = hostidx,
    #                                              RAW_RECORDING_ENABLED = enable_raw_recording,
    #                                              RAW_RECORDING_OUTPUT_DIR = raw_recording_output_dir,
    #                                              FRONTEND_TYPE = frontend_type,
    #                                              SYSTEM_TYPE = system_type,
    #                                              SOFTWARE_TPG_ENABLED = enable_software_tpg,
    #                                              USE_FAKE_DATA_PRODUCERS = use_fake_data_producers,
    #                                              PARTITION=partition_name,
    #                                              LATENCY_BUFFER_SIZE=latency_buffer_size) for hostidx in range(len(host_ru))]
    #    console.log("readout cmd data:", cmd_data_readout)

    #     if enable_dqm:
    #         cmd_data_dqm = [ dqm_gen.generate(nw_specs,
    #                 RU_CONFIG = ru_configs,
    #                 EMULATOR_MODE = emulator_mode,
    #                 RUN_NUMBER = run_number,
    #                 DATA_FILE = data_file,
    #                 CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
    #                 RUIDX = hostidx,
    #                 SYSTEM_TYPE = system_type,
    #                 DQM_ENABLED=enable_dqm,
    #                 DQM_KAFKA_ADDRESS=dqm_kafka_address,
    #                 DQM_CMAP=dqm_cmap,
    #                 DQM_RAWDISPLAY_PARAMS=dqm_rawdisplay_params,
    #                 DQM_MEANRMS_PARAMS=dqm_meanrms_params,
    #                 DQM_FOURIER_PARAMS=dqm_fourier_params,
    #                 PARTITION=partition_name
    #                 ) for hostidx in range(len(host_ru))]
    #         console.log("dqm cmd data:", cmd_data_dqm)

    #     data_dir = join(json_dir, 'data')
    #     os.makedirs(data_dir)

    #     app_thi="thi"
    #     app_hsi = "hsi"
    #     app_trigger = "trigger"
    #     app_df = "dataflow"
    #     app_dqm = [f"dqm{idx}" for idx in range(len(host_ru))]
    #     app_ru = [f"ruflx{idx}" if use_felix else f"ruemu{idx}" for idx in range(len(host_ru))]
    #     if use_ssp:
    #         app_ru = [f"russp{idx}" if use_ssp else f"ruemu{idx}" for idx in range(len(host_ru))]

    #     jf_hsi = join(data_dir, app_hsi)
    #     jf_trigemu = join(data_dir, app_trigger)
    #     jf_df = join(data_dir, app_df)
    #     jf_dqm = [join(data_dir, app_dqm[idx]) for idx in range(len(host_ru))]
    #     jf_ru = [join(data_dir, app_ru[idx]) for idx in range(len(host_ru))]
    #     if control_timing_hw:
    #         jf_thi = join(data_dir, app_thi)

    #     cmd_set = ["init", "conf", "start", "stop", "pause", "resume", "scrap", "record"]

    #     apps = [app_hsi, app_trigger, app_df] + app_ru
    #     if enable_dqm:
    #         apps += app_dqm
    #     cmds_data = [cmd_data_hsi, cmd_data_trigger, cmd_data_dataflow] + cmd_data_readout
    #     if enable_dqm:
    #         cmds_data += cmd_data_dqm
    #     if control_timing_hw:
    #         apps.append(app_thi)
    #         cmds_data.append(cmd_data_thi)

    #     for app,data in zip(apps, cmds_data):
    #         console.log(f"Generating {app} command data json files")
    #         for c in cmd_set:
    #             with open(f'{join(data_dir, app)}_{c}.json', 'w') as f:
    #                 json.dump(data[c].pod(), f, indent=4, sort_keys=True)

    #     console.log(f"Generating top-level command json files")

    #     start_order = [app_df] + [app_trigger] + app_ru + [app_hsi] + app_dqm
    #     if not control_timing_hw and use_hsi_hw:
    #         resume_order = [app_trigger]
    #     else:
    #         resume_order = [app_hsi, app_trigger]

    #     for c in cmd_set:
    #         with open(join(json_dir,f'{c}.json'), 'w') as f:
    #             cfg = {
    #                 "apps": { app: f'data/{app}_{c}' for app in apps }
    #             }
    #             if c in ['conf']:
    #                 conf_order = start_order
    #                 if control_timing_hw:
    #                     conf_order = [app_thi] + conf_order
    #                 cfg[f'order'] = conf_order
    #             elif c == 'start':
    #                 cfg['order'] = start_order
    #                 if control_timing_hw:
    #                     del cfg['apps'][app_thi]
    #             elif c == 'stop':
    #                 cfg['order'] = start_order[::-1]
    #                 if control_timing_hw:
    #                     del cfg['apps'][app_thi]
    #             elif c in ('resume', 'pause'):
    #                 del cfg['apps'][app_df]
    #                 if control_timing_hw:
    #                     del cfg['apps'][app_thi]
    #                 elif use_hsi_hw:
    #                     del cfg['apps'][app_hsi]
    #                 for ruapp in app_ru:
    #                     del cfg['apps'][ruapp]
    #                 if enable_dqm:
    #                     for dqmapp in app_dqm:
    #                         del cfg['apps'][dqmapp]
    #                 if c == 'resume':
    #                     cfg['order'] = resume_order
    #                 elif c == 'pause':
    #                     cfg['order'] = resume_order[::-1]

    #             json.dump(cfg, f, indent=4, sort_keys=True)

    #     console.log(f"Generating boot json file")
    #     with open(join(json_dir,'boot.json'), 'w') as f:
    #         daq_app_specs = {
    #             "daq_application" : {
    #                 "comment": "Application profile using  PATH variables (lower start time)",
    #                 "env":{
    #                     "CET_PLUGIN_PATH": "getenv",
    #                     "DUNEDAQ_SHARE_PATH": "getenv",
    #                     "TIMING_SHARE": "getenv",
    #                     "LD_LIBRARY_PATH": "getenv",
    #                     "PATH": "getenv",
    #                     "DETCHANNELMAPS_SHARE": "getenv"
    #                 },
    #                 "cmd": ["CMD_FAC=rest://localhost:${APP_PORT}",
    #                     "INFO_SVC=" + info_svc_uri,
    #                     "cd ${APP_WD}",
    #                     "daq_application --name ${APP_NAME} -c ${CMD_FAC} -i ${INFO_SVC}"]
    #             }
    #         }

    #         if not disable_trace:
    #             daq_app_specs["daq_application"]["env"]["TRACE_FILE"] = "getenv:/tmp/trace_buffer_${HOSTNAME}_${USER}"

    #         cfg = {
    #             "env" : {
    #                 "DUNEDAQ_ERS_VERBOSITY_LEVEL": "getenv:1",
    #                 "DUNEDAQ_PARTITION": partition_name,
    #                 "DUNEDAQ_ERS_INFO": ers_info,
    #                 "DUNEDAQ_ERS_WARNING": ers_warning,
    #                 "DUNEDAQ_ERS_ERROR": ers_error,
    #                 "DUNEDAQ_ERS_FATAL": ers_fatal,
    #                 "DUNEDAQ_ERS_DEBUG_LEVEL": "getenv:-1",
    #             },
    #             "hosts": {
    #                 "host_df": host_df,
    #                 "host_trigger": host_trigger,
    #                 "host_hsi": host_hsi,
    #             },
    #             "apps" : {
    #                 app_hsi: {
    #                     "exec": "daq_application",
    #                     "host": "host_hsi",
    #                     "port": 3332
    #                 },
    #                 app_trigger : {
    #                     "exec": "daq_application",
    #                     "host": "host_trigger",
    #                     "port": 3333
    #                 },
    #                 app_df: {
    #                     "exec": "daq_application",
    #                     "host": "host_df",
    #                     "port": 3334
    #                 },
    #             },
    #             "response_listener": {
    #                 "port": 56789
    #             },
    #             "exec": daq_app_specs
    #         }

    #         if use_kafka:
    #             cfg["env"]["DUNEDAQ_ERS_STREAM_LIBS"] = "erskafka"

    #         appport = 3335
    #         for hostidx in range(len(host_ru)):
    #             cfg["hosts"][f"host_ru{hostidx}"] = host_ru[hostidx]
    #             cfg["apps"][app_ru[hostidx]] = {
    #                     "exec": "daq_application",
    #                     "host": f"host_ru{hostidx}",
    #                     "port": appport }
    #             appport = appport + 1
    #         if enable_dqm:
    #             for hostidx in range(len(host_ru)):
    #                 cfg["hosts"][f"host_dqm{hostidx}"] = host_ru[hostidx]
    #                 cfg["apps"][app_dqm[hostidx]] = {
    #                         "exec": "daq_application",
    #                         "host": f"host_dqm{hostidx}",
    #                         "port": appport }
    #                 appport = appport + 1

    #         if control_timing_hw:
    #             cfg["hosts"][f"host_timing_hw"] = host_timing_hw
    #             cfg["apps"][app_thi] = {
    #                     "exec": "daq_application",
    #                     "host": "host_timing_hw",
    #                     "port": appport + len(host_ru) }

    #         json.dump(cfg, f, indent=4, sort_keys=True)

    #     console.log("Generating metadata file")
    #     with open(join(json_dir, 'mdapp_multiru_gen.info'), 'w') as f:
    #         mdapp_dir = os.path.dirname(os.path.abspath(__file__))
    #         buildinfo_files = glob.glob('**/minidaqapp_build_info.txt', recursive=True)
    #         buildinfo = {}
    #         for buildinfo_file in buildinfo_files:
    #             if(os.path.dirname(os.path.abspath(buildinfo_file)) in mdapp_dir):
    #                 with open(buildinfo_file, 'r') as ff:
    #                     line = ff.readline()
    #                     while line:
    #                         line_parse = line.split(':')
    #                         buildinfo[line_parse[0].strip()]=':'.join(line_parse[1:]).strip()
    #                         line = ff.readline()

    #                 break
    #         mdapp_info = {
    #             "command_line": ' '.join(sys.argv),
    #             "mdapp_dir": mdapp_dir,
    #             "build_info": buildinfo
    #         }
    #         json.dump(mdapp_info, f, indent=4, sort_keys=True)

    #     console.log(f"MDAapp config generated in {json_dir}")
    from appfwk.conf_utils import connect_all_fragment_producers, add_network, make_app_command_data, set_mlt_links
    the_system.export("system_no_frag_prod_connection.dot")
    connect_all_fragment_producers(the_system, verbose=True)

    # console.log("After connecting fragment producers, trigger mgraph:", the_system.apps['trigger'].modulegraph)
    # console.log("After connecting fragment producers, the_system.app_connections:", the_system.app_connections)

    set_mlt_links(the_system, "trigger", verbose=True)

    # add_network("trigger", the_system, partition_name=partition_name, verbose=True)
    # # console.log("After adding network, trigger mgraph:", the_system.apps['trigger'].modulegraph)
    # add_network("hsi", the_system, partition_name=partition_name, verbose=True)
    # for ru_app_name in ru_app_names:
    #     add_network(ru_app_name, the_system, partition_name=partition_name, verbose=True)

    # add_network("dataflow", the_system, partition_name=partition_name, verbose=True)
    the_system.export("system.dot")

    ####################################################################
    # Application command data generation
    ####################################################################

    # if control_timing_hw:
    #     timing_cmd_network_endpoints=set()
    #     if use_hsi_hw:
    #         timing_cmd_network_endpoints.add('hsicmds')
    #     cmd_data_thi = thi_gen.generate(
    #         RUN_NUMBER = run_number,
    #         NETWORK_ENDPOINTS=network_endpoints,
    #         TIMING_CMD_NETWORK_ENDPOINTS=timing_cmd_network_endpoints,
    #         HSI_DEVICE_NAME=hsi_device_name,
    #     )
    #     console.log("thi cmd data:", cmd_data_thi)

    # Arrange per-app command data into the format used by util.write_json_files()
    app_command_datas = {
        name: make_app_command_data(the_system, app)
        for name, app in the_system.apps.items()
    }

    if control_timing_hw:
        # NB: cmd_data_thi is never produced in this variant (the thi_gen call
        # above is commented out, marked "PL: TODO"), so enabling
        # control_timing_hw would currently raise a NameError here.
        app_command_datas["thi"] = cmd_data_thi

    ##################################################################################

    # Make boot.json config
    from appfwk.conf_utils import make_system_command_datas, generate_boot, write_json_files
    system_command_datas = make_system_command_datas(the_system)
    # Override the default boot.json with the one from minidaqapp
    boot = generate_boot(the_system.apps,
                         partition_name=partition_name,
                         ers_settings=ers_settings,
                         info_svc_uri=info_svc_uri,
                         disable_trace=disable_trace,
                         use_kafka=use_kafka)

    system_command_datas['boot'] = boot

    write_json_files(app_command_datas, system_command_datas, json_dir)

    console.log(f"MDAapp config generated in {json_dir}")
Code example #2
def generate(
    PARTITION="hsi_readout_test",
    OUTPUT_PATH=".",
    TRIGGER_RATE_HZ: int = 1,
    CLOCK_SPEED_HZ: int = 50000000,
    HSI_TIMESTAMP_OFFSET: int = 0,  # Offset for HSIEvent timestamps in units of clock ticks. Positive offset increases timestamp estimate.
    HSI_DEVICE_ID: int = 0,
    MEAN_SIGNAL_MULTIPLICITY: int = 0,
    SIGNAL_EMULATION_MODE: int = 0,
    ENABLED_SIGNALS: int = 0b00000001,
):

    # network connection
    nw_specs = [
        nwmgr.Connection(name=PARTITION + ".hsievent",
                         topics=[],
                         address="tcp://127.0.0.1:12344")
    ]

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(inst="time_sync_from_netq",
                      kind='FollySPSCQueue',
                      capacity=100),
    ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = [
        mspec("ntoq_timesync", "NetworkToQueue", [
            app.QueueInfo(
                name="output", inst="time_sync_from_netq", dir="output")
        ]),
        mspec("fhsig", "FakeHSIEventGenerator", [
            app.QueueInfo(name="time_sync_source",
                          inst="time_sync_from_netq",
                          dir="input"),
        ]),
    ]

    init_specs = app.Init(queues=queue_specs, modules=mod_specs)

    jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
    print(jstr)

    initcmd = rcif.RCCommand(id=cmdlib.CmdId("init"),
                             entry_state="NONE",
                             exit_state="INITIAL",
                             data=init_specs)

    trigger_interval_ticks = 0
    if TRIGGER_RATE_HZ > 0:
        trigger_interval_ticks = math.floor(
            (1 / TRIGGER_RATE_HZ) * CLOCK_SPEED_HZ)
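        # e.g. the defaults (1 Hz trigger rate, 50 MHz clock) give
        # floor((1 / 1) * 50000000) = 50000000 ticks between triggers.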

    mods = [
        ("fhsig",
         fhsig.Conf(
             clock_frequency=CLOCK_SPEED_HZ,
             trigger_interval_ticks=trigger_interval_ticks,
             timestamp_offset=HSI_TIMESTAMP_OFFSET,
             mean_signal_multiplicity=MEAN_SIGNAL_MULTIPLICITY,
             signal_emulation_mode=SIGNAL_EMULATION_MODE,
             enabled_signals=ENABLED_SIGNALS,
             timesync_topic="Timesync",
             hsievent_connection_name=PARTITION + ".hsievent",
         )),
    ]

    confcmd = mrccmd("conf", "INITIAL", "CONFIGURED", mods)

    jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
    print(jstr)

    startpars = rcif.StartParams(run=33, disable_data_storage=False)

    startcmd = mrccmd("start", "CONFIGURED", "RUNNING",
                      [("ntoq_timesync", startpars), ("fhsig", startpars)])

    jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStart\n\n", jstr)

    stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [("ntoq_timesync", None),
                                                       ("fhsig", None)])

    jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStop\n\n", jstr)

    scrapcmd = mcmd("scrap", [("", None)])

    jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nScrap\n\n", jstr)

    # Create a list of commands
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd]

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr
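
A hypothetical way to drive the generator above and persist its output (the call site and file name are illustrative, not project code):

# Illustration only: produce the command sequence and write it to disk.
json_blob = generate(PARTITION="hsi_readout_test", TRIGGER_RATE_HZ=1)
with open("fake_hsi_commands.json", "w") as f:
    f.write(json_blob)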
Code example #3
def cli(partition_name, number_of_data_producers, emulator_mode, data_rate_slowdown_factor, run_number, trigger_rate_hz, trigger_window_before_ticks, trigger_window_after_ticks,
        token_count, data_file, output_path, disable_trace, use_felix, use_ssp, host_df, host_ru, host_trigger, host_hsi, host_timing_hw, control_timing_hw, timing_hw_connections_file, region_id, latency_buffer_size,
        hsi_device_name, hsi_readout_period, hsi_endpoint_address, hsi_endpoint_partition, hsi_re_mask, hsi_fe_mask, hsi_inv_mask, hsi_source,
        use_hsi_hw, hsi_device_id, mean_hsi_signal_multiplicity, hsi_signal_emulation_mode, enabled_hsi_signals,
        ttcm_s1, ttcm_s2, trigger_activity_plugin, trigger_activity_config, trigger_candidate_plugin, trigger_candidate_config,
        enable_raw_recording, raw_recording_output_dir, frontend_type, opmon_impl, enable_dqm, ers_impl, dqm_impl, pocket_url, enable_software_tpg, enable_tpset_writing, use_fake_data_producers, dqm_cmap,
        dqm_rawdisplay_params, dqm_meanrms_params, dqm_fourier_params, dqm_fouriersum_params,
        op_env, tpc_region_name_prefix, max_file_size, json_dir):

    """
      JSON_DIR: Json file output folder
    """

    if exists(json_dir):
        raise RuntimeError(f"Directory {json_dir} already exists")

    console.log("Loading dataflow config generator")
    from . import dataflow_gen
    if enable_dqm:
        console.log("Loading dqm config generator")
        from . import dqm_gen
    console.log("Loading readout config generator")
    from . import readout_gen
    console.log("Loading trigger config generator")
    from . import trigger_gen
    console.log("Loading hsi config generator")
    from . import hsi_gen
    console.log("Loading fake hsi config generator")
    from . import fake_hsi_gen
    console.log("Loading timing hardware config generator")
    from . import thi_gen
    console.log(f"Generating configs for hosts trigger={host_trigger} dataflow={host_df} readout={host_ru} hsi={host_hsi} dqm={host_ru}")

    total_number_of_data_producers = number_of_data_producers * len(host_ru)
    channel_kind = "SSP" if use_ssp else "TPC"
    console.log(f"Will setup {number_of_data_producers} {channel_kind} channels per host, for a total of {total_number_of_data_producers}")

    if enable_software_tpg and frontend_type != 'wib':
        raise Exception("Software TPG is only available for the wib at the moment!")

    if enable_software_tpg and use_fake_data_producers:
        raise Exception("Fake data producers don't support software tpg")

    if use_fake_data_producers and enable_dqm:
        raise Exception("DQM can't be used with fake data producers")

    if enable_tpset_writing and not enable_software_tpg:
        raise Exception("TPSet writing can only be used when software TPG is enabled")

    if token_count > 0:
        trigemu_token_count = token_count
    else:
        trigemu_token_count = 0

    if (len(region_id) != len(host_ru)) and (len(region_id) != 1):
        raise Exception("--region-id should be specified either once only or once for each --host-ru!")

    if frontend_type == 'wib' or frontend_type == 'wib2':
        system_type = 'TPC'
    elif frontend_type == 'pacman':
        system_type = 'NDLArTPC'
    else:
        system_type = 'PDS'

    if opmon_impl == 'cern':
        info_svc_uri = "influx://188.185.88.195:80/write?db=db1"
    elif opmon_impl == 'pocket':
        info_svc_uri = "influx://" + pocket_url + ":31002/write?db=influxdb"
    else:
        info_svc_uri = "file://info_${APP_NAME}_${APP_PORT}.json"

    if ers_impl == 'cern':
        use_kafka = True
        ers_info = "erstrace,throttle,lstdout,erskafka(dqmbroadcast:9092)"
        ers_warning = "erstrace,throttle,lstdout,erskafka(dqmbroadcast:9092)"
        ers_error = "erstrace,throttle,lstdout,erskafka(dqmbroadcast:9092)"
        ers_fatal = "erstrace,lstdout,erskafka(dqmbroadcast:9092)"
    elif ers_impl == 'pocket':
        use_kafka = True
        ers_info = "erstrace,throttle,lstdout,erskafka(" + pocket_url + ":30092)"
        ers_warning = "erstrace,throttle,lstdout,erskafka(" + pocket_url + ":30092)"
        ers_error = "erstrace,throttle,lstdout,erskafka(" + pocket_url + ":30092)"
        ers_fatal = "erstrace,lstdout,erskafka(" + pocket_url + ":30092)"
    else:
        use_kafka = False
        ers_info = "erstrace,throttle,lstdout"
        ers_warning = "erstrace,throttle,lstdout"
        ers_error = "erstrace,throttle,lstdout"
        ers_fatal = "erstrace,lstdout"

    dqm_kafka_address = "dqmbroadcast:9092" if dqm_impl == 'cern' else pocket_url + ":30092" if dqm_impl == 'pocket' else ''

    # network connections map
    nw_specs = [nwmgr.Connection(name=partition_name + ".hsievent",topics=[],  address="tcp://{host_trigger}:12344"),
        nwmgr.Connection(name=partition_name + ".triginh",topics=[],   address="tcp://{host_trigger}:12345")]

    port = 12346

    for hostidx in range(len(host_df)):
        nw_specs.append(nwmgr.Connection(name=f"{partition_name}.trigdec_{hostidx}",topics=[],  address="tcp://{host_df" + f"{hostidx}" + "}:" +f"{port}"))
        port = port + 1
        nw_specs.append(nwmgr.Connection(name=f"{partition_name}.frags_{hostidx}", topics=[],  address="tcp://{host_df" + f"{hostidx}" + "}:" + f"{port}"))
        port = port + 1

    if control_timing_hw and use_hsi_hw:
        nw_specs.append(nwmgr.Connection(name=partition_name + ".hsicmds",  topics=[], address="tcp://{host_timing_hw}:" + f"{port}"))
        port = port + 1

    if enable_software_tpg:
        nw_specs.append(nwmgr.Connection(name=f"{partition_name}.ds_tp_datareq_0",topics=[],   address="tcp://{host_trigger}:" + f"{port}"))
        port = port + 1

    host_id_dict = {}
    ru_configs = []
    ru_channel_counts = {}
    for region in region_id: ru_channel_counts[region] = 0
    regionidx = 0

    for hostidx in range(len(host_ru)):
        if enable_software_tpg:
            nw_specs.append(nwmgr.Connection(name=f"{partition_name}.tpsets_{hostidx}", topics=["TPSets"], address = "tcp://{host_ru" + f"{hostidx}" + "}:" + f"{port}"))
            port = port + 1


        if enable_dqm:
            nw_specs.append(nwmgr.Connection(name=f"{partition_name}.fragx_dqm_{hostidx}", topics=[], address="tcp://{host_ru" + f"{hostidx}" + "}:" + f"{port}"))
            port = port + 1

        nw_specs.append(nwmgr.Connection(name=f"{partition_name}.datareq_{hostidx}", topics=[], address="tcp://{host_ru" + f"{hostidx}" + "}:" + f"{port}"))
        port = port + 1

        # Should end up something like 'network_endpoints[timesync_0]:
        # "tcp://{host_ru0}:12347"'
        nw_specs.append(nwmgr.Connection(name=f"{partition_name}.timesync_{hostidx}", topics=["Timesync"], address= "tcp://{host_ru" + f"{hostidx}" + "}:" + f"{port}"))
        port = port + 1

        cardid = 0
        if host_ru[hostidx] in host_id_dict:
            host_id_dict[host_ru[hostidx]] = host_id_dict[host_ru[hostidx]] + 1
            cardid = host_id_dict[host_ru[hostidx]]
        else:
            host_id_dict[host_ru[hostidx]] = 0
        ru_configs.append( {"host": host_ru[hostidx], "card_id": cardid, "region_id": region_id[regionidx], "start_channel": ru_channel_counts[region_id[regionidx]], "channel_count": number_of_data_producers} )
        ru_channel_counts[region_id[regionidx]] += number_of_data_producers
        if len(region_id) != 1: regionidx = regionidx + 1
    
    if control_timing_hw:
        timing_cmd_network_endpoints = set()
        if use_hsi_hw:
            timing_cmd_network_endpoints.add(partition_name + '.hsicmds')
        cmd_data_thi = thi_gen.generate(RUN_NUMBER = run_number,
            NW_SPECS=nw_specs,
            TIMING_CMD_NETWORK_ENDPOINTS=timing_cmd_network_endpoints,
            CONNECTIONS_FILE=timing_hw_connections_file,
            HSI_DEVICE_NAME=hsi_device_name,
        )
        console.log("thi cmd data:", cmd_data_thi)

    if use_hsi_hw:
        cmd_data_hsi = hsi_gen.generate(nw_specs,
            RUN_NUMBER = run_number,
            CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
            TRIGGER_RATE_HZ = trigger_rate_hz,
            CONTROL_HSI_HARDWARE=control_timing_hw,
            CONNECTIONS_FILE=timing_hw_connections_file,
            READOUT_PERIOD_US = hsi_readout_period,
            HSI_DEVICE_NAME = hsi_device_name,
            HSI_ENDPOINT_ADDRESS = hsi_endpoint_address,
            HSI_ENDPOINT_PARTITION = hsi_endpoint_partition,
            HSI_RE_MASK=hsi_re_mask,
            HSI_FE_MASK=hsi_fe_mask,
            HSI_INV_MASK=hsi_inv_mask,
            HSI_SOURCE=hsi_source,
            PARTITION=partition_name)
    else:
        cmd_data_hsi = fake_hsi_gen.generate(nw_specs,
            RUN_NUMBER = run_number,
            CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
            DATA_RATE_SLOWDOWN_FACTOR = data_rate_slowdown_factor,
            TRIGGER_RATE_HZ = trigger_rate_hz,
            HSI_DEVICE_ID = hsi_device_id,
            MEAN_SIGNAL_MULTIPLICITY = mean_hsi_signal_multiplicity,
            SIGNAL_EMULATION_MODE = hsi_signal_emulation_mode,
            ENABLED_SIGNALS =  enabled_hsi_signals,
            PARTITION=partition_name)

    console.log("hsi cmd data:", cmd_data_hsi)

    cmd_data_trigger = trigger_gen.generate(nw_specs,
        SOFTWARE_TPG_ENABLED = enable_software_tpg,
        RU_CONFIG = ru_configs,
        ACTIVITY_PLUGIN = trigger_activity_plugin,
        ACTIVITY_CONFIG = eval(trigger_activity_config),
        CANDIDATE_PLUGIN = trigger_candidate_plugin,
        CANDIDATE_CONFIG = eval(trigger_candidate_config),
        TOKEN_COUNT = trigemu_token_count,
        DF_COUNT = len(host_df),
        SYSTEM_TYPE = system_type,
        TTCM_S1=ttcm_s1,
        TTCM_S2=ttcm_s2,
        TRIGGER_WINDOW_BEFORE_TICKS = trigger_window_before_ticks,
        TRIGGER_WINDOW_AFTER_TICKS = trigger_window_after_ticks,
        PARTITION=partition_name)


    console.log("trigger cmd data:", cmd_data_trigger)

    cmd_data_dataflow = [ dataflow_gen.generate(nw_specs,
        RU_CONFIG = ru_configs,
        HOSTIDX = hostidx,
        RUN_NUMBER = run_number,
        OUTPUT_PATH = output_path,
        SYSTEM_TYPE = system_type,
        SOFTWARE_TPG_ENABLED = enable_software_tpg,
        TPSET_WRITING_ENABLED = enable_tpset_writing,
        PARTITION=partition_name,
        OPERATIONAL_ENVIRONMENT = op_env,
        TPC_REGION_NAME_PREFIX = tpc_region_name_prefix,
        MAX_FILE_SIZE = max_file_size) for hostidx in range(len(host_df)) ]
    console.log("dataflow cmd data:", cmd_data_dataflow)

    cmd_data_readout = [ readout_gen.generate(nw_specs,
            RU_CONFIG = ru_configs,
            EMULATOR_MODE = emulator_mode,
            DATA_RATE_SLOWDOWN_FACTOR = data_rate_slowdown_factor,
            RUN_NUMBER = run_number,
            DATA_FILE = data_file,
            FLX_INPUT = use_felix,
            SSP_INPUT = use_ssp,
            CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
            RUIDX = hostidx,
            RAW_RECORDING_ENABLED = enable_raw_recording,
            RAW_RECORDING_OUTPUT_DIR = raw_recording_output_dir,
            FRONTEND_TYPE = frontend_type,
            SYSTEM_TYPE = system_type,
            SOFTWARE_TPG_ENABLED = enable_software_tpg,
            USE_FAKE_DATA_PRODUCERS = use_fake_data_producers,
            PARTITION=partition_name,
            LATENCY_BUFFER_SIZE=latency_buffer_size) for hostidx in range(len(host_ru))]
    console.log("readout cmd data:", cmd_data_readout)

    if enable_dqm:
        cmd_data_dqm = [ dqm_gen.generate(nw_specs,
                RU_CONFIG = ru_configs,
                EMULATOR_MODE = emulator_mode,
                RUN_NUMBER = run_number,
                DATA_FILE = data_file,
                CLOCK_SPEED_HZ = CLOCK_SPEED_HZ,
                RUIDX = hostidx,
                SYSTEM_TYPE = system_type,
                DQM_ENABLED=enable_dqm,
                DQM_KAFKA_ADDRESS=dqm_kafka_address,
                DQM_CMAP=dqm_cmap,
                DQM_RAWDISPLAY_PARAMS=dqm_rawdisplay_params,
                DQM_MEANRMS_PARAMS=dqm_meanrms_params,
                DQM_FOURIER_PARAMS=dqm_fourier_params,
                DQM_FOURIERSUM_PARAMS=dqm_fouriersum_params,
                PARTITION=partition_name
                ) for hostidx in range(len(host_ru))]
        console.log("dqm cmd data:", cmd_data_dqm)


    data_dir = join(json_dir, 'data')
    os.makedirs(data_dir)

    app_thi="thi"
    app_hsi = "hsi"
    app_trigger = "trigger"
    app_df = [f"dataflow{idx}" for idx in range(len(host_df))]
    app_dqm = [f"dqm{idx}" for idx in range(len(host_ru))]
    app_ru = [f"ruflx{idx}" if use_felix else f"ruemu{idx}" for idx in range(len(host_ru))]
    if use_ssp:
        app_ru = [f"russp{idx}" for idx in range(len(host_ru))]

    jf_hsi = join(data_dir, app_hsi)
    jf_trigemu = join(data_dir, app_trigger)
    jf_df = [join(data_dir, app_df[idx]) for idx in range(len(host_df))]
    jf_dqm = [join(data_dir, app_dqm[idx]) for idx in range(len(host_ru))]
    jf_ru = [join(data_dir, app_ru[idx]) for idx in range(len(host_ru))]
    if control_timing_hw:
        jf_thi = join(data_dir, app_thi)
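    # NB: the jf_* paths above are computed but not used below; the command
    # files are written via join(data_dir, app) directly.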

    cmd_set = ["init", "conf", "start", "stop", "pause", "resume", "scrap", "record"]
    
    apps = [app_hsi, app_trigger] + app_df + app_ru
    if enable_dqm:
        apps += app_dqm
    cmds_data = [cmd_data_hsi, cmd_data_trigger] + cmd_data_dataflow + cmd_data_readout
    if enable_dqm:
        cmds_data += cmd_data_dqm
    if control_timing_hw:
        apps.append(app_thi)
        cmds_data.append(cmd_data_thi)

    for app,data in zip(apps, cmds_data):
        console.log(f"Generating {app} command data json files")
        for c in cmd_set:
            with open(f'{join(data_dir, app)}_{c}.json', 'w') as f:
                json.dump(data[c].pod(), f, indent=4, sort_keys=True)


    console.log(f"Generating top-level command json files")

    start_order = app_df + [app_trigger] + app_ru + [app_hsi] + app_dqm
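    # When real HSI hardware is in use and not controlled from here, the hsi
    # app presumably takes no resume/pause, so it is dropped from resume_order.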
    if not control_timing_hw and use_hsi_hw:
        resume_order = [app_trigger]
    else:
        resume_order = [app_hsi, app_trigger]

    for c in cmd_set:
        with open(join(json_dir,f'{c}.json'), 'w') as f:
            cfg = {
                "apps": { app: f'data/{app}_{c}' for app in apps }
            }
            if c in ['conf']:
                conf_order = start_order
                if control_timing_hw:
                    conf_order = [app_thi] + conf_order
                cfg['order'] = conf_order
            elif c == 'start':
                cfg['order'] = start_order
                if control_timing_hw:
                    del cfg['apps'][app_thi]
            elif c == 'stop':
                cfg['order'] = start_order[::-1]
                if control_timing_hw:
                    del cfg['apps'][app_thi]
            elif c in ('resume', 'pause'):
                for dfapp in app_df:
                    del cfg['apps'][dfapp]
                if control_timing_hw:
                    del cfg['apps'][app_thi]
                elif use_hsi_hw:
                    del cfg['apps'][app_hsi]
                for ruapp in app_ru:
                    del cfg['apps'][ruapp]
                if enable_dqm:
                    for dqmapp in app_dqm:
                        del cfg['apps'][dqmapp]
                if c == 'resume':
                    cfg['order'] = resume_order
                elif c == 'pause':
                    cfg['order'] = resume_order[::-1]

            json.dump(cfg, f, indent=4, sort_keys=True)


    console.log(f"Generating boot json file")
    with open(join(json_dir,'boot.json'), 'w') as f:
        daq_app_specs = {
            "daq_application" : {
                "comment": "Application profile using  PATH variables (lower start time)",
                "env":{
                    "CET_PLUGIN_PATH": "getenv",
                    "DUNEDAQ_SHARE_PATH": "getenv",
                    "TIMING_SHARE": "getenv",
                    "LD_LIBRARY_PATH": "getenv",
                    "PATH": "getenv",
                    "DETCHANNELMAPS_SHARE": "getenv"
                },
                "cmd": ["CMD_FAC=rest://localhost:${APP_PORT}",
                    "INFO_SVC=" + info_svc_uri,
                    "cd ${APP_WD}",
                    "daq_application --name ${APP_NAME} -c ${CMD_FAC} -i ${INFO_SVC}"]
            }
        }

        if not disable_trace:
            daq_app_specs["daq_application"]["env"]["TRACE_FILE"] = "getenv:/tmp/trace_buffer_${HOSTNAME}_${USER}"

        cfg = {
            "env" : {
                "DUNEDAQ_ERS_VERBOSITY_LEVEL": "getenv:1",
                "DUNEDAQ_PARTITION": partition_name,
                "DUNEDAQ_ERS_INFO": ers_info,
                "DUNEDAQ_ERS_WARNING": ers_warning,
                "DUNEDAQ_ERS_ERROR": ers_error,
                "DUNEDAQ_ERS_FATAL": ers_fatal,
                "DUNEDAQ_ERS_DEBUG_LEVEL": "getenv:-1",
            },
            "hosts": {
                "host_trigger": host_trigger,
                "host_hsi": host_hsi,
            },
            "apps" : {
                app_hsi: {
                    "exec": "daq_application",
                    "host": "host_hsi",
                    "port": 3332
                },
                app_trigger : {
                    "exec": "daq_application",
                    "host": "host_trigger",
                    "port": 3333
                },
            },
            "response_listener": {
                "port": 56789
            },
            "exec": daq_app_specs
        }

        if use_kafka:
            cfg["env"]["DUNEDAQ_ERS_STREAM_LIBS"] = "erskafka"

        appport = 3334
        for hostidx in range(len(host_df)):
            cfg["hosts"][f"host_df{hostidx}"] = host_df[hostidx]
            cfg["apps"][app_df[hostidx]] = {
                    "exec": "daq_application",
                    "host": f"host_df{hostidx}",
                    "port": appport }
            appport = appport + 1

        for hostidx in range(len(host_ru)):
            cfg["hosts"][f"host_ru{hostidx}"] = host_ru[hostidx]
            cfg["apps"][app_ru[hostidx]] = {
                    "exec": "daq_application",
                    "host": f"host_ru{hostidx}",
                    "port": appport }
            appport = appport + 1
        if enable_dqm:
            for hostidx in range(len(host_ru)):
                cfg["hosts"][f"host_dqm{hostidx}"] = host_ru[hostidx]
                cfg["apps"][app_dqm[hostidx]] = {
                        "exec": "daq_application",
                        "host": f"host_dqm{hostidx}",
                        "port": appport }
                appport = appport + 1
        
        if control_timing_hw:
            cfg["hosts"][f"host_timing_hw"] = host_timing_hw
            cfg["apps"][app_thi] = {
                    "exec": "daq_application",
                    "host": "host_timing_hw",
                    "port": appport + len(host_ru) }

        json.dump(cfg, f, indent=4, sort_keys=True)

    console.log("Generating metadata file")
    with open(join(json_dir, 'mdapp_multiru_gen.info'), 'w') as f:
        mdapp_dir = os.path.dirname(os.path.abspath(__file__))
        buildinfo_files = glob.glob('**/minidaqapp_build_info.txt', recursive=True)
        buildinfo = {}
        for buildinfo_file in buildinfo_files:
            if(os.path.dirname(os.path.abspath(buildinfo_file)) in mdapp_dir):
                with open(buildinfo_file, 'r') as ff:
                    line = ff.readline()
                    while line: 
                        line_parse = line.split(':')
                        buildinfo[line_parse[0].strip()]=':'.join(line_parse[1:]).strip()
                        line = ff.readline()
                    
                break
        mdapp_info = {
            "command_line": ' '.join(sys.argv),
            "mdapp_dir": mdapp_dir,
            "build_info": buildinfo
        }
        json.dump(mdapp_info, f, indent=4, sort_keys=True)

    console.log(f"MDAapp config generated in {json_dir}")
Code example #4
File: conf_utils.py  Project: DUNE-DAQ/appfwk
def add_network(app_name, the_system, verbose=False):
    """Add the necessary QueueToNetwork and NetworkToQueue objects to the
       application named `app_name`, based on the inter-application
       connections specified in `the_system`. NB `the_system` is modified
       in-place."""

    # if the_system.network_endpoints is None:
    #     the_system.network_endpoints=assign_network_endpoints(the_system)

    if verbose:
        console.log(f"---- add_network for {app_name} ----")
    app = the_system.apps[app_name]

    modules_with_network = deepcopy(app.modulegraph.modules)

    unconnected_endpoints = set(app.modulegraph.endpoints.keys())

    if verbose:
        console.log(f"Endpoints to connect are: {unconnected_endpoints}")

    for conn_name, app_connection in the_system.app_connections.items():
        if verbose:
            console.log(
                f"conn_name {conn_name}, app_connection {app_connection}")

        # Create the nwmgr connection if it doesn't already exist
        if not the_system.has_network_endpoint(
                app_connection.nwmgr_connection):
            # IPM connections have the following confusing behaviour:
            # whether the connection is pub/sub or direct connection
            # is determined by whether the list of topics is empty;
            # and the end that binds is upstream for pub/sub
            # connections and downstream for direct connections
            is_pubsub = app_connection.topics != []
            bind_host = app_name if is_pubsub else app_connection.receivers[
                0].split(".")[0]
            port = the_system.next_unassigned_port()
            address = f"tcp://{{host_{bind_host}}}:{port}"
            if verbose:
                console.log(
                    f"Assigning address {address} for connection {app_connection.nwmgr_connection}"
                )
            the_system.network_endpoints.append(
                nwmgr.Connection(name=app_connection.nwmgr_connection,
                                 topics=app_connection.topics,
                                 address=address))
        from_app, from_endpoint = conn_name.split(".", maxsplit=1)

        if from_app == app_name:
            if from_endpoint in unconnected_endpoints:
                unconnected_endpoints.remove(from_endpoint)
            from_endpoint_internal = resolve_endpoint(app, from_endpoint,
                                                      Direction.OUT)
            if from_endpoint_internal is None:
                # The module.endpoint for this external endpoint was
                # specified as None, so we assume it was a direct
                # nwmgr sender, and don't make a qton for it
                if verbose:
                    console.log(
                        f"{conn_name} specifies its internal endpoint as None, so not creating a QtoN for it"
                    )
                continue
            from_endpoint_module_name, from_endpoint_sink = from_endpoint_internal.split(
                ".")
            # We're a publisher or sender. Make the queue to network
            qton_name = conn_name.replace(".", "_")
            qton_name = make_unique_name(qton_name, modules_with_network)

            if verbose:
                console.log(
                    f"Adding QueueToNetwork named {qton_name} connected to {from_endpoint_internal} in app {app_name}"
                )
            nwmgr_connection_name = app_connection.nwmgr_connection
            nwmgr_connection = the_system.get_network_endpoint(
                nwmgr_connection_name)
            topic = nwmgr_connection.topics[
                0] if nwmgr_connection.topics else ""
            modules_with_network.append(
                DAQModule(
                    name=qton_name,
                    plugin="QueueToNetwork",
                    connections={},  # No outgoing connections
                    conf=qton.Conf(
                        msg_type=app_connection.msg_type,
                        msg_module_name=app_connection.msg_module_name,
                        sender_config=nos.Conf(name=nwmgr_connection_name,
                                               topic=topic))))
            # Connect the module to the QueueToNetwork
            from_endpoint_module = next(
                mod for mod in modules_with_network
                if mod.name == from_endpoint_module_name)
            from_endpoint_module.connections[from_endpoint_sink] = Connection(
                f"{qton_name}.input")

        if verbose:
            console.log(
                f"app_connection.receivers is {app_connection.receivers}")
        for receiver in app_connection.receivers:
            to_app, to_endpoint = receiver.split(".", maxsplit=1)
            if to_app == app_name:
                if to_endpoint in unconnected_endpoints:
                    unconnected_endpoints.remove(to_endpoint)
                to_endpoint_internal = resolve_endpoint(
                    app, to_endpoint, Direction.IN)
                if to_endpoint_internal is None:
                    # The module.endpoint for this external endpoint was
                    # specified as None, so we assume it is a direct
                    # nwmgr receiver, and don't make an NtoQ for it
                    if verbose:
                        console.log(
                            f"{to_endpoint} specifies its internal endpoint as None, so not creating a NtoQ for it"
                        )
                    continue

                ntoq_name = receiver.replace(".", "_")
                ntoq_name = make_unique_name(ntoq_name, modules_with_network)

                if verbose:
                    console.log(
                        f"Adding NetworkToQueue named {ntoq_name} connected to {to_endpoint_internal} in app {app_name}"
                    )

                nwmgr_connection_name = app_connection.nwmgr_connection
                nwmgr_connection = the_system.get_network_endpoint(
                    nwmgr_connection_name)

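                # The NetworkToQueue module receives msg_type objects from
                # the nwmgr connection (subscribing to its topics, if any)
                # and pushes them onto the destination internal queue.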
                modules_with_network.append(
                    DAQModule(
                        name=ntoq_name,
                        plugin="NetworkToQueue",
                        connections={
                            "output": Connection(to_endpoint_internal)
                        },
                        conf=ntoq.Conf(
                            msg_type=app_connection.msg_type,
                            msg_module_name=app_connection.msg_module_name,
                            receiver_config=nor.Conf(
                                name=nwmgr_connection_name,
                                subscriptions=nwmgr_connection.topics))))

    if unconnected_endpoints:
        # TODO: Use proper logging
        console.log(
            f"Warning: the following endpoints of {app_name} were not connected to anything: {unconnected_endpoints}"
        )

    app.modulegraph.modules = modules_with_network
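
The helper make_unique_name used above is not shown in this excerpt. A minimal sketch of what it plausibly does, under the assumption that it only needs to avoid name clashes with modules already in the list:

def make_unique_name(base, module_list):
    # Keep appending an increasing numeric suffix until the candidate
    # name is not taken by any module already in the list.
    module_names = [module.name for module in module_list]
    suffix = 0
    while f"{base}_{suffix}" in module_names:
        suffix += 1
    return f"{base}_{suffix}"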
Code example #5
def generate(
    FRONTEND_TYPE="pacman",
    NUMBER_OF_DATA_PRODUCERS=1,
    NUMBER_OF_TP_PRODUCERS=1,
    DATA_RATE_SLOWDOWN_FACTOR=1,
    ENABLE_SOFTWARE_TPG=False,
    RUN_NUMBER=333,
    DATA_FILE="./frames.bin",
    TP_DATA_FILE="./tp_frames.bin",
):

    # Define modules and queues
    queue_bare_specs = ([
        app.QueueSpec(inst="time_sync_q", kind="FollyMPMCQueue", capacity=100),
        app.QueueSpec(
            inst="data_fragments_q", kind="FollyMPMCQueue", capacity=100),
        app.QueueSpec(
            inst="errored_frames_q", kind="FollyMPMCQueue", capacity=10000),
    ] + [
        app.QueueSpec(
            inst=f"data_requests_{idx}", kind="FollySPSCQueue", capacity=1000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"{FRONTEND_TYPE}_link_{idx}",
            kind="FollySPSCQueue",
            capacity=100000,
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst=f"raw_tp_link_{idx}", kind="FollySPSCQueue", capacity=100000)
        for idx in range(
            NUMBER_OF_DATA_PRODUCERS,
            NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
        )
    ] + [
        app.QueueSpec(
            inst=f"sw_tp_queue_{idx}", kind="FollySPSCQueue", capacity=100000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        app.QueueSpec(
            inst="tp_data_requests", kind="FollySPSCQueue", capacity=1000)
    ] + [
        app.QueueSpec(
            inst=f"tpset_link_{idx}", kind="FollySPSCQueue", capacity=10000)
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ])
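
    # Note the queue kinds: shared queues (time sync, fragments, errored
    # frames) are multi-producer FollyMPMCQueue, while the per-link queues
    # are single-producer/single-consumer FollySPSCQueue.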

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs,
                                        key=lambda x: x.inst))

    mod_specs = ([
        mspec("fake_source", "PacmanCardReader", [
            app.QueueInfo(
                name=f"output_{idx}",
                inst=f"{FRONTEND_TYPE}_link_{idx}",
                dir="output",
            ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
        ]),
    ] + [
        mspec(
            f"datahandler_{idx}",
            "DataLinkHandler",
            [
                app.QueueInfo(
                    name="raw_input",
                    inst=f"{FRONTEND_TYPE}_link_{idx}",
                    dir="input",
                ),
                app.QueueInfo(
                    name="timesync", inst="time_sync_q", dir="output"),
                app.QueueInfo(name="data_requests_0",
                              inst=f"data_requests_{idx}",
                              dir="input"),
                app.QueueInfo(name="fragment_queue",
                              inst="data_fragments_q",
                              dir="output"),
                app.QueueInfo(
                    name="tp_out", inst=f"sw_tp_queue_{idx}", dir="output"),
                app.QueueInfo(
                    name="tpset_out", inst=f"tpset_link_{idx}", dir="output"),
                app.QueueInfo(name="errored_frames",
                              inst="errored_frames_q",
                              dir="output"),
            ],
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(
            "timesync_consumer",
            "TimeSyncConsumer",
            [
                app.QueueInfo(
                    name="input_queue", inst="time_sync_q", dir="input")
            ],
        )
    ] + [
        mspec(
            "fragment_consumer",
            "FragmentConsumer",
            [
                app.QueueInfo(
                    name="input_queue", inst="data_fragments_q", dir="input")
            ],
        )
    ] + [
        mspec(
            f"sw_tp_handler_{idx}",
            "DataLinkHandler",
            [
                app.QueueInfo(
                    name="raw_input", inst=f"sw_tp_queue_{idx}", dir="input"),
                app.QueueInfo(
                    name="timesync", inst="time_sync_q", dir="output"),
                app.QueueInfo(
                    name="requests", inst="tp_data_requests", dir="input"),
                app.QueueInfo(name="fragment_queue",
                              inst="data_fragments_q",
                              dir="output"),
            ],
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(
            f"tpset_publisher_{idx}",
            "QueueToNetwork",
            [
                app.QueueInfo(
                    name="input", inst=f"tpset_link_{idx}", dir="input")
            ],
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ] + [
        mspec(
            f"raw_tp_handler_{idx}",
            "DataLinkHandler",
            [
                app.QueueInfo(
                    name="raw_input", inst=f"raw_tp_link_{idx}", dir="input"),
                app.QueueInfo(
                    name="timesync", inst="time_sync_q", dir="output"),
            ],
        ) for idx in range(
            NUMBER_OF_DATA_PRODUCERS,
            NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
        )
    ] + [
        mspec(
            "errored_frame_consumer",
            "ErroredFrameConsumer",
            [
                app.QueueInfo(
                    name="input_queue", inst="errored_frames_q", dir="input")
            ],
        )
    ])

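    # nwmgr connections: one TPSet pub/sub connection per producer link
    # (topic "foo", localhost ports 5000+idx), plus a shared "timesync"
    # connection for Timesync messages.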
    nw_specs = [
        nwmgr.Connection(name=f"tpsets_{idx}",
                         topics=["foo"],
                         address="tcp://127.0.0.1:" + str(5000 + idx))
        for idx in range(NUMBER_OF_DATA_PRODUCERS)
    ]
    nw_specs.append(
        nwmgr.Connection(name="timesync",
                         topics=["Timesync"],
                         address="tcp://127.0.0.1:6000"))

    init_specs = app.Init(queues=queue_specs,
                          modules=mod_specs,
                          nwconnections=nw_specs)

    jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
    print(jstr)

    initcmd = rccmd.RCCommand(
        id=basecmd.CmdId("init"),
        entry_state="NONE",
        exit_state="INITIAL",
        data=init_specs,
    )

    confcmd = mrccmd(
        "conf",
        "INITIAL",
        "CONFIGURED",
        [
            (
                "fake_source",
                pcr.Conf(link_confs=[
                    pcr.LinkConfiguration(geoid=pcr.GeoID(
                        system="kNDLarTPC", region=0, element=idx), )
                    for idx in range(NUMBER_OF_DATA_PRODUCERS)
                ] + [
                    pcr.LinkConfiguration(geoid=sec.GeoID(
                        system="TPC", region=0, element=idx), )
                    for idx in range(
                        NUMBER_OF_DATA_PRODUCERS,
                        NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
                    )
                ],
                         # input_limit=10485100, # default
                         ),
            ),
        ] + [(
            f"datahandler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    fake_trigger_flag=1,
                    region_id=0,
                    element_id=idx,
                    timesync_connection_name="timesync",
                    timesync_topic_name="Timesync",
                ),
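                # Size the buffer to hold roughly 3 s of data: CLOCK_SPEED_HZ
                # ticks/s over 25 * 12 ticks per readout chunk (a
                # frontend-specific assumption), scaled by the slowdown factor.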
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    region_id=0,
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=0,
                    element_id=idx,
                    enable_software_tpg=ENABLE_SOFTWARE_TPG,
                    error_counter_threshold=100,
                    error_reset_freq=10000,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=0,
                    element_id=idx,
                    output_file=f"output_{idx}.out",
                    stream_buffer_size=8388608,
                    enable_raw_recording=True,
                    use_o_direct=False,
                ),
            ),
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)] + [(
            f"sw_tp_handler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    fake_trigger_flag=1,
                    region_id=0,
                    element_id=idx,
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    region_id=0,
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=0,
                    element_id=idx,
                    enable_software_tpg=ENABLE_SOFTWARE_TPG,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=0,
                    element_id=idx,
                    output_file=f"output_{idx}.out",
                    stream_buffer_size=8388608,
                    enable_raw_recording=False,
                    use_o_direct=False,
                ),
            ),
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)] + [(
            f"raw_tp_handler_{idx}",
            rconf.Conf(
                readoutmodelconf=rconf.ReadoutModelConf(
                    source_queue_timeout_ms=QUEUE_POP_WAIT_MS,
                    fake_trigger_flag=1,
                    region_id=0,
                    element_id=idx,
                ),
                latencybufferconf=rconf.LatencyBufferConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    region_id=0,
                    element_id=idx,
                ),
                rawdataprocessorconf=rconf.RawDataProcessorConf(
                    region_id=0,
                    element_id=idx,
                    enable_software_tpg=ENABLE_SOFTWARE_TPG,
                ),
                requesthandlerconf=rconf.RequestHandlerConf(
                    latency_buffer_size=3 * CLOCK_SPEED_HZ /
                    (25 * 12 * DATA_RATE_SLOWDOWN_FACTOR),
                    pop_limit_pct=0.8,
                    pop_size_pct=0.1,
                    region_id=0,
                    element_id=idx,
                    output_file=f"output_{idx}.out",
                    stream_buffer_size=8388608,
                    enable_raw_recording=False,
                    use_o_direct=False,
                ),
            ),
        ) for idx in range(
            NUMBER_OF_DATA_PRODUCERS,
            NUMBER_OF_DATA_PRODUCERS + NUMBER_OF_TP_PRODUCERS,
        )] + [(
            f"tpset_publisher_{idx}",
            qton.Conf(
                msg_type="dunedaq::trigger::TPSet",
                msg_module_name="TPSetNQ",
                sender_config=nos.Conf(
                    name=f"tpsets_{idx}",
                    topic="foo",
                    stype="msgpack",
                ),
            ),
        ) for idx in range(NUMBER_OF_DATA_PRODUCERS)],
    )

    jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
    print(jstr)

    startpars = rccmd.StartParams(run=RUN_NUMBER)
    startcmd = mrccmd(
        "start",
        "CONFIGURED",
        "RUNNING",
        [
            ("datahandler_.*", startpars),
            ("fake_source", startpars),
            ("data_recorder_.*", startpars),
            ("timesync_consumer", startpars),
            ("fragment_consumer", startpars),
            ("sw_tp_handler_.*", startpars),
            ("raw_tp_handler_.*", startpars),
            ("tpset_publisher_.*", startpars),
            ("errored_frame_consumer", startpars),
        ],
    )

    jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStart\n\n", jstr)

    stopcmd = mrccmd(
        "stop",
        "RUNNING",
        "CONFIGURED",
        [
            ("fake_source", None),
            ("datahandler_.*", None),
            ("data_recorder_.*", None),
            ("timesync_consumer", None),
            ("fragment_consumer", None),
            ("sw_tp_handler_.*", None),
            ("raw_tp_handler_.*", None),
            ("tpset_publisher_.*", None),
            ("errored_frame_consumer", None),
        ],
    )

    jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStop\n\n", jstr)

    scrapcmd = mrccmd("scrap", "CONFIGURED", "INITIAL", [("", None)])

    jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nScrap\n\n", jstr)

    # Create a list of commands
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd, scrapcmd]

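    # Run-time command asking every datahandler to record raw data for a
    # duration of 10 (units defined by RecordingParams; presumably seconds).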
    record_cmd = mrccmd(
        "record",
        "RUNNING",
        "RUNNING",
        [("datahandler_.*", rconf.RecordingParams(duration=10))],
    )

    jstr = json.dumps(record_cmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nRecord\n\n", jstr)

    cmd_seq.append(record_cmd)

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr
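
The generator returns the full command sequence as a JSON string and leaves persistence to the caller. A minimal usage sketch (the output file name here is hypothetical):

if __name__ == "__main__":
    # Generate commands for two producers with software TPG enabled.
    json_blob = generate(NUMBER_OF_DATA_PRODUCERS=2, ENABLE_SOFTWARE_TPG=True)
    with open("pacman_readout_commands.json", "w") as outfile:
        outfile.write(json_blob)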
Code example #6
def generate(
    PARTITION="hsi_readout_test",
    RUN_NUMBER=333,
    CONNECTIONS_FILE="${TIMING_SHARE}/config/etc/connections.xml",
    READOUT_PERIOD=1e3,
    HSI_DEVICE_NAME="BOREAS_FMC",
    TTCM_S1: int = 1,
    TTCM_S2: int = 2,
    UHAL_LOG_LEVEL="notice",
    OUTPUT_PATH=".",
):

    # network connection
    nw_specs = [
        nwmgr.Connection(name=PARTITION + ".hsievent",
                         topics=[],
                         address="tcp://127.0.0.1:12344")
    ]

    # Define modules and queues
    queue_bare_specs = [
        app.QueueSpec(
            inst="trigger_candidate_q", kind="FollySPSCQueue", capacity=2000),
    ]

    # Only needed to reproduce the same order as when using jsonnet
    queue_specs = app.QueueSpecs(sorted(queue_bare_specs, key=lambda x: x.inst))

    mod_specs = [
        mspec("hsi", "HSIReadout", []),
        mspec("ttcm", "TimingTriggerCandidateMaker", [
            app.QueueInfo(name="output", inst="trigger_candidate_q", dir="output"),
        ]),
    ]

    init_specs = app.Init(queues=queue_specs, modules=mod_specs, nwconnections=nw_specs)

    jstr = json.dumps(init_specs.pod(), indent=4, sort_keys=True)
    print(jstr)

    initcmd = rcif.RCCommand(
        id=cmdlib.CmdId("init"),
        entry_state="NONE",
        exit_state="INITIAL",
        data=init_specs
    )

    mods = [
        ("hsi", hsi.ConfParams(
            connections_file=CONNECTIONS_FILE,
            readout_period=READOUT_PERIOD,
            hsi_device_name=HSI_DEVICE_NAME,
            uhal_log_level=UHAL_LOG_LEVEL,
            hsievent_connection_name=f"{PARTITION}.hsievent",
        )),
        ("ttcm", ttcm.Conf(
            s1=ttcm.map_t(signal_type=TTCM_S1,
                          time_before=100000,
                          time_after=200000),
            s2=ttcm.map_t(signal_type=TTCM_S2,
                          time_before=100000,
                          time_after=200000),
            hsievent_connection_name=f"{PARTITION}.hsievent",
        )),
    ]

    confcmd = mrccmd("conf", "INITIAL", "CONFIGURED", mods)

    jstr = json.dumps(confcmd.pod(), indent=4, sort_keys=True)
    print(jstr)

    startpars = rcif.StartParams(run=RUN_NUMBER, disable_data_storage=False)

    startcmd = mrccmd("start", "CONFIGURED", "RUNNING", [
        ("hsi", None),
        ("ttcm", startpars),
    ])

    jstr = json.dumps(startcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStart\n\n", jstr)

    stopcmd = mrccmd("stop", "RUNNING", "CONFIGURED", [
        ("hsi", None),
        ("ttcm", None),
    ])

    jstr = json.dumps(stopcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nStop\n\n", jstr)

    scrapcmd = mcmd("scrap", [
        ("", None),
    ])

    jstr = json.dumps(scrapcmd.pod(), indent=4, sort_keys=True)
    print("=" * 80 + "\nScrap\n\n", jstr)

    # Create a list of commands (scrapcmd is built and printed above, so
    # include it in the returned sequence as in the previous example)
    cmd_seq = [initcmd, confcmd, startcmd, stopcmd, scrapcmd]

    # Print them as json (to be improved/moved out)
    jstr = json.dumps([c.pod() for c in cmd_seq], indent=4, sort_keys=True)
    return jstr
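
As in the previous example, the returned JSON string still has to be written out by the caller. A minimal sketch (the file name is hypothetical):

if __name__ == "__main__":
    with open("hsi_readout_test_commands.json", "w") as outfile:
        outfile.write(generate(RUN_NUMBER=333))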