Example #1
def write(df, header, meas_input, plane, rdt):
    outputdir = join(meas_input.outputdir, "rdt", _rdt_to_order_and_type(rdt))
    iotools.create_dirs(outputdir)
    tfs.write(join(outputdir, f"f{_rdt_to_str(rdt)}_{plane.lower()}{EXT}"),
              df,
              header,
              save_index="NAME")
Example #2
def prepare_run(cls, lhc_instance, output_path):
    if lhc_instance.fullresponse:
        cls._prepare_fullresponse(lhc_instance, output_path)
    macros_path = join(output_path, MACROS_DIR)
    iotools.create_dirs(macros_path)
    lib_path = join(os.path.dirname(__file__), os.pardir, os.pardir, "lib")
    shutil.copy(join(lib_path, GENERAL_MACROS),
                join(macros_path, GENERAL_MACROS))
    shutil.copy(join(lib_path, LHC_MACROS), join(macros_path, LHC_MACROS))
    if lhc_instance.energy is not None:
        core = f"{int(lhc_instance.energy*1000):04d}"
        file_path = lhc_instance.get_lhc_error_dir()
        shutil.copy(join(file_path, f"{core}GeV.tfs"),
                    join(output_path, ERROR_DEFFS_TXT))
        shutil.copy(
            join(file_path, "b2_errors_settings",
                 f"beam{lhc_instance.beam}_{core}GeV.madx"),
            join(output_path, B2_SETTINGS_MADX))
        b2_table = tfs.read(join(lhc_instance.get_lhc_error_dir(),
                                 f"b2_errors_beam{lhc_instance.beam}.tfs"),
                            index="NAME")
        gen_df = pd.DataFrame(data=np.zeros(
            (b2_table.index.size, len(_b2_columns()))),
                              index=b2_table.index,
                              columns=_b2_columns())
        gen_df["K1L"] = b2_table.loc[:, f"K1L_{core}"].to_numpy()
        tfs.write(join(output_path, B2_ERRORS_TFS),
                  gen_df,
                  headers_dict={
                      "NAME": "EFIELD",
                      "TYPE": "EFIELD"
                  },
                  save_index="NAME")
Example #3
def _create_input_ps():
    iotools.create_dirs(BASE_OUTPUT)
    iotools.copy_item(join(PS_MODEL, "elements.str"),
                      join(BASE_OUTPUT, "elements.str"))
    iotools.copy_item(join(PS_MODEL, "PS_LE_LHC_low_chroma.str"),
                      join(BASE_OUTPUT, "strengths.madx"))
    iotools.write_string_into_new_file(
        join(BASE_OUTPUT, MODIFIERS_MADX),
        f"call, file='{join(BASE_OUTPUT, 'elements.str')}';\n"
        f"call, file='{join(BASE_OUTPUT, 'strengths.madx')}';\n")
Example #4
def measure_optics(input_files, measure_input):
    """
    Main function to compute various lattice optics parameters from frequency spectra.

    Args:
        input_files: InputFiles object containing frequency spectra files (linx/y)
        measure_input: OpticsInput object containing analysis settings

    Returns:
    """
    LOGGER.info(f"Calculating optics parameters - code version {VERSION}")
    iotools.create_dirs(measure_input.outputdir)
    logging_tools.add_module_handler(
        logging_tools.file_handler(
            os.path.join(measure_input.outputdir, LOG_FILE)))
    tune_dict = tune.calculate(measure_input, input_files)
    common_header = _get_header(measure_input, tune_dict)
    invariants = {}
    for plane in PLANES:
        phase_dict, out_dfs = phase.calculate(measure_input, input_files,
                                              tune_dict, plane)
        phase.write(out_dfs, [common_header, common_header],
                    measure_input.outputdir, plane)
        phase.write_special(measure_input, phase_dict, tune_dict[plane]["QF"],
                            plane)
        if measure_input.only_coupling:
            continue
        beta_df, beta_header = beta_from_phase.calculate(
            measure_input, tune_dict, phase_dict, common_header, plane)
        beta_from_phase.write(beta_df, beta_header, measure_input.outputdir,
                              plane)

        ratio = beta_from_amplitude.calculate(measure_input, input_files,
                                              tune_dict, beta_df,
                                              common_header, plane)
        invariants[plane] = kick.calculate(measure_input, input_files, ratio,
                                           common_header, plane)
        ip_df = interaction_point.betastar_from_phase(measure_input,
                                                      phase_dict)
        interaction_point.write(ip_df, common_header, measure_input.outputdir,
                                plane)
        dispersion.calculate_orbit(measure_input, input_files, common_header,
                                   plane)
        dispersion.calculate_dispersion(measure_input, input_files,
                                        common_header, plane)
        if plane == "X":
            dispersion.calculate_normalised_dispersion(measure_input,
                                                       input_files, beta_df,
                                                       common_header)
    # coupling.calculate_coupling(measure_input, input_files, phase_dict, tune_dict, common_header)
    if measure_input.nonlinear:
        iotools.create_dirs(os.path.join(measure_input.outputdir, "rdt"))
        rdt.calculate(measure_input, input_files, tune_dict, invariants,
                      common_header)
    if measure_input.chromatic_beating:
        chromatic_beating(input_files, measure_input, tune_dict)
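
A minimal usage sketch for this entrypoint, mirroring the call pattern of Example #6 further down, where InputFiles is built from the harpy lin files and the parsed optics options; the file names here are placeholders and optics_opt is assumed to come from the omc3 option parser (exposing outputdir, only_coupling, nonlinear, chromatic_beating, ...).

# Hypothetical driver; lin file paths are placeholders.
lin_files = ["beam1_measurement.linx", "beam1_measurement.liny"]
inputs = InputFiles(lin_files, optics_opt)
measure_optics(inputs, optics_opt)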
Example #5
def _create_input_lhc():
    iotools.create_dirs(BASE_OUTPUT)
    iotools.copy_item(join(COMP_MODEL, "opticsfile.24_ctpps2"),
                      join(BASE_OUTPUT, "strengths.madx"))
    iotools.write_string_into_new_file(
        join(BASE_OUTPUT, MODIFIERS_MADX),
        f"call, file='{join(BASE_OUTPUT, 'strengths.madx')}';\n")
    iotools.write_string_into_new_file(join(BASE_OUTPUT, "corrections.madx"),
                                       "\n")
    iotools.write_string_into_new_file(join(BASE_OUTPUT, "extracted_mqts.str"),
                                       "\n")
Example #6
def _measure_optics(lins, optics_opt):
    if len(lins) == 0:
        lins = optics_opt.files
    inputs = measure_optics.InputFiles(lins, optics_opt)
    iotools.create_dirs(optics_opt.outputdir)
    calibrations = measure_optics.copy_calibration_files(
        optics_opt.outputdir, optics_opt.calibrationdir)
    inputs.calibrate(calibrations)
    with timeit(lambda spanned: LOGGER.info(
            f"Total time for optics measurements: {spanned}")):
        measure_optics.measure_optics(inputs, optics_opt)
Example #7
    def prepare_run(cls, accel: Lhc) -> None:
        if accel.year in ["2018", "2022"
                          ]:  # these years should be handled by the fetcher
            symlink_dst = Path(accel.model_dir) / LHC_REPOSITORY_NAME
            if not symlink_dst.exists():
                LOGGER.debug(f"Symlink destination: {symlink_dst}")
                symlink_dst.absolute().symlink_to(
                    (ACCELERATOR_MODEL_REPOSITORY / f"{accel.year}"))

        cls.check_accelerator_instance(accel)
        LOGGER.debug("Preparing model creation structure")
        macros_path = accel.model_dir / MACROS_DIR
        iotools.create_dirs(macros_path)

        LOGGER.debug("Copying macros to model directory")
        lib_path = Path(__file__).parent.parent / "madx_macros"
        shutil.copy(lib_path / GENERAL_MACROS, macros_path / GENERAL_MACROS)
        shutil.copy(lib_path / LHC_MACROS, macros_path / LHC_MACROS)
        shutil.copy(lib_path / LHC_MACROS_RUN3, macros_path / LHC_MACROS_RUN3)

        if accel.energy is not None:
            LOGGER.debug(
                "Copying B2 error files for given energy in model directory")
            core = f"{int(accel.energy * 1000):04d}"
            error_dir_path = accel.get_lhc_error_dir()
            shutil.copy(error_dir_path / f"{core}GeV.tfs",
                        accel.model_dir / ERROR_DEFFS_TXT)
            shutil.copy(
                error_dir_path / "b2_errors_settings" /
                f"beam{accel.beam}_{core}GeV.madx",
                accel.model_dir / B2_SETTINGS_MADX,
            )
            b2_table = tfs.read(error_dir_path /
                                f"b2_errors_beam{accel.beam}.tfs",
                                index="NAME")
            gen_df = pd.DataFrame(
                data=np.zeros((b2_table.index.size, len(_b2_columns()))),
                index=b2_table.index,
                columns=_b2_columns(),
            )
            gen_df["K1L"] = b2_table.loc[:, f"K1L_{core}"].to_numpy()
            tfs.write(
                accel.model_dir / B2_ERRORS_TFS,
                gen_df,
                headers_dict={
                    "NAME": "EFIELD",
                    "TYPE": "EFIELD"
                },
                save_index="NAME",
            )
Example #8
def analyse_kmod(opt):
    """
    Run Kmod analysis
    """
    LOG.info('Getting input parameter')
    if opt.interaction_point is None and opt.circuits is None:
        raise AttributeError('No IP or circuits specified, stopping analysis')
    if opt.interaction_point is not None and opt.circuits is not None:
        raise AttributeError('Both IP and circuits specified, choose only one, stopping analysis')
    if not 1 < len(opt.betastar_and_waist) < 5:
        raise AttributeError("Option betastar_and_waist has to consist of 2 to 4 floats")
    opt.betastar_and_waist = convert_betastar_and_waist(opt.betastar_and_waist)
    for error in ("cminus", "errorK", "errorL", "misalignment"):
        opt = check_default_error(opt, error)
    if opt.measurement_dir is None and opt.model_dir is None and opt.phase_weight:
        raise AttributeError("Cannot use phase advance without measurement or model")
    if opt.outputdir is None:
        opt.outputdir = opt.working_directory

    LOG.info(f"{'IP trim' if opt.interaction_point is not None else 'Individual magnets'} analysis")
    opt['magnets'] = MAGNETS_IP[opt.interaction_point.upper()] if opt.interaction_point is not None else [
        find_magnet(opt.beam, circuit) for circuit in opt.circuits]
    opt['label'] = f'{opt.interaction_point}{opt.beam}' if opt.interaction_point is not None else f'{opt.magnets[0]}-{opt.magnets[1]}'
    opt['instruments'] = list(map(str.upper, opt.instruments.split(",")))

    output_dir = join(opt.outputdir, opt.label)
    iotools.create_dirs(output_dir)

    LOG.info('Get inputfiles')
    magnet1_df, magnet2_df = helper.get_input_data(opt)
    opt, magnet1_df, magnet2_df, betastar_required = define_params(opt, magnet1_df, magnet2_df)

    LOG.info('Run simplex')
    magnet1_df, magnet2_df, results_df, instrument_beta_df = analysis.analyse(magnet1_df, magnet2_df, opt, betastar_required)

    LOG.info('Plot tunes and fit')
    if not opt.no_plots:
        helper.plot_cleaned_data([magnet1_df, magnet2_df], join(output_dir, FIT_PLOTS_NAME), interactive_plot=False)

    LOG.info('Write magnet dataframes and results')
    for magnet_df in [magnet1_df, magnet2_df]:
        tfs.write(join(output_dir, f"{magnet_df.headers['QUADRUPOLE']}{EXT}"), magnet_df)

    tfs.write(join(output_dir, f'{RESULTS_FILE_NAME}{EXT}'), results_df)

    if opt.instruments_found:
        tfs.write(join(output_dir, f'{INSTRUMENTS_FILE_NAME}{EXT}'), instrument_beta_df)

    create_lsa_results_file(betastar_required, opt.instruments_found, results_df, instrument_beta_df, output_dir)
Example #9
def _run_harpy(harpy_options):
    iotools.create_dirs(harpy_options.outputdir)
    with timeit(
            lambda spanned: LOGGER.info(f"Total time for Harpy: {spanned}")):
        lins = []
        all_options = _replicate_harpy_options_per_file(harpy_options)
        tbt_datas = [(tbt.read_tbt(option.files,
                                   datatype=option.tbt_datatype), option)
                     for option in all_options]
        for tbt_data, option in tbt_datas:
            lins.extend([
                handler.run_per_bunch(bunch_data, bunch_options)
                for bunch_data, bunch_options in _multibunch(tbt_data, option)
            ])
    return lins
Example #10
def converter_entrypoint(opt):
    """
    Converts turn-by-turn files from various formats to ``LHC`` binary SDDS files.
    Optionally, it can replicate files with added noise.

    Converter Kwargs:
      - **files**: TbT files to convert

        Flags: **--files**
        Required: ``True``
      - **outputdir**: Output directory.

        Flags: **--outputdir**
        Required: ``True``
      - **tbt_datatype** *(str)*: Choose datatype from which to import (e.g. LHC binary SDDS).

        Flags: **--tbt_datatype**
        Default: ``lhc``
      - **realizations** *(int)*: Number of copies with added noise.

        Flags: **--realizations**
        Default: ``1``
      - **noise_levels** *(float)*: Sigma of added Gaussian noise.

        Flags: **--noise_levels**
        Default: ``None``
      - **use_average** *(bool)*: If set, the returned SDDS only contains the average over all particles/bunches.

        Flags: **--use_average**
        Default: ``False``

      - **drop_elements**: Names of elements to drop from the input file during conversion.

        Flags: **--drop_elements**
        Default: ``None``
    """
    if opt.realizations < 1:
        raise ValueError("Number of realizations lower than 1.")
    iotools.create_dirs(opt.outputdir)
    save_options_to_config(
        str(
            Path(opt.outputdir) / DEFAULT_CONFIG_FILENAME.format(
                time=datetime.utcnow().strftime(formats.TIME))),
        dict(sorted(opt.items())),
    )
    _read_and_write_files(opt)
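
A hedged call sketch using only the options documented in the docstring above, assuming the entrypoint can be driven with a plain dict the way Example #11 (just below) drives create_instance_and_model; all file and directory names are placeholders.

# Hypothetical invocation: convert two LHC binary SDDS files without extra noise copies.
converter_entrypoint(dict(
    files=["bunch1.sdds", "bunch2.sdds"],  # placeholder TbT files
    outputdir="converted_tbt",             # placeholder output directory
    tbt_datatype="lhc",                    # default datatype, shown explicitly
))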
Example #11
def test_booster_creation_nominal():
    iotools.create_dirs(BASE_OUTPUT)
    iotools.write_string_into_new_file(join(BASE_OUTPUT, MODIFIERS_MADX), "\n")
    opt_dict = dict(type="nominal",
                    accel="psbooster",
                    ring=1,
                    nat_tunes=[4.21, 4.27],
                    drv_tunes=[0.205, 0.274],
                    driven_excitation="acd",
                    dpp=0.0,
                    energy=0.16,
                    modifiers=join(BASE_OUTPUT, MODIFIERS_MADX),
                    fullresponse=True,
                    outputdir=BASE_OUTPUT,
                    writeto=join(BASE_OUTPUT, "job.twiss.madx"),
                    logfile=join(BASE_OUTPUT, "madx_log.txt"))
    create_instance_and_model(opt_dict)
    _clean_up(BASE_OUTPUT)
Example #12
def _write_config_file(harpy_opt, optics_opt, accelerator_opt):
    """Write the parsed options into a config file for later use."""
    all_opt = OrderedDict()
    if harpy_opt is not None:
        all_opt["harpy"] = True
        all_opt.update(OrderedDict(sorted(harpy_opt.items())))

    if optics_opt is not None:
        optics_opt = OrderedDict(sorted(optics_opt.items()))
        optics_opt.pop('accelerator')

        all_opt["optics"] = True
        all_opt.update(optics_opt)
        all_opt.update(sorted(accelerator_opt.items()))

    out_dir = all_opt["outputdir"]
    file_name = DEFAULT_CONFIG_FILENAME.format(
        time=datetime.utcnow().strftime(formats.TIME))
    iotools.create_dirs(out_dir)

    save_options_to_config(os.path.join(out_dir, file_name), all_opt)
Example #13
def converter_entrypoint(opt: DotDict) -> None:
    """
    Looks for expected ``BetaBeat.src`` output files in the provided input directory, converts them to
    the format used in ``omc3`` and writes the converted files in the provided output directory.

    *--Required--*

    - **inputdir** *(str)*:

        Directory with BetaBeat.src output files.


    - **outputdir** *(str)*:

        Output directory for converted files.


    *--Optional--*

    - **suffix** *(str)*:

        AC dipole compensation suffix used in the provided BetaBeat.src output ('_free' for
        compensation by equation, '_free2' by model).

        choices: ``('', '_free', '_free2')``

        default: ``_free``

    """
    iotools.create_dirs(Path(opt.outputdir))
    save_options_to_config(
        Path(opt.outputdir) / DEFAULT_CONFIG_FILENAME.format(
            time=datetime.utcnow().strftime(formats.TIME)),
        OrderedDict(sorted(opt.items())),
    )
    LOGGER.warning(
        "Be aware that the input units in the BetaBeat.src outputs may not be SI"
    )
    convert_old_directory_to_new(opt)
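
Again a hedged sketch, reusing the dict-style entrypoint call seen in Example #11; the directory names are placeholders and suffix is left at its documented default.

# Hypothetical conversion of an old BetaBeat.src result directory.
converter_entrypoint(dict(
    inputdir="old_betabeat_results",
    outputdir="converted_omc3_results",
    suffix="_free",  # documented default, shown explicitly
))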
Example #14
def create_instance_and_model(opt, accel_opt):
    if sys.flags.debug:
        numeric_level = getattr(logging, "DEBUG", None)
        ch = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter(
            ' %(asctime)s %(levelname)s | %(name)s : %(message)s')
        ch.setFormatter(formatter)
        logging.getLogger().addHandler(ch)
        logging.getLogger().setLevel(numeric_level)

    else:
        numeric_level = getattr(logging, "WARNING", None)
        logging.basicConfig(level=numeric_level)  # warning level to stderr

    create_dirs(opt.outputdir)
    accel_inst = manager.get_accelerator(accel_opt)
    LOGGER.info(
        f"Accelerator Instance {accel_inst.NAME}, model type {opt.type}")
    accel_inst.verify_object()
    creator = CREATORS[accel_inst.NAME][opt.type]
    creator.prepare_run(accel_inst, opt.outputdir)
    madx_script = creator.get_madx_script(accel_inst, opt.outputdir)
    run_string(madx_script, output_file=opt.writeto, log_file=opt.logfile)
Example #15
def write(df, header, meas_input, order, crdt):
    outputdir = Path(meas_input.outputdir) / "crdt" / order
    iotools.create_dirs(outputdir)
    tfs.write(str(outputdir / f"{crdt}{EXT}"), df, header, save_index='NAME')
Example #16
def create_instance_and_model(opt, accel_opt):
    """
    Manager Keyword Args:
        *--Required--*

        - **accel**:

            Choose the accelerator to use. Can be the class already.

            choices: ``['lhc', 'ps', 'esrf', 'psbooster', 'skekb', 'JPARC', 'petra', 'iota']``


    Creator Keyword Args:
        *--Required--*

        - **outputdir** *(str)*:

            Output path for the model; twiss files will be written here.


        *--Optional--*

        - **logfile** *(str)*:

            Path to the file where to write the MAD-X script output. If not
            provided, it will be written to sys.stdout.


        - **type**:

            Type of model to create.

            choices: ``('nominal', 'best_knowledge', 'coupling_correction')``


        - **writeto** *(str)*:

            Path to the file where to write the resulting MAD-X script.


    Accelerator Keyword Args:
        lhc: :mod:`omc3.model.accelerators.lhc`

        ps: :mod:`omc3.model.accelerators.ps`

        esrf: :mod:`omc3.model.accelerators.esrf`

        psbooster: :mod:`omc3.model.accelerators.psbooster`

        skekb: :mod:`omc3.model.accelerators.skekb`

        iota: :mod:`omc3.model.accelerators.iota`

        petra: :mod:`omc3.model.accelerators.petra` (not implemented)

        JPARC: Not implemented
    """
    if sys.flags.debug:
        numeric_level = getattr(logging, "DEBUG", None)
        ch = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter(' %(asctime)s %(levelname)s | %(name)s : %(message)s')
        ch.setFormatter(formatter)
        logging.getLogger().addHandler(ch)
        logging.getLogger().setLevel(numeric_level)
        
    else:
        numeric_level = getattr(logging, "WARNING", None)
        logging.basicConfig(level=numeric_level)  # warning level to stderr

    create_dirs(opt.outputdir)
    accel_inst = manager.get_accelerator(accel_opt)
    LOGGER.info(f"Accelerator Instance {accel_inst.NAME}, model type {opt.type}")
    accel_inst.verify_object()
    creator = CREATORS[accel_inst.NAME][opt.type]
    creator.prepare_run(accel_inst, opt.outputdir)
    madx_script = creator.get_madx_script(accel_inst, opt.outputdir)
    run_string(madx_script, output_file=opt.writeto, log_file=opt.logfile)
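
Example #11 earlier in this listing shows a concrete call of this entrypoint for the PS Booster; a correspondingly hedged sketch for a nominal model, assuming the same dict-style invocation and with every value a placeholder (accelerator-specific options deliberately left out), could look like this:

# Hypothetical call; accelerator-specific keys (tunes, modifiers, energy, ...)
# are omitted and would have to be filled in as in the psbooster test above.
opt_dict = dict(
    accel="lhc",
    type="nominal",
    outputdir="model_output",
    writeto="model_output/job.twiss.madx",
    logfile="model_output/madx_log.txt",
)
create_instance_and_model(opt_dict)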
Example #17
def prepare_run(cls, accel):
    macros_path = accel.model_dir / MACROS_DIR
    create_dirs(macros_path)
    lib_path = Path(__file__).parent.parent.parent / "lib"
    shutil.copy(lib_path / GENERAL_MACROS, macros_path / GENERAL_MACROS)
Example #18
def analyse_kmod(opt):
    """
    Run Kmod analysis.

    Kmod Keyword Arguments:
        *--Required--*

        - **beam** *(int)*:

            define beam used: 1 or 2

            choices: ``[1, 2]``


        - **betastar_and_waist** *(float)*:

            Estimated beta star of measurements and waist shift


        - **working_directory** *(Path)*:

            path to working directory with stored KMOD measurement files


        *--Optional--*

        - **circuits** *(str)*:

            circuit names of the modulated quadrupoles


        - **cminus** *(float)*:

            C Minus


        - **errorK** *(float)*:

            error in K of the modulated quadrupoles, relative to gradient


        - **errorL** *(float)*:

            error in length of the modulated quadrupoles, unit m


        - **instruments** *(str)*:

            define instruments (use keywords from twiss) at which beta should be
            calculated, separated by comma, e.g. MONITOR,RBEND,INSTRUMENT,TKICKER

            default: ``MONITOR,SBEND,TKICKER,INSTRUMENT``


        - **interaction_point** *(str)*:

            define interaction point

            choices: ``['ip1', 'ip2', 'ip5', 'ip8', 'IP1', 'IP2', 'IP5', 'IP8']``


        - **log**:

            flag for creating a log file

            action: ``store_true``


        - **measurement_dir** *(Path)*:

            give an optics measurement directory to include phase constraint in
            penalty function


        - **misalignment** *(float)*:

            misalignment of the modulated quadrupoles in m


        - **model_dir** *(Path)*:

            twiss model that contains phase


        - **no_autoclean**:

            flag for manually cleaning data

            action: ``store_true``


        - **no_plots**:

            flag to not create any plots

            action: ``store_true``


        - **no_sig_digits**:

            flag to not use significant digits

            action: ``store_true``


        - **outputdir** *(Path)*:

            Path where outputfiles will be stored, defaults to the given
            working_directory


        - **phase_weight** *(float)*:

            weight in the penalty function between phase and beta. If weight=0, phase is
            not used as a constraint.

            default: ``0.0``


        - **simulation**:

            flag for enabling simulation mode

            action: ``store_true``


        - **tune_uncertainty** *(float)*:

            tune measurement uncertainty

            default: ``2.5e-05``
    """
    LOG.info('Getting input parameter')
    if opt.interaction_point is None and opt.circuits is None:
        raise AttributeError('No IP or circuits specified, stopping analysis')
    if opt.interaction_point is not None and opt.circuits is not None:
        raise AttributeError('Both IP and circuits specified, choose only one, stopping analysis')
    if not 1 < len(opt.betastar_and_waist) < 5:
        raise AttributeError("Option betastar_and_waist has to consist of 2 to 4 floats")
    opt.betastar_and_waist = convert_betastar_and_waist(opt.betastar_and_waist)
    for error in ("cminus", "errorK", "errorL", "misalignment"):
        opt = check_default_error(opt, error)
    if opt.measurement_dir is None and opt.model_dir is None and opt.phase_weight:
        raise AttributeError("Cannot use phase advance without measurement or model")
    if opt.outputdir is None:
        opt.outputdir = opt.working_directory

    LOG.info(f"{'IP trim' if opt.interaction_point is not None else 'Individual magnets'} analysis")
    opt['magnets'] = MAGNETS_IP[opt.interaction_point.upper()] if opt.interaction_point is not None else [
        find_magnet(opt.beam, circuit) for circuit in opt.circuits]
    opt['label'] = f'{opt.interaction_point}B{opt.beam:d}' if opt.interaction_point is not None else f'{opt.magnets[0]}-{opt.magnets[1]}'
    opt['instruments'] = list(map(str.upper, opt.instruments.split(",")))

    output_dir = opt.outputdir / opt.label
    iotools.create_dirs(output_dir)

    LOG.info('Get inputfiles')
    magnet1_df, magnet2_df = helper.get_input_data(opt)
    opt, magnet1_df, magnet2_df, betastar_required = define_params(opt, magnet1_df, magnet2_df)

    LOG.info('Run simplex')
    magnet1_df, magnet2_df, results_df, instrument_beta_df = analysis.analyse(magnet1_df, magnet2_df, opt, betastar_required)

    LOG.info('Plot tunes and fit')
    if not opt.no_plots:
        helper.plot_cleaned_data([magnet1_df, magnet2_df], output_dir / FIT_PLOTS_NAME, interactive_plot=False)

    LOG.info('Write magnet dataframes and results')
    for magnet_df in [magnet1_df, magnet2_df]:
        tfs.write(output_dir / f"{magnet_df.headers['QUADRUPOLE']}{EXT}", magnet_df)

    tfs.write(output_dir / f'{RESULTS_FILE_NAME}{EXT}', results_df)

    if opt.instruments_found:
        tfs.write(output_dir / f'{INSTRUMENTS_FILE_NAME}{EXT}', instrument_beta_df)

    create_lsa_results_file(betastar_required, opt.instruments_found, results_df, instrument_beta_df, output_dir)
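
A hedged invocation sketch built only from the keyword arguments documented above, assuming the same dict-style entrypoint call as in Example #11; all values, in particular the beta-star/waist estimate and the working directory, are placeholders.

from pathlib import Path

# Hypothetical IP analysis; pass circuits=... instead of interaction_point for
# the individual-magnet mode handled above.
analyse_kmod(dict(
    beam=1,
    betastar_and_waist=[0.25, 0.0],  # estimated beta* and waist shift (2 to 4 floats)
    working_directory=Path("kmod_measurements"),
    interaction_point="ip1",
))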
Example #19
def prepare_run(cls, instance, output_path):
    macros_path = join(output_path, MACROS_DIR)
    create_dirs(macros_path)
    lib_path = join(dirname(__file__), pardir, pardir, "lib")
    shutil.copy(join(lib_path, GENERAL_MACROS), join(macros_path, GENERAL_MACROS))
Example #20
def create_instance_and_model(opt, accel_opt) -> Accelerator:
    """
    Manager Keyword Args:
        *--Required--*

        - **accel**:

            Choose the accelerator to use. Can be the class already.

            choices: ``['lhc', 'ps', 'esrf', 'psbooster', 'skekb', 'JPARC', 'petra', 'iota']``


    Creator Keyword Args:
        *--Required--*

        - **outputdir** *(str)*:

            Output path for the model; twiss files will be written here.


        *--Optional--*

        - **logfile** *(str)*:

            Path to the file where to write the MAD-X script output. If not
            provided, it will be written to sys.stdout.


        - **type**:

            Type of model to create.

            choices: ``('nominal', 'best_knowledge', 'coupling_correction')``


    Accelerator Keyword Args:
        lhc: :mod:`omc3.model.accelerators.lhc`

        ps: :mod:`omc3.model.accelerators.ps`

        esrf: :mod:`omc3.model.accelerators.esrf`

        psbooster: :mod:`omc3.model.accelerators.psbooster`

        skekb: :mod:`omc3.model.accelerators.skekb`

        iota: :mod:`omc3.model.accelerators.iota`

        petra: :mod:`omc3.model.accelerators.petra` (not implemented)

        JPARC: Not implemented
    """
    # Prepare paths
    create_dirs(opt.outputdir)

    accel_inst = manager.get_accelerator(accel_opt)
    LOG.info(f"Accelerator Instance {accel_inst.NAME}, model type {opt.type}")
    creator = CREATORS[accel_inst.NAME][opt.type]

    # Prepare model-dir output directory
    accel_inst.model_dir = opt.outputdir
    creator.prepare_run(accel_inst)

    # Get the madx script with relative output paths; since the run is executed
    # with `cwd` set to the output directory, the resulting model-dir is more
    # self-contained. (jdilly)
    accel_inst.model_dir = Path()
    madx_script = creator.get_madx_script(accel_inst)

    # Run madx to create model
    run_string(madx_script,
               output_file=opt.outputdir / JOB_MODEL_MADX,
               log_file=opt.logfile,
               cwd=opt.outputdir)

    # Check output and return accelerator instance
    accel_inst.model_dir = opt.outputdir
    creator.check_run_output(accel_inst)
    return accel_inst
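
The same pattern can be sketched for this newer variant, which prepares the model directory itself and hands back the accelerator instance; the dict-style call is again assumed from Example #11 and all paths and option values are placeholders.

from pathlib import Path

# Hypothetical call; accelerator-specific options (beam, year, ...) are omitted
# here and depend on omc3.model.accelerators.lhc.
accel = create_instance_and_model(dict(
    accel="lhc",
    type="nominal",
    outputdir=Path("lhc_model"),
    logfile=Path("lhc_model/madx_log.txt"),
))
print(accel.model_dir)  # points back at the output directory after the MAD-X run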