Example no. 1
0
def _launch_single_job(inputfile_path):
    """ Function for pool to start a single madx job """
    try:
        # Log next to the input file, with a ".log" suffix appended.
        madx_wrapper.resolve_and_run_file(
            inputfile_path, log_file=inputfile_path + ".log"
        )
    except madx_wrapper.MadxError as err:
        # Report the failure as a string so the pool can collect it.
        return str(err)
    return None
def _launch_single_job(inputfile_path):
    """ Function for pool to start a single madx job """
    log_path = "{}.log".format(inputfile_path)
    try:
        madx_wrapper.resolve_and_run_file(inputfile_path, log_file=log_path)
    except madx_wrapper.MadxError as madx_err:
        # Hand the error text back to the pool instead of raising.
        return str(madx_err)
    # Success: no error message to report.
    return None
Example no. 3
0
def _callMadx(pathToInputFile, attemptsLeft=5):
    """Run madx on the given input file, retrying on failure.

    :Parameters:
        'pathToInputFile': string
            Path to the madx input file to run.
        'attemptsLeft': int
            Number of retries remaining before giving up.
    :Raises: Exception if madx still fails once no attempts are left.
    """
    try:
        madx_wrapper.resolve_and_run_file(pathToInputFile, log_file=DEV_NULL)
    except madx_wrapper.MadxError:
        # madx failed for whatever reason, let's try it again (tbach)
        print("madx failed. pathToInputFile:", pathToInputFile,
              "attempts left:", attemptsLeft)
        # Bug fix: was `attemptsLeft is 0` — identity comparison on ints is
        # unreliable; `<= 0` also guards against a negative starting value.
        if attemptsLeft <= 0:
            raise Exception("madx finally failed, can not continue")
        print("lets wait 0.5s and try again...")
        time.sleep(0.5)
        _callMadx(pathToInputFile, attemptsLeft - 1)
Example no. 4
0
def _callMadx(pathToInputFile, attemptsLeft=5):
    """Run madx on the given input file, retrying on failure.

    :Parameters:
        'pathToInputFile': string
            Path to the madx input file to run.
        'attemptsLeft': int
            Number of retries remaining before giving up.
    :Raises: Exception if madx still fails once no attempts are left.
    """
    try:
        madx_wrapper.resolve_and_run_file(pathToInputFile, log_file=DEV_NULL)
    except madx_wrapper.MadxError:
        # madx failed for whatever reason, let's try it again (tbach)
        print("madx failed. pathToInputFile:", pathToInputFile,
              "attempts left:", attemptsLeft)
        # Bug fix: was `attemptsLeft is 0` — identity comparison on ints is
        # unreliable; `<= 0` also guards against a negative starting value.
        if attemptsLeft <= 0:
            raise Exception("madx finally failed, can not continue")
        print("lets wait 0.5s and try again...")
        time.sleep(0.5)
        _callMadx(pathToInputFile, attemptsLeft - 1)
def run_madx_jobs(jobs, local, cwd, max_duration):
    """ Wrapper to run or submit the madx-jobs. """
    if local:
        # Run every job of every sequence directly in this process.
        for job_list in jobs.values():
            for job_file in job_list:
                madx.resolve_and_run_file(
                    job_file, log_file="{}.log".format(job_file), cwd=cwd
                )
        return

    n_jobs = len(jobs)
    for idx_job, key in enumerate(jobs):
        LOG.info("Sending Job No. {:d}/{:d}.".format(idx_job + 1, n_jobs))
        # create one folder per job to not get conflicts with the temp-subfolder
        job_dir = get_job_dir(cwd, key)
        bash_file = htc.write_madx_bash(job_dir, "", jobs[key])
        condor_job = htc.create_job_for_bashfile(bash_file, duration=max_duration)
        sub_file = htc.create_subfile_from_job(job_dir, condor_job)
        htc.submit_jobfile(sub_file)
Example no. 6
0
def _launch_single_job(inputfile_path):
    """ Function for pool to start a single madx job """
    # Log file lives next to the input file; errors propagate to the caller.
    madx_wrapper.resolve_and_run_file(inputfile_path,
                                      log_file=inputfile_path + ".log")
Example no. 7
0
def main(accel_cls, options):
    '''
    Run the Segment by Segment procedure for every requested element.

    :Parameters:
        'accel_cls': class
            Accelerator class providing `get_segment` for each element.
        'options': Values
            Values instance with all options from OptionParser
    :Return: int
        0 if execution was successful otherwise !=0
    '''

    print("+++ Starting Segment by Segment +++")
    print("Using accelerator class: " + accel_cls.__name__)
    measurement_path = options.path
    w_path = options.wpath
    # "0" is the sentinel for "no separate w-path": fall back to the
    # measurement path itself.
    if w_path == "0":
        w_path = measurement_path
    input_data = _InputData(measurement_path, w_path)

    save_path = options.save + os.path.sep
    utils.iotools.create_dirs(save_path)

    # options.segf is a comma-separated list of segment/element specifiers.
    elements_data = options.segf.split(',')
    error_cut = float(options.cuts)

    twiss_file = options.twiss
    print("Input model twiss file", twiss_file)
    input_model = _try_to_load_twiss(twiss_file)
    if input_model is None:
        raise IOError("Cannot read input model, aborting.")

    twiss_directory = os.path.dirname(twiss_file)

    elements_names, start_bpms, end_bpms = structure_elements_info(
        elements_data)

    summaries = _Summaries(save_path)

    for element_name in elements_names:

        print("Started processing", element_name)

        # Select usable boundary BPMs for this element, filtered by error_cut.
        start_bpm_name, end_bpm_name, is_element = get_good_bpms(
            input_data, error_cut, input_model, start_bpms, end_bpms,
            element_name)

        (start_bpm_horizontal_data, start_bpm_vertical_data,
         end_bpm_horizontal_data,
         end_bpm_vertical_data) = gather_data(input_data, start_bpm_name,
                                              end_bpm_name)

        # Optional measurements: each helper reports whether the data exists
        # together with the boundary values themselves.
        element_has_dispersion, start_bpm_dispersion, end_bpm_dispersion = _get_dispersion_parameters(
            input_data, start_bpm_name, end_bpm_name)

        element_has_coupling, f_ini, f_end = _get_coupling_parameters(
            input_data, start_bpm_name, end_bpm_name)

        element_has_chrom, chrom_ini, chrom_end = _get_chrom_parameters(
            input_data, start_bpm_name, end_bpm_name)

        accel_instance = accel_cls.get_segment(
            element_name, start_bpm_name, end_bpm_name,
            os.path.join(save_path, "modifiers.madx"))

        # With 'madpass' set, generation of the MAD-X job is skipped and the
        # previously written file is simply re-run (see else branch).
        if not options.madpass:
            _run4mad(save_path, accel_instance, start_bpm_horizontal_data,
                     start_bpm_vertical_data, end_bpm_horizontal_data,
                     end_bpm_vertical_data, start_bpm_dispersion,
                     end_bpm_dispersion, f_ini, f_end, chrom_ini, chrom_end,
                     options.path, twiss_directory, input_data.couple_method,
                     options.bb, options.mad)

        else:
            print("Just rerunning mad")
            mad_file_path, log_file_path = _get_files_for_mad(
                save_path, element_name)
            madx_wrapper.resolve_and_run_file(mad_file_path,
                                              log_file=log_file_path)

        propagated_models = _PropagatedModels(save_path, element_name)

        kmod_data_file_x, kmod_data_file_y = _get_kmod_files()

        # Write the per-element results and accumulate the summaries.
        getAndWriteData(element_name, input_data, input_model,
                        propagated_models, save_path, is_element,
                        element_has_dispersion, element_has_coupling,
                        element_has_chrom, accel_instance, summaries,
                        kmod_data_file_x, kmod_data_file_y)
        print("Everything done for", element_name, "\n")

    summaries.write_summaries_to_files()

    print("+++  Ended Segment by Segment   +++")
def _launch_single_job(inputfile_path):
    """ Function for pool to start a single madx job """
    # Logs next to the input file; the wrapper's result is handed back as-is.
    return madx_wrapper.resolve_and_run_file(inputfile_path,
                                             log_file=inputfile_path + ".log")