Example #1
def get_cathode_particlegroup(settings_input,
                              DISTGEN_INPUT_FILE,
                              verbose=False,
                              distgen_verbose=False,
                              id_start=1):
    unit_registry = UnitRegistry()
    settings = copy.copy(settings_input)

    distgen_input = yaml.safe_load(open(DISTGEN_INPUT_FILE))
    for k, v in settings.items():
        distgen_input = update_nested_dict(distgen_input, {k: v},
                                           verbose=verbose,
                                           create_new=False)
    gen = Generator(distgen_input, verbose=distgen_verbose)
    gen.run()
    PG = gen.particles

    if ('cathode:sigma_xy' in settings):
        raise ValueError(
            'cathode:sigma_xy is deprecated, please specify value and units instead.'
        )
    if ('cathode:sigma_xy:value' in settings
            and 'cathode:sigma_xy:units' in settings):
        sigma_xy_value = settings.pop(
            'cathode:sigma_xy:value'
        )  # remove from dictionary to avoid recursion problem
        sigma_xy_units = settings.pop(
            'cathode:sigma_xy:units'
        )  # remove from dictionary to avoid recursion problem
        sigma_xy = sigma_xy_value * unit_registry.parse_expression(
            sigma_xy_units)
        sigma_xy = sigma_xy.to('m').magnitude  # convert to meters

        sx_orig = 0.5 * (PG['sigma_x'] + PG['sigma_y'])
        sig_ratio = sigma_xy / sx_orig
        settings_1 = copy.copy(settings)

        var_list = [
            'r_dist:sigma_xy:value', 'r_dist:truncation_radius_right:value',
            'r_dist:truncation_radius_left:value'
        ]
        for var in var_list:
            if (var in settings):
                settings_1[var] = settings[var] * sig_ratio
        PG = get_cathode_particlegroup(settings_1,
                                       DISTGEN_INPUT_FILE,
                                       verbose=verbose,
                                       distgen_verbose=distgen_verbose,
                                       id_start=id_start)
        if (verbose):
            print(
                f'Rescaling sigma_xy from {sx_orig} -> {sigma_xy}. Achieved: {PG["sigma_x"]}'
            )
        return PG

    PG.assign_id()
    PG.id = np.arange(id_start, id_start + gen['n_particle'])

    return PG
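A minimal usage sketch for the helper above (the file name and setting keys are assumptions, not part of the original): the cathode:sigma_xy:value/units pair is what triggers the rescaling branch.

# Hypothetical usage sketch; 'distgen.yaml' and the r_dist key are assumed to exist.
settings = {
    'r_dist:sigma_xy:value': 1.0,    # base spot size already present in the YAML
    'cathode:sigma_xy:value': 0.4,   # requested rms spot size at the cathode
    'cathode:sigma_xy:units': 'mm',
}
PG = get_cathode_particlegroup(settings, 'distgen.yaml', verbose=True)
print(PG['sigma_x'], PG['sigma_y'])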
Example #2
def evaluate_astra_with_distgen(settings,
                                astra_input_file=None,
                                distgen_input_file=None,
                                workdir=None,
                                astra_bin='$ASTRA_BIN',
                                timeout=2500,
                                verbose=False,
                                auto_set_spacecharge_mesh=True,
                                archive_path=None,
                                merit_f=None):
    """
    Similar to run_astra_with_distgen, but returns a flat dict of outputs as processed by merit_f. 
    
    If no merit_f is given, a default one will be used. See:
        astra.evaluate.default_astra_merit
    
    Will raise an exception if there is an error. 
    
    """
    A = run_astra_with_distgen(
        settings=settings,
        astra_input_file=astra_input_file,
        distgen_input_file=distgen_input_file,
        workdir=workdir,
        astra_bin=astra_bin,
        timeout=timeout,
        auto_set_spacecharge_mesh=auto_set_spacecharge_mesh,
        verbose=verbose)

    if merit_f:
        output = merit_f(A)
    else:
        output = default_astra_merit(A)

    if output['error']:
        raise ValueError('run_astra_with_distgen returned error in output')

    #Recreate Generator object for fingerprint, proper archiving
    # TODO: make this cleaner
    G = Generator()
    G.input = A.distgen_input

    fingerprint = fingerprint_astra_with_distgen(A, G)
    output['fingerprint'] = fingerprint

    if archive_path:
        path = tools.full_path(archive_path)
        assert os.path.exists(path), f'archive path does not exist: {path}'
        archive_file = os.path.join(path, fingerprint + '.h5')
        output['archive'] = archive_file

        # Call the composite archive method
        archive_astra_with_distgen(A, G, archive_file=archive_file)

    return output
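A hypothetical call sketch for evaluate_astra_with_distgen; the file names, settings keys, and archive directory are assumptions. With no merit_f, default_astra_merit is applied and the flat output dict carries the fingerprint and archive file path.

out = evaluate_astra_with_distgen(
    {'distgen:n_particle': 2000, 'zstop': 0.5},   # assumed distgen and Astra keys
    astra_input_file='astra.yaml',
    distgen_input_file='distgen.yaml',
    archive_path='archive',   # evaluated objects are written here as <fingerprint>.h5
    verbose=True,
)
print(out['fingerprint'], out['archive'])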
def evaluate_impact_with_distgen(settings,
                                 distgen_input_file=None,
                                 impact_config=None,
                                 workdir=None,
                                 archive_path=None,
                                 merit_f=None,
                                 verbose=False):
    """
    
    Similar to run_impact_with_distgen, but requires settings as the only positional argument.
    
    If an archive_path is given, the complete evaluated Impact and Generator objects will be archived
    to a file named using a fingerprint from both objects.
    
    If merit_f is given, it will be applied to the evaluated Impact object, and its result will be returned.
    
    Otherwise, a default function will be applied. 
    
    
    """

    I = run_impact_with_distgen(settings=settings,
                                distgen_input_file=distgen_input_file,
                                impact_config=impact_config,
                                workdir=workdir,
                                verbose=verbose)

    if merit_f:
        output = merit_f(I)
    else:
        output = default_impact_merit(I)

    if 'error' in output and output['error']:

        raise ValueError('run_impact_with_distgen returned error in output')

    #Recreate Generator object for fingerprint, proper archiving
    # TODO: make this cleaner
    G = Generator()
    G.input = I.distgen_input

    fingerprint = fingerprint_impact_with_distgen(I, G)
    output['fingerprint'] = fingerprint

    if archive_path:
        path = tools.full_path(archive_path)
        assert os.path.exists(path), f'archive path does not exist: {path}'
        archive_file = os.path.join(path, fingerprint + '.h5')
        output['archive'] = archive_file

        # Call the composite archive method
        archive_impact_with_distgen(I, G, archive_file=archive_file)

    return output
def run_impact_with_distgen(settings=None,
                            distgen_input_file=None,
                            impact_config=None,
                            workdir=None,
                            verbose=False):
    """
    Creates, runs, and returns an Impact object using distgen input. 
    
    The parsed distgen Generator's .input is attached to the returned object as .distgen_input.
        
    """

    # setup objects
    if isinstance(impact_config, str):
        I = Impact.from_yaml(impact_config)
    else:
        I = Impact(**impact_config)

    if workdir:
        I.workdir = workdir
        I.configure()  # again

    I.verbose = verbose
    G = Generator(distgen_input_file)
    G.verbose = verbose

    if settings:
        for key in settings:
            val = settings[key]
            if key.startswith('distgen:'):
                key = key[len('distgen:'):]
                if verbose:
                    print(f'Setting distgen {key} = {val}')
                G[key] = val
            else:
                # Assume impact
                if verbose:
                    print(f'Setting impact {key} = {val}')
                I[key] = val

    # Get particles
    G.run()
    P = G.particles

    # Attach particles
    I.initial_particles = P

    # Attach distgen input. This is non-standard.
    I.distgen_input = G.input

    I.run()

    return I
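A sketch of the settings routing above (file names and keys are assumptions): keys prefixed with 'distgen:' are stripped of the prefix and applied to the Generator, everything else is applied to the Impact object.

I = run_impact_with_distgen(
    settings={
        'distgen:n_particle': 10000,   # routed to the distgen Generator
        'total_charge': 100e-12,       # routed to the Impact object
    },
    distgen_input_file='distgen.yaml',
    impact_config='ImpactT.yaml',
    verbose=True,
)
# I.initial_particles holds the generated particles; I.distgen_input the parsed distgen input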
def get_distgen_beam_for_phasing_from_particlegroup(PG,
                                                    n_particle=10,
                                                    verbose=False,
                                                    output_PG=False):

    variables = ['x', 'y', 'z', 'px', 'py', 'pz', 't']

    transforms = {
        f'avg_{var}': {
            'type': f'set_avg {var}',
            f'avg_{var}': {
                'value': PG['mean_' + var],
                'units': PG.units(var).unitSymbol
            }
        }
        for var in variables
    }

    phasing_distgen_input = {
        'n_particle': n_particle,
        'random_type': 'hammersley',
        'transforms': transforms,
        'total_charge': {
            'value': 1.0,
            'units': 'pC'
        },
        'start': {
            'type': 'time',
            'tstart': {
                'value': 0.0,
                'units': 's'
            }
        },
    }

    gen = Generator(phasing_distgen_input, verbose=verbose)

    if (not output_PG):
        pbeam = gen.beam()
        return pbeam
    else:
        gen.run()
        PG = gen.particles
        return PG
Example #6
def get_distgen_beam_for_phasing(beam, n_particle=10, verbose=False):

    variables = ['x', 'y', 'z', 'px', 'py', 'pz', 't']

    transforms = {
        f'avg_{var}': {
            'type': f'set_avg {var}',
            f'avg_{var}': {
                'value': beam.avg(var).magnitude,
                'units': str(beam.avg(var).units)
            }
        }
        for var in variables
    }

    phasing_distgen_input = {
        'n_particle': n_particle,
        'random_type': 'hammersley',
        'transforms': transforms,
        'total_charge': {'value': 0.0, 'units': 'C'},
        'start': {'type': 'time', 'tstart': {'value': 0.0, 'units': 's'}},
    }
    
    gen = Generator(phasing_distgen_input, verbose=verbose) 
    pbeam = gen.beam()

    return pbeam
def get_cathode_particlegroup(settings,
                              DISTGEN_INPUT_FILE,
                              verbose=True,
                              distgen_verbose=False,
                              id_start=1,
                              sigma_xy=None):
    distgen_input = yaml.safe_load(open(DISTGEN_INPUT_FILE))
    for k, v in settings.items():
        distgen_input = update_nested_dict(distgen_input, {k: v},
                                           verbose=verbose,
                                           create_new=False)
    gen = Generator(distgen_input, verbose=distgen_verbose)
    gen.run()
    PG = gen.particles

    if (sigma_xy is not None):
        sx = PG['sigma_x']
        sig_ratio = sigma_xy / sx
        settings_1 = copy.copy(settings)

        var_list = [
            'r_dist:sigma_xy:value', 'r_dist:truncation_radius_right:value',
            'r_dist:truncation_radius_left:value'
        ]
        for var in var_list:
            if (var in settings):
                settings_1[var] = settings[var] * sig_ratio
        PG = get_cathode_particlegroup(settings_1,
                                       DISTGEN_INPUT_FILE,
                                       verbose=verbose,
                                       distgen_verbose=distgen_verbose,
                                       id_start=id_start)
        if (verbose):
            print(
                f'Rescaling sigma_xy from {sx} -> {sigma_xy}. Achieved: {PG["sigma_x"]}'
            )
        return PG

    PG.assign_id()
    PG.id = np.arange(id_start, id_start + gen['n_particle'])

    return PG
Example #8
def run_astra_with_distgen(settings=None,
                           astra_input_file=None,
                           distgen_input_file=None,
                           workdir=None,
                           astra_bin='$ASTRA_BIN',
                           timeout=2500,
                           verbose=False,
                           auto_set_spacecharge_mesh=True):
    """
    Run Astra with particles generated by distgen. 
    
        settings: dict with keys that can appear in an Astra input file,
         or distgen keys with prefix 'distgen:'
        
    Example usage:
        A = run_astra_with_distgen({'lspch':False, 'distgen:n_particle':1000},
                       astra_input_file='astra.yaml',
                       distgen_input_file='distgen.yaml',
                       verbose=True,
                       timeout=None
                      )        
        
    """

    # Call simpler evaluation if there is no generator:
    if not distgen_input_file:
        return run_astra(settings=settings,
                         astra_input_file=astra_input_file,
                         workdir=workdir,
                         astra_bin=astra_bin,
                         timeout=timeout,
                         verbose=verbose)

    if verbose:
        print('run_astra_with_distgen')

    # Distgen generator
    G = Generator(input=distgen_input_file, verbose=verbose)

    # Make astra objects
    if astra_input_file.endswith('.yaml'):
        if verbose:
            print(f'loading Astra as yaml: {astra_input_file}')
        A = Astra.from_yaml(astra_input_file)
        if workdir:
            A.workdir = workdir
            A.configure()  # again to make sure things are set properly

    else:
        A = Astra(astra_bin=astra_bin,
                  input_file=astra_input_file,
                  workdir=workdir)

    A.timeout = timeout
    A.verbose = verbose

    # Special
    A.input['newrun']['l_rm_back'] = True  # Remove backwards particles

    #
    if settings:
        for key, val in settings.items():

            found = False
            # Check distgen
            if key.startswith('distgen:'):
                key = key[len('distgen:'):]
                if verbose:
                    print(f'Setting distgen {key} = {val}')
                G[key] = val
                continue

            # Check for direct settable attribute
            if ':' in key:
                A[key] = val
                continue

            for nl in A.input:
                if key in A.input[nl]:
                    found = True
                    if verbose:
                        print(key, 'is in astra', nl)
                    A.input[nl][key] = val

            if not found:
                raise ValueError(f'Key not found: {key}')

    # Attach distgen input. This is non-standard.
    A.distgen_input = G.input

    # Run distgen
    G.run()
    P = G.particles

    # Attach to Astra object
    A.initial_particles = P

    if auto_set_spacecharge_mesh:
        n_particles = len(P)
        sc_settings = recommended_spacecharge_mesh(n_particles)
        A.input['charge'].update(sc_settings)
        if verbose:
            print('set spacecharge mesh for n_particles:', n_particles, 'to',
                  sc_settings)

    A.run()

    return A
def evaluate_gpt_with_stability(settings,
                                archive_path=None,
                                merit_f=None,
                                gpt_input_file=None,
                                distgen_input_file=None,
                                workdir=None,
                                use_tempdir=True,
                                gpt_bin='$GPT_BIN',
                                timeout=2500,
                                auto_phase=False,
                                verbose=False,
                                gpt_verbose=False,
                                asci2gdf_bin='$ASCI2GDF_BIN',
                                plot_on=False):
    """
    Will raise an exception if there is an error. 
    """
    unit_registry = UnitRegistry()

    random_state = np.random.get_state()
    np.random.seed(
        seed=6858
    )  # temporary seed to make the stability calculations reproducible

    if (gpt_input_file is None):
        raise ValueError('You must specify the GPT input file')

    if (distgen_input_file is None):
        raise ValueError('You must specify the distgen input file')

    input_particle_group = get_cathode_particlegroup(settings,
                                                     distgen_input_file,
                                                     verbose=verbose)

    G = run_gpt_with_particlegroup(settings=settings,
                                   gpt_input_file=gpt_input_file,
                                   input_particle_group=input_particle_group,
                                   workdir=workdir,
                                   use_tempdir=use_tempdir,
                                   gpt_bin=gpt_bin,
                                   timeout=timeout,
                                   auto_phase=auto_phase,
                                   verbose=verbose,
                                   gpt_verbose=gpt_verbose,
                                   asci2gdf_bin=asci2gdf_bin)

    if merit_f:
        output = merit_f(G)
    else:
        output = default_gpt_merit(G)

    stability_settings = add_stability_settings(G, settings)

    if 'stability:n_runs' in settings:
        n_runs = settings['stability:n_runs']
    else:
        n_runs = 100

    arrival_t = np.empty(n_runs)
    arrival_t[:] = np.nan
    final_E = copy.copy(arrival_t)

    for ii in range(n_runs):
        auto_phase = False
        reduced_timeout = 20
        s = add_jitter_to_settings(stability_settings)
        stability_beam = get_distgen_beam_for_phasing_from_particlegroup(
            input_particle_group,
            n_particle=10,
            verbose=verbose,
            output_PG=True)

        G = run_gpt_with_particlegroup(s,
                                       gpt_input_file,
                                       input_particle_group=stability_beam,
                                       workdir=workdir,
                                       use_tempdir=use_tempdir,
                                       gpt_bin=gpt_bin,
                                       timeout=reduced_timeout,
                                       auto_phase=auto_phase,
                                       verbose=verbose,
                                       gpt_verbose=gpt_verbose,
                                       asci2gdf_bin=asci2gdf_bin)
        arrival_t[ii] = G.stat('mean_t', 'screen')[-1]
        final_E[ii] = G.stat('mean_energy', 'screen')[-1]
        if ('stability:sigma_global_phase' in s):
            # The global phase is used to mimic the laser, not an actual global shift,
            # so shift the arrival time back by the global shift amount
            arrival_t[ii] = arrival_t[ii] + (
                s['global_phase'] -
                stability_settings['global_phase']) * 2.13675214e-12

    arrival_t = arrival_t - np.mean(arrival_t)
    final_E = final_E - np.mean(final_E)

    if (plot_on):
        plt.plot(final_E * 1e-3, arrival_t * 1e15, 'ro')
        plt.ylabel('Arrival time error (fs)')
        plt.xlabel('Energy error (kV)')
        plt.show()

    output['end_sigma_t_mean'] = np.std(arrival_t)
    output['end_sigma_E_mean'] = np.std(final_E)
    output['end_avg_Et_mean'] = np.mean(final_E * arrival_t)

    output['end_sigma_t_mean_slice'] = np.sqrt(output['end_sigma_t_mean']**2 -
                                               output['end_avg_Et_mean']**2 /
                                               output['end_sigma_E_mean']**2)
    output['end_sigma_t_combined'] = np.sqrt(output['end_sigma_t_mean']**2 +
                                             output['end_sigma_t']**2)
    output['end_sigma_t_combined_slice'] = np.sqrt(
        output['end_sigma_t_mean_slice']**2 + output['end_sigma_t']**2)

    for k in settings:
        output[k] = settings[k]

    np.random.set_state(
        random_state
    )  # return the RNG to what it was doing before this function seeded it

    if output['error']:
        raise ValueError('error occurred!')

    #Recreate Generator object for fingerprint, proper archiving
    # TODO: make this cleaner
    gen = Generator()

    fingerprint = G.fingerprint()
    output['fingerprint'] = fingerprint

    if archive_path:
        path = tools.full_path(archive_path)
        assert os.path.exists(path), f'archive path does not exist: {path}'
        archive_file = os.path.join(path, fingerprint + '.h5')
        output['archive'] = archive_file

        # Call the composite archive method
        archive_gpt_with_distgen(G, gen, archive_file=archive_file)

    return output
Example #10
def evaluate_run_gpt_with_particlegroup(settings,
                                        archive_path=None,
                                        merit_f=None,
                                        gpt_input_file=None,
                                        distgen_input_file=None,
                                        workdir=None,
                                        use_tempdir=True,
                                        gpt_bin='$GPT_BIN',
                                        timeout=2500,
                                        auto_phase=False,
                                        verbose=False,
                                        gpt_verbose=False,
                                        asci2gdf_bin='$ASCI2GDF_BIN'):
    """
    Will raise an exception if there is an error. 
    """
    if (gpt_input_file is None):
        raise ValueError('You must specify the GPT input file')

    if (distgen_input_file is None):
        raise ValueError('You must specify the distgen input file')

    if ('final_charge' in settings
            and 'coreshield:core_charge_fraction' not in settings):
        settings['coreshield:core_charge_fraction'] = 0.5

    if ('coreshield' not in settings):
        input_particle_group = get_cathode_particlegroup(settings,
                                                         distgen_input_file,
                                                         verbose=verbose)
    else:
        input_particle_group = get_coreshield_particlegroup(settings,
                                                            distgen_input_file,
                                                            verbose=verbose)

    G = run_gpt_with_particlegroup(settings=settings,
                                   gpt_input_file=gpt_input_file,
                                   input_particle_group=input_particle_group,
                                   workdir=workdir,
                                   use_tempdir=use_tempdir,
                                   gpt_bin=gpt_bin,
                                   timeout=timeout,
                                   auto_phase=auto_phase,
                                   verbose=verbose,
                                   gpt_verbose=gpt_verbose,
                                   asci2gdf_bin=asci2gdf_bin)

    if merit_f:
        output = merit_f(G)
    else:
        output = default_gpt_merit(G)

    if ('merit:z' in settings.keys()):
        z_list = settings['merit:z']
        if (not isinstance(z_list, list)):
            z_list = [z_list]
        for z in z_list:
            g = copy.deepcopy(G)
            scr = get_screen_data(g, screen_z=z)[0]
            g.particles.clear()
            g.particles.insert(0, scr)
            g.output['n_tout'] = 0
            g.output['n_screen'] = 1
            if merit_f:
                g_output = merit_f(g)
            else:
                g_output = default_gpt_merit(g)
            for j in g_output.keys():
                if ('end_' in j):
                    output[j.replace('end_', f'merit:{z}_')] = g_output[j]

    if ('merit:peak_intensity_fraction' in settings.keys()):
        peak_intensity_fraction = settings['merit:peak_intensity_fraction']
        g = copy.deepcopy(G)
        scr = g.screen[-1]
        peak_radius = int(np.floor(scr.r.size * peak_intensity_fraction))
        r_sort = np.sort(scr.r)
        scr.weight[scr.r > r_sort[peak_radius]] = 0.0
        output['peak_intensity'] = 490206980 * scr.charge / (
            np.pi * r_sort[peak_radius]**2)

    for k in settings:
        output[k] = settings[k]

    if output['error']:
        raise ValueError('error occurred!')

    #Recreate Generator object for fingerprint, proper archiving
    # TODO: make this cleaner
    gen = Generator()

    fingerprint = G.fingerprint()
    output['fingerprint'] = fingerprint

    if archive_path:
        path = tools.full_path(archive_path)
        assert os.path.exists(path), f'archive path does not exist: {path}'
        archive_file = os.path.join(path, fingerprint + '.h5')
        output['archive'] = archive_file

        # Call the composite archive method
        archive_gpt_with_distgen(G, gen, archive_file=archive_file)

    return output
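A hypothetical call illustrating the extra 'merit:' hooks in the function above; file names, keys, and values are assumptions. 'merit:z' re-runs the merit function on the screen nearest each listed z and stores those keys with a 'merit:<z>_' prefix, while 'merit:peak_intensity_fraction' adds a peak_intensity estimate.

out = evaluate_run_gpt_with_particlegroup(
    {
        'r_dist:sigma_xy:value': 0.5,          # assumed distgen key, applied in get_cathode_particlegroup
        'merit:z': [0.5, 1.0],                 # screen positions (m) for the extra merit evaluation
        'merit:peak_intensity_fraction': 0.5,
    },
    gpt_input_file='gpt.in',
    distgen_input_file='distgen.yaml',
    auto_phase=True,
)
print(out['merit:0.5_sigma_x'], out['peak_intensity'])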
Example #11
def run_gpt_with_distgen(settings=None,
                         gpt_input_file=None,
                         distgen_input_file=None,
                         workdir=None,
                         use_tempdir=True,
                         gpt_bin='$GPT_BIN',
                         timeout=2500,
                         auto_phase=False,
                         verbose=False,
                         gpt_verbose=False,
                         asci2gdf_bin='$ASCI2GDF_BIN'):
    """
    Run gpt with particles generated by distgen. 
    
        settings: dict with keys that can appear in a gpt or distgen Generator input file.
        
    Example usage:
        G = run_gpt_with_distgen({'lspch':False},
                       gpt_input_file='$LCLS_LATTICE/gpt/models/gunb_eic/gpt.in',
                       distgen_input_file='$LCLS_LATTICE/distgen/models/gunb_gaussian/gunb_gaussian.json',
                       verbose=True,
                       timeout=None
                      )        
        
    """

    # Call simpler evaluation if there is no generator:
    if not distgen_input_file:
        return run_gpt(settings=settings,
                       gpt_input_file=gpt_input_file,
                       workdir=workdir,
                       use_tempdir=use_tempdir,
                       gpt_bin=gpt_bin,
                       timeout=timeout,
                       verbose=verbose)

    if (verbose):
        print('Run GPT with Distgen:')

    # Make gpt and generator objects
    G = GPT(gpt_bin=gpt_bin,
            input_file=gpt_input_file,
            workdir=workdir,
            use_tempdir=use_tempdir)
    G.timeout = timeout
    G.verbose = verbose

    # Distgen generator
    gen = Generator(verbose=verbose)
    f = full_path(distgen_input_file)
    distgen_params = yaml.safe_load(open(f))

    # Set inputs
    if settings:
        G, distgen_params = set_gpt_and_distgen(G,
                                                distgen_params,
                                                settings,
                                                verbose=verbose)

    # Link particle files
    particle_file = os.path.join(G.path, G.get_dist_file())

    if (verbose):
        print('Linking particle files, distgen output will point to -> "' +
              os.path.basename(particle_file) + '" in working directory.')

    G.set_dist_file(particle_file)

    if ('output' in distgen_params and verbose):
        print('Replacing Distgen output params')

    distgen_params['output'] = {'type': 'gpt', 'file': particle_file}

    if (verbose):
        print('\nDistgen >------\n')
    # Configure distgen
    gen.parse_input(distgen_params)

    # Run
    beam = gen.beam()
    write_gpt(beam, particle_file, verbose=verbose, asci2gdf_bin=asci2gdf_bin)

    if (verbose):
        print('------< Distgen\n')

    if (auto_phase):

        if (verbose):
            print('\nAuto Phasing >------\n')
        t1 = time.time()

        # Create the distribution used for phasing
        if (verbose):
            print('****> Creating initial distribution for phasing...')

        phasing_beam = get_distgen_beam_for_phasing(beam,
                                                    n_particle=10,
                                                    verbose=verbose)
        phasing_particle_file = os.path.join(G.path,
                                             'gpt_particles.phasing.gdf')
        write_gpt(phasing_beam,
                  phasing_particle_file,
                  verbose=verbose,
                  asci2gdf_bin=asci2gdf_bin)

        if (verbose):
            print('<**** Created initial distribution for phasing.\n')

        G.write_input_file()  # Write the unphased input file
        phased_file_name, phased_settings = gpt_phasing(
            G.input_file,
            path_to_gpt_bin=G.gpt_bin[:-3],
            path_to_phasing_dist=phasing_particle_file,
            verbose=verbose)
        G.set_variables(phased_settings)
        t2 = time.time()
        if (verbose):
            print(f'Time Elapsed: {t2-t1} sec.')
            print('------< Auto Phasing\n')

    G.run(gpt_verbose=gpt_verbose)

    return G
    def run(self, inputs, verbose=False):
       
        tag = f'vb24@{self.id}:'

        #----------------------------------------------------------------------------
        # Get laser distribution, cathode quantities, and gun current
        #----------------------------------------------------------------------------
        r_params = {'sigma_xy': dunits(str(inputs[f'{tag}laser:sigma_xy'])),
                    'alpha':    dunits(str(inputs[f'{tag}laser:alpha_xy']))}

        count = self.pvdefs[f'{tag}laser:r']['count']
        laser_wavelength = inputs[f'{tag}laser:wavelength']
        laser_power = inputs[f'{tag}laser:power']
        laser_sigma_xy = inputs[f'{tag}laser:sigma_xy']
        laser_alpha_xy = inputs[f'{tag}laser:alpha_xy']
        laser_avg_x = inputs[f'{tag}laser:mean_x']
        laser_avg_y = inputs[f'{tag}laser:mean_y']

        r_dist = SuperGaussianRad(verbose=False, **r_params)
        rs = (r_dist.get_r_pts(count)).to(self.pvdefs[f'{tag}laser:r']['unit'])
        Pr = (dunits(str(laser_power))*r_dist.rho(rs)).to(self.pvdefs[f'{tag}laser:Pr']['unit'])

        cathode_QE = inputs[f'{tag}cathode:QE']
        cathode_MTE = inputs[f'{tag}cathode:MTE']

        hc = 1*units.h*units.c
        photon_flux = (laser_power/(hc/laser_wavelength) ).to_base_units()
        gun_current = (photon_flux*cathode_QE*(1*units.e)).to(self.pvdefs[f'{tag}gun:current']['unit'])
        #----------------------------------------------------------------------------
       

        #----------------------------------------------------------------------------
        # Create Distgen input and run generator
        #----------------------------------------------------------------------------
        distgen_input = yaml.dump(
                        {'n_particle':inputs[f'{tag}gpt:n_particle'].magnitude,
                         'random_type':'hammersley',
                         'total_charge': {'value': 0.0, 'units': 'pC'},   
                         'start': {
                             'type':'cathode',
                             'MTE': {'value': cathode_MTE.magnitude, 'units': str(cathode_MTE.units)}},

                         'r_dist': {
                             'type':'rsg',
                             'sigma_xy':{'value': laser_sigma_xy.magnitude, 'units': str(laser_sigma_xy.units)},
                             'alpha':{'value': laser_alpha_xy.magnitude, 'units': str(laser_alpha_xy.units)},},

                         'transforms':{
                             't1':{'type':'set_avg x', 'avg_x': {'value': laser_avg_x.magnitude, 'units': str(laser_avg_x.units)}},
                             't2':{'type':'set_avg y', 'avg_y': {'value': laser_avg_y.magnitude, 'units': str(laser_avg_y.units)}}
                         }})
 
        gen = Generator(distgen_input, verbose=True)     
        beam = gen.beam()   
        #----------------------------------------------------------------------------


        #----------------------------------------------------------------------------
        # Configure GPT and run
        #----------------------------------------------------------------------------
        G = GPT(input_file=os.path.join(os.getcwd(),'templates/gpt.in'), 
                initial_particles = ParticleGroup(data=beam.data()), 
                use_tempdir=True,
                workdir=os.path.join(os.getcwd(),'tmp'),
                timeout = 5,
                verbose=True)

        settings = {'gun_voltage':   inputs[f'{tag}gun:voltage'].magnitude, 
                    'sol01_current': inputs[f'{tag}sol1:current'].magnitude,
                    'sol02_current': inputs[f'{tag}sol2:current'].magnitude,
                    'npts':          inputs[f'{tag}gpt:n_screen'].magnitude+1}

        result = G.set_variables(settings)
        G.run()
        #----------------------------------------------------------------------------


        #----------------------------------------------------------------------------
        # Load all relevant data into output structure
        #----------------------------------------------------------------------------
        # laser distribution
        output = {f'{tag}laser:r':rs.magnitude, f'{tag}laser:Pr':Pr.magnitude, f'{tag}gun:current':gun_current.magnitude}

        # GPT statistical data
        stats = {'max':['r'], 'mean':['x', 'y', 'z', 'kinetic_energy'], 'sigma':['x','y']}
        for stat, variables in stats.items():
                output = {**output, **{f'{tag}beam:{stat}_{var}': self.gpt_stat_to_pv(G, f'{stat}_{var}', 'screen').magnitude for var in variables} }

        scr_numbers = [1]
        for scr_number in scr_numbers:
            z = inputs[f'{tag}scr{scr_number}:mean_z'].magnitude
            for var in ['x' ,'y']:
                output[f'{tag}scr{scr_number}:mean_{var}']  = np.interp(z, output[f'{tag}beam:mean_z'], output[f'{tag}beam:mean_{var}'])
                output[f'{tag}scr{scr_number}:sigma_{var}'] = np.interp(z, output[f'{tag}beam:mean_z'], output[f'{tag}beam:sigma_{var}'])
                
        # transmission
        output[f'{tag}beam:transmission'] = [100*len(screen['x'])/inputs[f'{tag}gpt:n_particle'].magnitude for screen in G.screen]
    
        min_clearance = np.min(inputs[f'{tag}beampipe:radius'] - self.gpt_stat_to_pv(G, 'max_r', 'screen')).to('mm')  # clearance between beam pipe and maximum beam radius
        output[f'{tag}beam:radiation'] = output[f'{tag}gun:current']*np.max(output[f'{tag}beam:mean_kinetic_energy'])/min_clearance.magnitude
        #----------------------------------------------------------------------------


        return output
Example #13
def run_astra_with_distgen(settings=None,
                           astra_input_file=None,
                           distgen_input_file=None,
                           workdir=None,
                           astra_bin='$ASTRA_BIN',
                           timeout=2500,
                           verbose=False,
                           auto_set_spacecharge_mesh=True):
    """
    Run Astra with particles generated by distgen. 
    
        settings: dict with keys that can appear in an Astra or distgen Generator input file. 
        
    Example usage:
        A = run_astra_with_distgen({'lspch':False},
                       astra_input_file='$LCLS_LATTICE/astra/models/gunb_eic/astra.in',
                       distgen_input_file='$LCLS_LATTICE/distgen/models/gunb_gaussian/gunb_gaussian.json',
                       verbose=True,
                       timeout=None
                      )        
        
    """

    # Call simpler evaluation if there is no generator:
    if not distgen_input_file:
        return run_astra(settings=settings,
                         astra_input_file=astra_input_file,
                         workdir=workdir,
                         astra_bin=astra_bin,
                         timeout=timeout,
                         verbose=verbose)

    if verbose:
        print('run_astra_with_distgen')

    # Distgen generator
    G = Generator(input=distgen_input_file, verbose=verbose)

    # Make astra objects
    A = Astra(astra_bin=astra_bin,
              input_file=astra_input_file,
              workdir=workdir)
    A.timeout = timeout
    A.verbose = verbose

    # Special
    A.input['newrun']['l_rm_back'] = True  # Remove backwards particles

    # Set inputs
    if settings:
        A.input, G.input = set_astra_and_distgen(A.input,
                                                 G.input,
                                                 settings,
                                                 verbose=verbose)

    # Attach distgen input. This is non-standard.
    A.distgen_input = G.input

    # Run distgen
    G.run()
    P = G.particles

    # Attach to Astra object
    A.initial_particles = P

    if auto_set_spacecharge_mesh:
        n_particles = len(P)
        sc_settings = recommended_spacecharge_mesh(n_particles)
        A.input['charge'].update(sc_settings)
        if verbose:
            print('set spacecharge mesh for n_particles:', n_particles, 'to',
                  sc_settings)

    A.run()

    return A
Example #14
def create_beam(file, parameters_dict=None):
    """Create a beam object from importing a yaml file.

    Arguments:
    file -- yaml file from which the beam is being generated
    parameters_dict -- dictionary of parameters that are being applied to the yaml file (default=None) 
    """
    gen = Generator(file, verbose=0)

    if parameters_dict:

        for param, value in parameters_dict.items():

            if 'n_particle' in param:
                gen.input['n_particle'] = value

            elif 'file' in param:
                gen.input['output']['file'] = value

            elif all(x in param for x in ('output', 'type')):
                gen.input['output']['type'] = value

            elif all(x in param for x in ('sigma_xy', 'units')):
                gen.input['r_dist']['sigma_xy']['units'] = value

            elif all(x in param for x in ('sigma_xy', 'value')):
                gen.input['r_dist']['sigma_xy']['value'] = value

            elif all(x in param for x in ('r_dist', 'type')):
                gen.input['r_dist']['type'] = value

            elif 'random_type' in param:
                gen.input['random_type'] = value

            elif all(x in param for x in ('MTE', 'units')):
                gen.input['start']['MTE']['units'] = value

            elif all(x in param for x in ('MTE', 'value')):
                gen.input['start']['MTE']['value'] = value

            elif all(x in param for x in ('start', 'type')):
                gen.input['start']['type'] = value

            elif all(x in param for x in ('max_t', 'units')):
                gen.input['t_dist']['max_t']['units'] = value

            elif all(x in param for x in ('max_t', 'value')):
                gen.input['t_dist']['max_t']['value'] = value

            elif all(x in param for x in ('min_t', 'units')):
                gen.input['t_dist']['min_t']['units'] = value

            elif all(x in param for x in ('min_t', 'value')):
                gen.input['t_dist']['min_t']['value'] = value

            elif all(x in param for x in ('t_dist', 'type')):
                gen.input['t_dist']['type'] = value

            elif all(x in param for x in ('total_charge', 'units')):
                gen.input['total_charge']['units'] = value

            elif all(x in param for x in ('total_charge', 'value')):
                gen.input['total_charge']['value'] = value

            else:
                print(
                    'ERROR: One or more parameter names were invalid, please check spelling'
                )
                return
    else:
        print('No parameters changed')

    gen.verbose = False
    gen.run()
    return gen
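A short usage sketch for create_beam; the YAML file name and parameter values are assumptions. Parameter names are matched by substring, so keys such as 'n_particle' or 'r_dist:sigma_xy:value' select the corresponding entries of the distgen input.

gen = create_beam('distgen.yaml', parameters_dict={
    'n_particle': 100000,
    'r_dist:sigma_xy:value': 0.3,
    'r_dist:sigma_xy:units': 'mm',
    'total_charge:value': 100.0,
})
PG = gen.particles   # ParticleGroup produced by gen.run()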
Example #15
def evaluate_gpt_with_distgen(settings, 
             archive_path=None, 
             merit_f=None, 
             gpt_input_file=None,
             distgen_input_file=None,
             workdir=None, 
             use_tempdir=True,
             gpt_bin='$GPT_BIN',
             timeout=2500,
             auto_phase=False,
             verbose=False,
             gpt_verbose=False,
             asci2gdf_bin='$ASCI2GDF_BIN',
             kill_msgs=DEFAULT_KILL_MSGS
             ):    
    """
    Simple GPT evaluation.
    
    Similar to run_gpt_with_distgen, but returns a flat dict of outputs.
    
    Will raise an exception if there is an error. 
    
    """
    G = run_gpt_with_distgen(settings=settings,
                             gpt_input_file=gpt_input_file,
                             distgen_input_file=distgen_input_file,
                             workdir=workdir, 
                             use_tempdir=use_tempdir,
                             gpt_bin=gpt_bin,
                             timeout=timeout,
                             auto_phase=auto_phase,
                             verbose=verbose,
                             gpt_verbose=gpt_verbose,
                             asci2gdf_bin=asci2gdf_bin,
                             kill_msgs=kill_msgs)
        
    if merit_f:
        merit_f = tools.get_function(merit_f)
        output = merit_f(G)
    else:
        output = default_gpt_merit(G)
    
    if output['error']:
        raise ValueError('error occured!')
        
    #Recreate Generator object for fingerprint, proper archiving
    # TODO: make this cleaner
    gen = Generator()
    gen.input = G.distgen_input    
    
    fingerprint = fingerprint_gpt_with_distgen(G, gen)
    output['fingerprint'] = fingerprint    
    
    if archive_path:
        path = tools.full_path(archive_path)
        assert os.path.exists(path), f'archive path does not exist: {path}'
        archive_file = os.path.join(path, fingerprint+'.h5')
        output['archive'] = archive_file
        
        # Call the composite archive method
        archive_gpt_with_distgen(G, gen, archive_file=archive_file)          
        
    return output
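A closing usage sketch; file names and keys are assumptions. Since merit_f is passed through tools.get_function, it can apparently also be supplied as an importable 'module.function' string rather than a callable.

out = evaluate_gpt_with_distgen(
    {'distgen:n_particle': 2000},   # assumed distgen key handled by set_gpt_and_distgen
    gpt_input_file='gpt.in',
    distgen_input_file='distgen.yaml',
    archive_path='archive',
    auto_phase=True,
)
print(out['fingerprint'], out['archive'])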