Example #1
def generate_3D_animation(x,
                          y,
                          z,
                          sdir,
                          title="",
                          npts=360,
                          xlabel="",
                          ylabel="",
                          zlabel="",
                          mode='default',
                          get_2D=True):

    tdir = sdir + "/tmp/"
    mkdir_p(tdir)

    # 'default' mode (and any unrecognised mode) performs a full 360 degree
    # sweep of both azimuth and elevation
    alim, elim = 360, 360

    for ang in range(0, 360, int(360 / npts)):
        scatter_3D(x,
                   y,
                   z,
                   sdir=tdir + "{:5d}.png".format(ang),
                   azim=ang % alim,
                   elev=ang % alim,
                   xlabel=xlabel,
                   ylabel=ylabel,
                   zlabel=zlabel)

    animate(tdir, sdir, title + "_3D_animation", rmdir=True)
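For context, a minimal usage sketch of generate_3D_animation as defined above; the point cloud, labels, and save directory are purely illustrative, and the helpers (scatter_3D, animate, mkdir_p) are assumed to be importable from the same module:

import numpy as np

# illustrative data: a random 3D point cloud and a hypothetical output directory
x, y, z = np.random.rand(3, 500)

generate_3D_animation(x, y, z,
                      sdir="./plots",
                      title="random_cloud",
                      npts=120,
                      xlabel="x", ylabel="y", zlabel="z")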
Example #2
def generate_attractor_animation(data, sdir, xlabel="", ylabel="", title=""):

    sns.set()

    tdir = sdir + "/tmp/"
    mkdir_p(tdir)

    if data.ndim == 1:

        for n in range(data.shape[-1] - 1):

            fig, ax1 = plt.subplots(1, 1)

            ax1.plot(data[:n], np.roll(data, 1)[:n])
            ax1.set_xlabel(xlabel)
            ax1.set_ylabel(ylabel)
            ax1.set_title(title)

            fig.savefig(tdir + "{:04d}".format(n))

    elif data.ndim == 2:

        for m in range(data.shape[-1]):

            # n indexes the time axis (axis 0); m indexes the variable (axis 1)
            for n in range(data.shape[0] - 1):
                fig, ax1 = plt.subplots()
                # roll along the time axis to plot x(t) against x(t - 1)
                ax1.plot(data[:n, m], np.roll(data, 1, axis=0)[:n, m])
                ax1.set_xlabel(xlabel)
                ax1.set_ylabel(ylabel)
                ax1.set_title(title)

                fig.savefig(tdir + "{:04d}_{:04d}.png".format(m, n))
                plt.close(fig)

    animate(tdir, sdir, title + "_attractor", rmdir=True)
Example #3
def extract_animation(ii, mesh, fname, sdir, mode='train'):

    tdir = sdir + "/tmp/"
    mkdir_p(tdir)

    if mode == 'all':
        for train in range(ii.shape[-1]):
            for pulse in range(ii.shape[-2]):

                basic_plot(
                    ii[:, :, pulse, train],
                    mesh,
                    sdir=tdir + "train_{}_pulse_{}.png".format(train, pulse),
                    label="train/pulse: {}/{}".format(train + 1, pulse + 1))
    elif mode == 'train':

        ii = mean_intensity(ii, mode='train')

        for train in range(ii.shape[-1]):

            basic_plot(ii[:, :, train],
                       mesh,
                       sdir=tdir + "train_{}.png".format(train),
                       label='train: {}'.format(train + 1))

    animate(indir=tdir, outdir=sdir, fname=fname, delay=0.06)
Example #4
def propagate_ensemble(ensemble,
                       bl,
                       sdir=None,
                       scale_input=None,
                       VERBOSE=True):
    """
    propagate an ensemble of wavefronts (stored in hdf5 files) in ensemble_dir
    down a desired beamline. 
    
    Useful in the case when there is a lot of ensembles
    
    :param ensemble: list of strings pointing to hdf5 wavefront files
    :param scale: bool or list of scaling parameters [nx, ny, fov] for scaling 
    of input wavefield (to be updated)
    """
    for file in ensemble:

        wfr = load_wfr(file)

        if scale_input is not None and type(scale_input) == list:
            # rescale to [nx, ny, fov]
            scale(wfr, scale_input[0], scale_input[1], scale_input[2])
        elif scale_input:
            wfr = scale(wfr)

        bl.propagate(wfr)

        if sdir is not None:
            mkdir_p(sdir)
            wfr.store_hdf5(sdir + file)
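A hedged usage sketch for propagate_ensemble; the wavefront file names are placeholders, and the beamline construction borrows Drift and propagation_parameters as they appear in Example #5:

# illustrative only: propagate two wavefront files down a single-drift beamline
ensemble = ["wfr_0001.h5", "wfr_0002.h5"]

bl = Beamline()
bl.append(Drift(3.5), propagation_parameters(1, 1, 1, 1, 'quadratic'))

propagate_ensemble(ensemble, bl, sdir="./propagated/", scale_input=None)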
Example #5
def propagate_NVE():
            
    wfr_directory = sys.argv[1].replace("*", "/")
 
    job_name = "NKB_4980eV_250pC_NVE_to_EHC"
    python_command = propagate_NVE
    input_directory = dCache + "/NanoKB-Pulse/NVE/"
    
    save_directory = input_directory.replace("/NVE/", "/EHC/")
    mkdir_p(save_directory)
    
    log_directory = logs
    
    focus = "nano"
    analysis = False
    
    filename = __file__
    dt = datetime.now().__str__()
    function = python_command.__name__
    
    description = "Propagate NanoKB Pulses 4.98 keV, 250 pC from Undulator NVE mirror to the EHC Screen"
    
    append = None
    
    print("info")
    print("wavefront directory: {}".format(wfr_directory))
    print("save directory: {}".format(save_directory))
    print("focus (i.e. beamline option): {}".format(focus))
    print("analysis: {}".format(analysis))
    print("datetime: {}".format(dt))
    print("filename: {}".format(filename))
    print("function: {}".format(function))
    print("description: {}".format(description))
    
  
    wfr = Wavefront()
    wfr.load_hdf5(wfr_directory)
     
    bl = Beamline()
    bl.append(Drift(2.2 + 3.5), propagation_parameters(1/3, 1, 1/3, 1, 'quadratic'))
    bl.propagate(wfr)
    
    wfr.custom_fields['focus'] = focus
    wfr.custom_fields['job name'] = job_name
    wfr.custom_fields['input directory'] = wfr_directory
    wfr.custom_fields['datetime'] = dt
    wfr.custom_fields['function'] = function
    wfr.custom_fields['filename'] = filename
    wfr.custom_fields['description'] = description
    #wfr.custom_fields['bl'] = bl.__str__
                
    if analysis: 
        wfr.analysis()
        
    wfr.store_hdf5(wfr_directory.replace("/NVE/", "/EHC/"))
Example #6
def propagate_from_ensemble(wfr_file,
                            ensemble_dir,
                            bl,
                            ekev,
                            sdir=None,
                            scale_input=None,
                            VERBOSE=True):
    """
    propagate a single wavefont from an ensemble of wavefronts (stored in hdf5 files) 
    down a desired beamline. This script can be used in conjunction with jobscheduler
    
    :param wfr_file: llocation of .hdf5 file
    :param scale: bool or list of scaling parameters [nx, ny, fov] for scaling 
    of input wavefield (to be updated)
    """
    if VERBOSE:
        print("loading wavefront")
        print("ensemble dir: {}".format(ensemble_dir))
        print("file: {}".format(wfr_file))

    wfr = load_wfr(ensemble_dir + "/" + wfr_file)
    srwlib.srwl.SetRepresElecField(wfr._srwl_wf, 'f')

    if VERBOSE:
        print("wavefront loaded")
        print(wfr.srw_info())

    if scale_input is not None and type(scale_input) == list:
        # rescale to [nx, ny, fov]
        scale(wfr, scale_input[0], scale_input[1], scale_input[2])
    elif scale_input:
        wfr = scale(wfr)
        if VERBOSE:
            print("wavefront scaled")

    if VERBOSE:
        print("propagating wavefront")
        print(bl)

    bl.propagate(wfr)

    if VERBOSE:
        print("wavefront propagated")

    if sdir is not None:
        mkdir_p(sdir)
        wfr.store_hdf5(sdir + wfr_file)
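The docstring notes that this function is intended for use with the job scheduler; as a sketch, a direct call might look like the following, where the file name, directories, and beamline are placeholders:

# illustrative direct call; with the scheduler, each wfr_file would come from a job array
propagate_from_ensemble("wfr_0001.h5",
                        ensemble_dir="./ensemble",
                        bl=bl,        # a pre-built beamline object, as in Example #5
                        ekev=4.96,
                        sdir="./propagated/")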
Example #7
def propagation_batch_launcher(input_directory, sdir, focus, analysis=False,
                               crop=None, append=None, descriptor="",
                               VERBOSE=True):
    """
    This part launches the jobs that run in main 
    """
    
    wfr_directory = input_directory

    mkdir_p(sdir)
    
    cwd = os.getcwd()
    script = __file__
    filename = script
    function = inspect.currentframe().f_code.co_name

    js = JobScheduler(cwd + "/" + script, logDir=logs + "/",
                      jobName="NVE_4.96keV", partition='exfel', nodes=2,
                      jobType='array', jobArray=wfr_directory,
                      options=[sdir, focus, analysis, crop, append, descriptor])

    js.run(test=False)
Example #8
def preprocess_shimadzu_data(proposal, exp, run, px):

    sdir = dCache + "/NKB_sensing/whitefield/" + run + "/"
    print("Saving results to dCache: {}".format(sdir))
    mkdir_p(sdir)
    print("Directory exists: {}".format(os.path.exists(sdir)))

    data = load_data(run, proposal, exp)

    ii = data.get_array('SPB_EHD_HPVX2_1/CAM/CAMERA:daqOutput',
                        'data.image.pixels')

    ii = shimadzu_reshape(ii)

    mesh = get_mesh(ii, px, px)
    print("Mesh Shape: {}".format(mesh.shape))

    print("")
    print("Shimadzu Data Pre-Processed")
    return ii, mesh
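For illustration, a sketch of how the pre-processed Shimadzu data might feed the animation helper from Example #3; the proposal, experiment, run, and pixel-size values below are placeholders, not real identifiers:

# placeholder experiment identifiers and pixel size (metres)
ii, mesh = preprocess_shimadzu_data(proposal=900121, exp="SPB", run="r0047", px=6.5e-06)

# hand the reshaped intensity stack to the animation helper from Example #3
extract_animation(ii, mesh, fname="r0047_trains", sdir="./animations/", mode='train')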
Example #9
    return arr


if __name__ == '__main__':

    array_dir = dCache + "/whitefield_data/cropped_intensity_r0047.npy"

    train_no = int(sys.argv[1])

    method = [get_normalised_difference, get_second_order_doc]

    if isinstance(method, list):

        for m in method:

            array = np.load(array_dir).astype('float64')[:, :, 3:103:2, :]
            array = np.delete(array, 24, axis=-2)
            map_loc = dCache + "/tmp/"
            sdir = dCache + "/whitefield_data/r0047/"
            mkdir_p(sdir)

            sdir += "{}/".format(m.__name__)

            mkdir_p(map_loc)
            mkdir_p(sdir)

            get_intensity_autocorrelation_train(train_no,
                                                array,
                                                map_loc=map_loc,
                                                sdir=sdir,
                                                method=m)
Example #10
from felpy.utils.os_utils import mkdir_p
from felpy.utils.job_utils import JobScheduler
 
from felpy.model.wavefront import Wavefront
from labwork.about import logs, dCache

indir = dCache + "NanoKB-Pulse/source/"
outdir = dCache + "NanoKB-Pulse/source_data/"

mkdir_p(outdir)
tmp_dir = outdir + "/tmp/"
mkdir_p(tmp_dir)

intensity_dir = tmp_dir + "/integrated_intensity/"
mkdir_p(intensity_dir)

complex_dir = tmp_dir + "/complex_wavefield/"
mkdir_p(complex_dir)



def launch():
    """
    This part launches the jobs that run in main 
    """
def propagate_NVE():

    wfr_directory = sys.argv[1].replace("*", "/")

    job_name = "NKB_4980eV_250pC_Source_to_NVE"
    python_command = propagate_NVE
    input_directory = dCache + "/NanoKB-Pulse/source/"

    save_directory = input_directory.replace("/source/", "/NVE/")
    mkdir_p(save_directory)

    log_directory = logs

    focus = "nano"
    analysis = False

    filename = __file__
    dt = datetime.now().__str__()
    function = python_command.__name__

    description = "Propagate NanoKB Pulses 4.98 keV, 250 pC from Undulator-exit to the NVE"

    crop = 'NVE'
    append = None

    print("info")
    print("wavefront directory: {}".format(wfr_directory))
    print("save directory: {}".format(save_directory))
    print("focus (i.e. beamline option): {}".format(focus))
    print("analysis: {}".format(analysis))
    print("datetime: {}".format(dt))
    print("filename: {}".format(filename))
    print("function: {}".format(function))
    print("description: {}".format(description))

    wfr = Wavefront()
    wfr.load_hdf5(wfr_directory)
    wfr.set_electric_field_representation('frequency')

    wfr = scale(wfr)

    print("wfr domain: {}".format(wfr.params.wDomain))

    bl = get_beamline_object(ekev=4.96,
                             options=focus,
                             crop=crop,
                             theta_KB=5e-03,
                             theta_HOM=3.5e-03)

    wfr.custom_fields['focus'] = focus
    wfr.custom_fields['job name'] = job_name
    wfr.custom_fields['input directory'] = wfr_directory
    wfr.custom_fields['datetime'] = dt
    wfr.custom_fields['function'] = function
    wfr.custom_fields['filename'] = filename
    wfr.custom_fields['description'] = description
    #wfr.custom_fields['bl'] = bl.__str__

    bl.propagate(wfr)

    if analysis:
        wfr.analysis()

    wfr.store_hdf5(wfr_directory.replace("/source/", "/NVE/"))
Example #12
    nph = get_energy_statistics(wfr)[1]
    print("Mirror Angle: {} mrad Complete".format(ang*1e3
                                                 ))
    return (ang, nph)
    
    MIRRORS = ['HOM1', 'HOM2', 'NHE', 'NVE']
    
    if len(sys.argv) > 1:    
        ekev = float(sys.argv[1])
    else:
        ekev = 5.0
        
    edir = "./{}keV/".format(ekev)
    mkdir_p(edir)
    for MIRROR in MIRRORS:    
        sdir = edir + "/{}/".format(MIRROR)
        mkdir_p(sdir)

        spb = Instrument(VERBOSE=False)
        pool = mp.Pool(PROCESSES)
        r = pool.map(partial(core, mirror_name=MIRROR, ekev=ekev),
                     tqdm(np.linspace(spb.params[MIRROR]['ang_min'],
                                      spb.params[MIRROR]['ang_max'],
                                      N)))
        
        
        #animate(sdir, edir, "{}_mirror_rotation".format(MIRROR))
        
        np.save(edir + "{}_mirror_flux_data".format(MIRROR), r)
Example #13
    def __init__(self,
                 python_command,
                 job_name,
                 log_directory,
                 partition='exfel',
                 nodes=1,
                 job_type='single',
                 job_array=None,
                 n_spawns=1,
                 VERBOSE=True,
                 runtime="14-00:00:00",
                 email="*****@*****.**",
                 mailtype="ALL",
                 options=None,
                 rundir=None):
        """
        
        :param job_type: options = spawn, array, single 
        """

        # determine whether we were handed a script path or a python callable
        if isinstance(python_command, str):
            self.command = "script"
        elif callable(python_command):
            self.command = "method"

        self.python_command = python_command
        self.job_name = job_name
        self.partition = partition
        self.nodes = nodes
        self.job_type = job_type
        self.job_array = job_array
        self.n_spawns = n_spawns

        self.VERBOSE = VERBOSE

        if self.VERBOSE:
            print("\nInitialising Job Scheduler\n")
        if rundir:
            self.rundir = rundir
        else:
            self.rundir = os.getcwd()

        self.runTime = runtime
        self.log_directory = log_directory + job_name + "/"
        self.jobDir = log_directory + "jobs/" + job_name + "/"
        self.outDir = log_directory + "out/" + job_name + "/"
        self.errDir = log_directory + "error/" + job_name + "/"
        self.email = email
        self.mailtype = mailtype
        self.options = options

        if os.path.exists(self.jobDir):
            shutil.rmtree(self.jobDir)
        mkdir_p(self.jobDir)

        if os.path.exists(self.outDir):
            shutil.rmtree(self.outDir)
        mkdir_p(self.outDir)

        if os.path.exists(self.errDir):
            shutil.rmtree(self.errDir)
        mkdir_p(self.errDir)

        if self.VERBOSE:
            self.__str__()
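A minimal construction sketch for the scheduler above, using only parameters that appear in its signature; the workload function, job name, and log path are illustrative:

def my_job():
    # placeholder workload to be dispatched by the scheduler
    pass

js = JobScheduler(python_command=my_job,
                  job_name="example_job",
                  log_directory="./logs/",
                  partition='exfel',
                  nodes=1,
                  job_type='single')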
Example #14
        nodes = sys.argv[5]
    else: 
        nodes = 2
    
    mode = input("batch or single?")
        
    if mode == 'single':
        print("Launching Single Job")
        launch_single(in_directory,
                      out_directory,
                      job_name,
                      logs,
                      nodes)
       
    elif mode == 'batch':
        print("Launching Batch Job")
        mkdir_p(out_directory)
        for item in os.listdir(in_directory):
            
            launch_single(in_directory + "{}".format(item),
                          out_directory + "{}".format(item),
                          job_name,
                          logs,
                          nodes)

    if DEBUG: 
        print(in_directory)
        print(out_directory)
        print(job_name)
        print(logs)
        print("nodes: ",nodes)
Example #15
    bl = get_beamline_object(ekev, apertures=True, surface=True,
                             crop=["d1", "HOM1"], theta_HOM=angle)
    
    bl.propagate(wfr)
     
    return wfr.get_intensity().sum()


if __name__ == '__main__':
    
    from labwork.about import dCache
    from felpy.utils.os_utils import mkdir_p
    
    try:
        SDIR = "./mirror_reflectivity/"
        mkdir_p(SDIR)

    except FileNotFoundError:
        SDIR = input("Save Directory: ")
        mkdir_p(SDIR)

    
    #ii = no_mirror()
    
    energies = [5.0, 7.0, 9.0, 11.0, 12.0]
    angles = np.linspace(0, 10e-03, 35)
    
    noap = memory_map(SDIR + "mirror_refl_no_aperture",
                      shape=(len(energies), len(angles), 2))
    ap = memory_map(SDIR + "mirror_refl_aperture",
                    shape=(len(energies), len(angles), 2))
    
    cpus = mpi.cpu_count() // 2