f_calib.close()

# get DAQ quantities (only scalars)
df, filenames = ut.analysis.get_data_daq(fname, daq_labels, sacla_converter, t0=0, selection=sel)

# get laser on/off tags
is_laser_on_tags = df[df.is_laser == 1].index.tolist()
is_laser_off_tags = df[df.is_laser == 0].index.tolist()

# get spectra from Von Hamos, using laser on / off tags
#roi = [[0, 1024], [325, 335]]  # X, Y
ap = ImagesProcessor(facility="SACLA")
ap.add_analysis('get_projection', args={"axis": 1})
ap.add_analysis('get_mean_std')
ap.set_dataset('/run_%s/detector_2d_1' % run)
ap.add_preprocess("set_thr", args={"thr_low": 65})

# get the total spectra
results_on = ap.analyze_images(fname, tags=is_laser_on_tags)
spectrum_on = results_on["get_projection"]["spectra"].sum(axis=0)
results_off = ap.analyze_images(fname, tags=is_laser_off_tags)
spectrum_off = results_off["get_projection"]["spectra"].sum(axis=0)

# normalize each spectrum to its integral
spectrum_on = spectrum_on / spectrum_on.sum()
spectrum_off = spectrum_off / spectrum_off.sum()

# this is the average image from the Von Hamos
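# A minimal follow-up sketch (not part of the original script): plotting the
# normalized laser on/off spectra and their difference. It assumes matplotlib
# is available; the x axis is simply the pixel index along the projection.
import matplotlib.pyplot as plt
import numpy as np

pixels = np.arange(spectrum_on.shape[0])
fig, (ax0, ax1) = plt.subplots(2, 1, sharex=True)
ax0.plot(pixels, spectrum_on, label="laser on")
ax0.plot(pixels, spectrum_off, label="laser off")
ax0.set_ylabel("normalized intensity")
ax0.legend()
ax1.plot(pixels, spectrum_on - spectrum_off)
ax1.set_ylabel("on - off")
ax1.set_xlabel("pixel")
plt.show()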
if __name__ == "__main__":
    # set filename and dataset name
    #DIR = "/swissfel/photonics/data/2014-11-26_SACLA_ZnO/full_hdf5/256635-257499/"
    DIR = "/home/sala/Work/Data/SACLA/"
    #fname = "/home/sala/Work/Data/Sacla/ZnO/258706_roi.h5"
    fname = DIR + "259408_roi.h5"
    dataset_name = "/run_259408/detector_2d_1"

    # set up parameters for ROI and threshold
    roi = [[0, 1024], [0, 400]]
    thr = 65

    # create an ImagesProcessor object
    an = ImagesProcessor(facility="SACLA")
    # if you want a flat dict as a result
    an.flatten_results = True

    # add analyses
    an.add_analysis("get_projection", args={'axis': 1, 'thr_low': thr})
    an.add_analysis("get_mean_std", args={'thr_low': thr})
    bins = np.arange(-150, 300, 5)
    an.add_analysis("get_histo_counts", args={'bins': bins})
    an.add_analysis(get_line_histos, args={'axis': 0, 'bins': bins})

    # set the dataset
    an.set_dataset(dataset_name)
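    # A possible continuation sketch (not part of the original script): run the
    # configured processor and inspect the flattened results. With
    # flatten_results = True, the result keys come directly from the individual
    # analyses ("spectra" from get_projection, "images_mean" from get_mean_std),
    # following their usage elsewhere in these scripts.
    results = an.analyze_images(fname, n=1000)  # only the first 1000 images
    if "spectra" in results:
        total_spectrum = results["spectra"].sum(axis=0)
        print("Total projected spectrum, first bins:", total_spectrum[:10])
    if "images_mean" in results:
        print("Mean image shape:", results["images_mean"].shape)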
def compute_rixs_spectra(dataset_name, df, thr_low=0, thr_hi=999999):
    # In principle, a single run can contain *multiple mono settings*, so we need
    # to load data from all the runs, and then group them by mono energy.
    # `Pandas` can help us with that: we load all data from files, place it in a
    # `DataFrame`, add some useful derived quantities, and finally use `tags` as
    # index for the `DataFrame`.
    runs = sorted(df.run.unique())
    print(runs)
    # label for ascii output dump
    out_label = "rixs_%s-%s" % (runs[0], runs[-1])

    delay = df.delay.unique()
    if len(delay) > 1:
        print("More than one delay setting in the selected run range, exiting")
        sys.exit(-1)
    delay = delay[0]  # a single setting: use the scalar value

    print("\nAvailable energy settings")
    print(df.photon_mono_energy.unique(), "\n")

    # Now we can run the analysis. For each energy value and each run, a *list of
    # tags* is created, such that events have the same mono energy and are part of
    # the same run (as each run is in a separate file). For each of these lists,
    # we run the `ImagesProcessor` and create the required spectra, for laser on
    # and off.

    # the mono energies contained in the files
    energies_list = sorted(df.photon_mono_energy.unique().tolist())
    fnames = [DIR + str(run) + "_roi.h5" for run in runs]

    # the ImagesProcessor
    an = ImagesProcessor(facility="SACLA")
    # if you want a flat dict as a result
    an.flatten_results = True

    # add analyses
    an.add_analysis("get_projection", args={'axis': 1, 'thr_low': thr_low, 'thr_hi': thr_hi})
    an.add_analysis("get_mean_std", args={'thr_low': thr_low})
    bins = np.arange(-150, 1000, 5)
    an.add_analysis("get_histo_counts", args={'bins': bins})
    # the dataset name template uses the last run (under Python 2 the loop
    # variable `run` leaked out of the comprehension above with this value)
    an.set_dataset("/run_%s/%s" % (str(runs[-1]), dataset_name))

    # run the analysis
    n_events = -1
    spectrum_on = None
    spectrum_off = None

    # multiprocessing import
    from multiprocessing import Pool

    # initialization of the RIXS maps.
    # Element 0 is laser_on, element 1 is laser_off
    rixs_map = [np.zeros((len(energies_list), 1024)),
                np.zeros((len(energies_list), 1024))]
    rixs_map_std = [np.zeros((len(energies_list), 1024)),
                    np.zeros((len(energies_list), 1024))]

    n_events = -1
    spectrum = [None, None]
    total_results = {}
    events_per_energy = [{}, {}]

    for i, energy in enumerate(energies_list):
        async_results = []  # list for results
        events_per_energy[0][energy] = 0
        events_per_energy[1][energy] = 0
        energy_masks = []

        # creating the pool
        pool = Pool(processes=8)

        # looping on the runs
        for j, run in enumerate(runs):
            df_run = df[df.run == run]
            energy_masks.append(df_run[df_run.photon_mono_energy == energy])
            # apply the analysis
            async_results.append(pool.apply_async(an, (fnames[j], n_events, energy_masks[j].index.values)))

        # closing the pool
        pool.close()
        # waiting for all results
        results = [r.get() for r in async_results]
        print("Got results for energy", energy)

        # producing the laser on/off maps
        for j, run in enumerate(runs):
            if run not in total_results:
                total_results[run] = {}
            if "spectra" not in results[j]:
                continue
            df_run = df[df.run == run]
            energy_mask = energy_masks[j]
            laser_masks = [None, None]
            # ensure boolean masks, so that ~ inverts them correctly
            if n_events != -1:
                laser_masks[0] = energy_mask.is_laser.values[:n_events].astype(bool)
            else:
                laser_masks[0] = energy_mask.is_laser.values.astype(bool)
            laser_masks[1] = ~laser_masks[0]

            for laser in [0, 1]:
                norm = np.count_nonzero(~np.isnan(results[j]["spectra"][laser_masks[laser]][:, 0]))
                events_per_energy[laser][energy] += norm
                # normalize each shot by the I0 of the same tags that produced these spectra
                spectrum = np.nansum((results[j]["spectra"][laser_masks[laser]].T / energy_mask[laser_masks[laser]].I0.values).T, axis=0)
                spectrum_events = np.nansum(results[j]["spectra"][laser_masks[laser]], axis=0)
                rixs_map[laser][energies_list.index(energy)] += spectrum
                rixs_map_std[laser][energies_list.index(energy)] += spectrum_events

            total_results[run][energy] = {}
            total_results[run][energy]["results"] = results[j]
            total_results[run][energy]["laser_on"] = laser_masks[0]

    for laser in [0, 1]:
        for energy in list(events_per_energy[0].keys()):
            rixs_map[laser][energies_list.index(energy)] /= events_per_energy[laser][energy]
        rixs_map_std[laser] = rixs_map[laser] / np.sqrt(rixs_map_std[laser])
        np.savetxt("%s_map_%s_%dps.txt" % (out_label, "on" if laser == 0 else "off", delay), rixs_map[laser])
    #np.savetxt("%s_map_%dps_energies.txt" % (out_label, delay), sorted(events_per_energy[0].keys()))
    return rixs_map, rixs_map_std, total_results
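# Hypothetical usage sketch for compute_rixs_spectra (dataset name and thresholds
# are placeholders): `df` must be a tag-indexed DataFrame carrying the columns
# used above (run, delay, photon_mono_energy, is_laser, I0), e.g. as assembled
# with ut.analysis.get_data_daq. Shown commented out, since it needs the per-run
# HDF5 files under DIR:
#
# rixs_map, rixs_map_std, total_results = compute_rixs_spectra("detector_2d_1", df, thr_low=65, thr_hi=500)
# # rixs_map[0] is the laser-on map, rixs_map[1] the laser-off map:
# # one row per mono energy, one column per detector pixel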
CSVDIR = "<base_path>/data/"
saveDir = "<base_path>/analyzed_runs/"
img_save_dir = "<base_path>/analyzed_runs/imgAna/"

# Continue execution whenever a figure is created
# (all figures will be shown by the end of the script run)
plt.ion()

# INPUT PARAMETERS
thr = 50  # pixel threshold value
roi = [[450, 520], [240, 280]]  # SL [[xmin, xmax], [ymin, ymax]]
# roi = [[420, 470], [190, 230]]  # Bragg peak [[xmin, xmax], [ymin, ymax]]
# bkgRoi = np.array(roi) #+ np.array([[-40, 40], [-40, 40]])
bkgRoi = np.array(roi)

# create ImagesProcessor object
ip = ImagesProcessor(facility="SACLA")
# if you want a flat dict as a result
ip.flatten_results = True

# PREPROCESS FUNCTIONS (bkg sub, masks, ...)
# (comment these out to skip dark/background image subtraction)
dark = np.load('/home/usov_i/SACLA Dec2015/python_scripts2016/analysis/dark_439011and02comb.npy')
ip.add_preprocess("subtract_correction", args={"sub_image": dark})
ip.add_preprocess("set_thr", args={"thr_low": thr})

# ANALYSIS FUNCTIONS
ip.add_analysis("get_mean_std")  # , args={'thr_low': thr})
bins = np.arange(-50, 600, 2)
ip.add_analysis("get_histo_counts", args={'bins': bins, 'roi': roi})
ip.add_analysis("roi_bkgRoi", args={'roi': roi, 'bkg_roi': bkgRoi})
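# A possible continuation sketch (not part of the original script): process one
# run with the configured ImagesProcessor and look at the mean image. The run
# number, file path, and dataset name are assumptions following the conventions
# of the other scripts in this collection.
run = "439010"  # hypothetical run number
fname = "<base_path>/data/" + run + "_roi.h5"
ip.set_dataset("/run_%s/detector_2d_1" % run)
results = ip.analyze_images(fname, n=-1)
if "images_mean" in results:
    plt.figure()
    plt.imshow(results["images_mean"])
    plt.title("run %s: mean image" % run)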
def bin_tt_COM(df, bin_edges, rname, fname, calibration=0.01, roi=[[235, 270], [500, 540]]):
    """
    Bin data according to the timing tool and perform a center of mass (COM)
    analysis of the ROI.

    This function is somewhat redundant with the image analysis, as it loops
    again through all the images.
    """
    # create corrected delay
    df["dl_corr"] = df.delay + calibration * df.tt
    bin_size = bin_edges[1] - bin_edges[0]
    df_xon = df[df.x_status == 1]
    df_lon = df_xon[df_xon.laser_status == 1]
    df_loff = df_xon[df_xon.laser_status == 0]
    bin_center = bin_edges[:-1] + 0.5 * bin_size
    df_out = pd.DataFrame(bin_center, columns=["time"])

    if len(df_lon) != 0:
        binned_int_lon = stats.binned_statistic(df_lon.dl_corr, df_lon.intensity, bins=bin_edges, statistic="mean")
        binned_bkg_lon = stats.binned_statistic(df_lon.dl_corr, df_lon.bkg, bins=bin_edges, statistic="mean")
        binned_I0_lon = stats.binned_statistic(df_lon.dl_corr, df_lon.I0, bins=bin_edges, statistic="mean")
        df_out["intensity_lon"] = binned_int_lon.statistic
        df_out["bkg_lon"] = binned_bkg_lon.statistic
        df_out["I0_lon"] = binned_I0_lon.statistic
    else:
        print("No laser ON shots")

    if len(df_loff) != 0:
        binned_int_loff = stats.binned_statistic(df_loff.dl_corr, df_loff.intensity, bins=bin_edges, statistic="mean")
        binned_bkg_loff = stats.binned_statistic(df_loff.dl_corr, df_loff.bkg, bins=bin_edges, statistic="mean")
        binned_I0_loff = stats.binned_statistic(df_loff.dl_corr, df_loff.I0, bins=bin_edges, statistic="mean")
        df_out["I0_loff"] = binned_I0_loff.statistic
        df_out["bkg_loff"] = binned_bkg_loff.statistic
        df_out["intensity_loff"] = binned_int_loff.statistic
    else:
        print("No laser OFF shots")

    # COM analysis: loop through the bins and load the images corresponding to
    # each bin. The COM of the averaged image in the bin is taken and written
    # into the df_out dataframe.
    binnumber = binned_int_lon.binnumber
    peakCOM = np.zeros([len(df_out.time), 2])
    dataset_name = "/run_" + rname + "/detector_2d_1"
    ip = ImagesProcessor(facility="SACLA")
    ip.flatten_results = True
    ip.set_dataset(dataset_name)
    ip.add_preprocess("set_roi", args={"roi": roi})
    ip.add_analysis("get_mean_std")

    for ii in range(len(df_out.time)):
        n = ii + 1
        ismember = binnumber == n
        # binnumber refers to the laser-on shots, so take the tags from df_lon
        tagList = df_lon.index[ismember]
        results = ip.analyze_images(fname, n=-1, tags=tagList)
        if "images_mean" in results:
            peakCOM[ii, :] = ndimage.center_of_mass(results["images_mean"])
        else:
            peakCOM[ii, :] = np.nan
        del results
        print("bin number %s" % n)

    df_out["COMx"] = peakCOM[:, 0]
    df_out["COMy"] = peakCOM[:, 1]
    return df_out
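# Hypothetical usage sketch for bin_tt_COM (run number, file name, and bin range
# are placeholders): `df` must be a tag-indexed DataFrame with the columns used
# above (delay, tt, x_status, laser_status, intensity, bkg, I0). Shown commented
# out, since it requires the per-run HDF5 file:
#
# bin_edges = np.arange(-2.0, 5.0, 0.1)  # delay bins, in the units of df.delay
# rname = "439010"
# fname = "<base_path>/data/" + rname + "_roi.h5"
# df_out = bin_tt_COM(df, bin_edges, rname, fname, calibration=0.01)
# df_out.to_csv(saveDir + "run_%s_tt_com.csv" % rname)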