import numpy as np
from astropy import units as u
from CHECLabPy.core.io import HDF5Reader


def calculate_t_norm(row, mc_index, norm, n_simulated):
    # NOTE: signature inferred from the call below; e_min, e_max, area and
    # solid_angle are expected to be derived from the MC run-header row
    # (that unpacking is not shown here).
    a = e_min.to_value('TeV')
    b = e_max.to_value('TeV')
    norm_ = norm.to(1 / (u.TeV * u.s * u.m**2 * u.sr)) * u.TeV
    angle = (1 - np.cos(solid_angle.to_value('rad'))) * 2 * np.pi * u.sr
    integral = norm_ * (b**(mc_index + 1) - a**(mc_index + 1)) / (mc_index + 1)
    t_norm = (n_simulated / (integral * area * angle)).to_value(u.s)
    return t_norm


path = "/Volumes/gct-jason/astri_onsky_archive/d2019-05-15_simulations/proton.h5"
# path = "/Volumes/gct-jason/astri_onsky_archive/d2019-10-03_simulations/gamma_1deg.h5"
norm = 9.6e-9 / (u.GeV * u.cm**2 * u.s * u.sr)  # flux normalisation (GeV^-1 cm^-2 s^-1 sr^-1)
index = -2.7  # spectral index

r = HDF5Reader(path)
df = r.read("mc")
# df_stats = pd.DataFrame(dict(same=df.min() df.max(), df.min(), df.max()])
print(df.iloc[0])
# print(df_stats)
# print(df.max())

# Total number of simulated showers (including shower reuse)
n_simulated = (df['num_showers'].values * df['shower_reuse'].values).sum()

for _, row in df.iterrows():
    t_norm = calculate_t_norm(row, index, norm, n_simulated)
    # print(t_norm)
    # t.append(,)

print(t_norm)
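# For reference, a restatement of what calculate_t_norm evaluates above
# (taken directly from the code, with Phi_0 = norm, Gamma = mc_index and
# theta = the viewcone half-angle):
#
#   I      = Phi_0 * (E_max**(Gamma + 1) - E_min**(Gamma + 1)) / (Gamma + 1)
#   Omega  = 2 * pi * (1 - cos(theta))
#   t_norm = N_simulated / (I * A * Omega)      [seconds]
#
# i.e. the equivalent observation time to which the simulated sample
# corresponds for the assumed power-law flux.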
def main(): path = get_data("d190522_hillas_over_campaign/hillas.h5") # path = get_data("d190522_hillas_over_campaign/hillas_old.h5") path_mc = get_astri_2019("d2019-05-15_simulations/proton.h5") with HDF5Reader(path) as reader: df = reader.read("data") mapping = reader.get_mapping() df = df.loc[(df['width'] * df['length'] / df['concentration_1']) < 20] # df = df.loc[df['intensity'] > 1000*4] print(f"N_EVENTS={df.index.size}") with HDF5Reader(path_mc) as reader: df_mc = reader.read("data") mapping_mc = reader.get_mapping() output_dir = get_plot("d190522_hillas_over_campaign/cog") for _, group in df.groupby("iinv"): investigation = group.iloc[0]['investigation'] x = group['x'].values y = group['y'].values p_cog = COGPlotter() p_cog.plot(x, y, mapping) p_cog.save(join(output_dir, f"{investigation}.png"), dpi=1000) x = df['x'].values y = df['y'].values p_cog = COGPlotter() p_cog.plot(x, y, mapping) p_cog.save(join(output_dir, f"all.png"), dpi=1000) image = bin_cog(x, y, mapping) ci = CameraImage.from_mapping(mapping) ci.image = image ci.add_colorbar() ci.save(join(output_dir, f"all_image.png"), dpi=1000) df_week1 = df.loc[df['iinv'] < 6] x = df_week1['x'].values y = df_week1['y'].values p_cog = COGPlotter() p_cog.plot(x, y, mapping) p_cog.save(join(output_dir, "week1.png"), dpi=1000) camera = bin_cog(x, y, mapping) centre = camera.reshape((32, 64))[[12, 13, 18, 19]].ravel() ci = CameraImage.from_mapping(mapping) ci.image = camera ci.add_colorbar() ci.save(join(output_dir, f"week1_image.png"), dpi=1000) p_hist = Hist() p_hist.plot(camera, "Camera") p_hist.plot(centre, "Centre") p_hist.save(join(output_dir, f"week1_hist.png"), dpi=1000) mean_camera = np.mean(camera) mean_centre = np.mean(centre) std_camera = np.std(camera) std_centre = np.std(centre) print(f"Week1: camera_mean={mean_camera}, camera={std_camera/mean_camera}, centre={std_centre/mean_centre}") df_week2 = df.loc[df['iinv'] >= 6] x = df_week2['x'].values y = df_week2['y'].values p_cog = COGPlotter() p_cog.plot(x, y, mapping) p_cog.save(join(output_dir, "week2.png"), dpi=1000) x = df_mc['x'].values y = df_mc['y'].values p_cog = COGPlotter() p_cog.plot(x, y, mapping_mc) p_cog.save(join(output_dir, f"mc.png"), dpi=1000) camera = bin_cog(x, y, mapping) centre = camera.reshape((32, 64))[[12, 13, 18, 19]].ravel() ci = CameraImage.from_mapping(mapping) ci.image = camera ci.add_colorbar() ci.save(join(output_dir, f"mc_image.png"), dpi=1000) p_hist = Hist() p_hist.plot(camera, "Camera") p_hist.plot(centre, "Centre") p_hist.save(join(output_dir, f"mc_hist.png"), dpi=1000) mean_camera = np.mean(camera) mean_centre = np.mean(centre) std_camera = np.std(camera) std_centre = np.std(centre) print(f"MC: camera_mean={mean_camera}, camera={std_camera/mean_camera}, centre={std_centre/mean_centre}")
def main():
    initial = dict(
        eped=-1,
        eped_sigma=0.27,
        pe=2.4,
        pe_sigma=0.06,
        opct=0.25,
        lambda_0=0.63,
        lambda_1=0.84,
        lambda_2=1.07,
    )

    path = get_data(f"d200805_charge_resolution/1_extract_lab_old/charge_0MHz_{matched_voltage}mV.h5")
    with HDF5Reader(path) as reader:
        df = reader.read('data')
        df = df.loc[df['pixel'] == poi]

    spe_illuminations = np.unique(df['expected_illumination_pe'])[5:8]
    n_illuminations = len(spe_illuminations)

    charges_raw = []
    pulse_path = "/Users/Jason/Software/sstcam-simulation/tutorials/d201209_workshop/pulse_shape.txt"
    cc = CrossCorrelation(1, 1, reference_pulse_path=pulse_path)
    for illumination in spe_illuminations:
        df_i = df.loc[df['expected_illumination_pe'] == illumination]
        charge = cc.get_pulse_height(df_i['charge'].values)
        charges_raw.append(ChargeContainer(charge, n_bins=100, range_=(-3, 15)))

    pdf = SiPMModifiedPoisson(n_illuminations)
    pdf.update_parameters_initial(**initial)
    cost = UnbinnedNLL(pdf, charges_raw)
    values, errors = minimize_with_iminuit(cost)

    charges = []
    for container in charges_raw:
        charge = container.values - values['eped']
        charges.append(ChargeContainer(charge, n_bins=100, range_=(-3, 15)))

    pdf = SiPMModifiedPoisson(n_illuminations)
    pdf.update_parameters_initial(**initial)
    cost = UnbinnedNLL(pdf, charges)
    values, errors = minimize_with_iminuit(cost)
    values_array = np.array(list(values.values()))

    output = dict(
        n_illuminations=n_illuminations,
        hist=[],
        between=[],
        edges=[],
        fit_x=[],
        fit_y=[],
        values=values,
        errors=errors,
    )

    fig = plt.figure(figsize=(10, 5))
    for i in range(n_illuminations):
        ax = fig.add_subplot(n_illuminations, 1, i + 1)
        ax.hist(
            charges[i].between,
            weights=charges[i].hist,
            bins=charges[i].edges,
            density=True,
            histtype='step',
        )
        fit_x = np.linspace(charges[i].edges.min(), charges[i].edges.max(), 1000)
        fit_y = pdf(fit_x, values_array, i)
        lambda_ = values[f'lambda_{i}']
        lambda_err = errors[f'lambda_{i}']
        label = f"λ = {lambda_:.3f} ± {lambda_err:.3f} p.e."
        ax.plot(fit_x, fit_y, label=label)
        ax.legend()

        output['hist'].append(charges[i].hist)
        output['between'].append(charges[i].between)
        output['edges'].append(charges[i].edges)
        output['fit_x'].append(fit_x)
        output['fit_y'].append(fit_y)

    output_path = get_plot(f"d201202_tutorial_material/spe_0MHz_{matched_voltage}mV.pdf")
    fig.savefig(output_path)

    output_path = get_data(f"d201202_tutorial_material/spe_0MHz_{matched_voltage}mV.pkl")
    with open(output_path, 'wb') as file:
        pickle.dump(output, file, protocol=pickle.HIGHEST_PROTOCOL)
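# Note on the fit above (restating the code): the SPE spectrum is fitted twice.
# The first pass is used only to estimate the pedestal position ('eped'); this
# is subtracted from the raw charges and the fit is repeated, so the reported
# lambda values refer to the pedestal-subtracted spectra.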
def main():
    astri_db = "/Volumes/gct-jason/astri_onsky_archive/astri_db.h5"
    hillas_paths = sort_file_list(glob(
        "/Volumes/gct-jason/astri_onsky_archive/d2019-05-02_mrk421/*_hillas.h5"
    ))
    source_name = "mrk421"

    df_hillas_list = []
    readers = []
    mapping = None
    for ipath, hillas_path in enumerate(hillas_paths):
        dl1_path = hillas_path.replace("_hillas.h5", "_dl1.h5")
        r1_path = hillas_path.replace("_hillas.h5", "_r1.tio")

        hillas_reader = HDF5Reader(hillas_path)
        dl1_reader = DL1Reader(dl1_path)
        r1_reader = TIOReader(r1_path)

        if mapping is None:
            mapping = hillas_reader.get_mapping()

        df_data = hillas_reader.read("data")
        df_source = hillas_reader.read("source")
        df_merged = pd.merge(df_data, df_source)
        df_merged['ipath'] = ipath
        df_hillas_list.append(df_merged)
        readers.append((dl1_reader, r1_reader))

    df_hillas = pd.concat(df_hillas_list, ignore_index=True)
    df_hillas = df_hillas.set_index('t_cpu')

    # start_time = df_hillas.index[0]
    # end_time = df_hillas.index[-1]
    #
    # df_pointing = read_pointing_from_database(astri_db)
    # df_pointing = df_pointing.set_index('timestamp')[start_time:end_time]
    # source_table = Simbad.query_object(source_name)
    # source_skycoord = SkyCoord(
    #     ra=Angle(source_table["RA"], unit='hourangle'),
    #     dec=Angle(source_table["DEC"], unit='degree'),
    #     frame='icrs'
    # )
    # add_camera_position(df_pointing, source_skycoord)
    #
    # time_range = pd.Timedelta('1m')
    # max_alpha = 0.1
    # min_intensity = 500
    # d_list = []
    # for timestamp, row in df_pointing.iterrows():
    #     d = dict()
    #     df_within = df_hillas[timestamp-time_range:timestamp+time_range]
    #     df_within = df_within.loc[df_within['intensity'] > min_intensity]
    #     min_alpha_entry = df_within.sort_values("alpha90").iloc[0]
    #     if min_alpha_entry['alpha90'] < max_alpha:
    #         ipath = int(min_alpha_entry['ipath'])
    #         iev = int(min_alpha_entry['iev'])
    #         dl1_reader, r1_reader = readers[ipath]
    #         d['dl1'] = dl1_reader[iev]['photons'] * 0.25
    #         d['r1'] = r1_reader[iev]
    #         d['iev'] = iev
    #         d['iobs'] = min_alpha_entry['iobs']
    #         d['alpha'] = min_alpha_entry['alpha90']
    #         d['x_src'] = min_alpha_entry['source_x']
    #         d['y_src'] = min_alpha_entry['source_y']
    #         d['timestamp'] = min_alpha_entry.name
    #     else:
    #         d['x_src'] = row['x_src']
    #         d['y_src'] = row['y_src']
    #         d['timestamp'] = timestamp
    #     d_list.append(d)

    df_hillas_cut = df_hillas.loc[
        (df_hillas['image_max'] > 200) &
        (df_hillas['alpha90'] < 0.1) &
        (df_hillas['baseline_mean'] < 11.88) &
        (df_hillas['charge_median'] < 11) &
        (df_hillas['size_tm_20'] < 24.5) &
        (df_hillas['size_tm_40'] < 14.5)
    ]
    print(f"N_EVENTS: {df_hillas_cut.index.size}")
    if df_hillas_cut.index.size < 100:
        raise ValueError("Fewer than 100 events survived the cuts")

    d_list = []
    for timestamp, row in df_hillas_cut.iterrows():
        ipath = int(row['ipath'])
        iev = int(row['iev'])
        dl1_reader, r1_reader = readers[ipath]
        dl1_ev = dl1_reader[iev]
        d_list.append(dict(
            timestamp=timestamp,
            iev=iev,
            iobs=row['iobs'],
            dl1=dl1_ev['photons'] * 0.25,
            dl1_pulse_time=dl1_ev['pulse_time'],
            r1=r1_reader[iev],
            alpha=row['alpha90'],
            x_src=row['source_x'],
            y_src=row['source_y'],
            x_cog=row['x'],
            y_cog=row['y'],
            psi=row['psi'],
        ))
    df = pd.DataFrame(d_list)

    with pd.HDFStore(get_data("d190717_alpha/wobble.h5"), mode='w') as store:
        store['data'] = df
        store['mapping'] = mapping
        store.get_storer('mapping').attrs.metadata = mapping.metadata
def main():
    pm = PixelMasks()
    dead = np.where(np.logical_or(pm.dead, np.repeat(pm.bad_hv, 4)))[0]

    bright_path = get_astri_2019("d2019-04-23_nudges/bright_50pe/charge.h5")
    with HDF5Reader(bright_path) as reader:
        df_bright = reader.read("data").groupby(['nudge', 'pixel']).mean().reset_index()
        isin_dead = df_bright['pixel'].isin(dead)
        df_bright = df_bright.loc[~isin_dead]

    spe_path = get_astri_2019("d2019-04-23_nudges/results/process_spe/spe.csv")
    df_spe = pd.read_csv(spe_path)

    illumination = calculate_illumination(df_bright, df_spe)
    cc2height = calculate_cc2height(df_bright)
    print(f"cc2height = {cc2height}")

    nudges = np.unique(df_bright['nudge'])
    # nudges = np.delete(nudges, np.where(np.isin(nudges, [5, 25])))
    mv2pe = np.zeros(nudges.size)
    charge2photons = np.zeros(nudges.size)
    pde = np.zeros(nudges.size)

    for inudge, nudge in enumerate(nudges):
        df_bright_nudge = df_bright.loc[df_bright['nudge'] == nudge].mean()
        df_spe_nudge = df_spe.loc[df_spe['nudge'] == nudge]

        cc2pe = df_spe_nudge['cc2pe'].iloc[0]
        mv2pe[inudge] = cc2pe / cc2height

        charge_onsky = df_bright_nudge['onsky_calib']
        charge2photons[inudge] = charge_onsky / illumination

        charge_cc = df_bright_nudge['cc']
        cc2photons = charge_cc / illumination
        pde[inudge] = cc2photons / cc2pe

    mask = ~np.isin(nudges, [5, 25])
    mv2pe_coeff = polyfit(nudges[mask], mv2pe[mask], 3)
    charge2photons_coeff = polyfit(nudges, charge2photons, 3)
    min_ = nudges.min()
    max_ = nudges.max()

    output_dir = get_astri_2019("d2019-04-23_nudges/results/extract_charge2photons")

    p_mv2pe = Mv2pePlotter()
    p_mv2pe.plot(nudges[mask], mv2pe[mask], mv2pe_coeff, min_, max_)
    p_mv2pe.save(join(output_dir, "mv2pe.pdf"))

    p_mv2pe_cc = Mv2pePlotter()
    p_mv2pe_cc.plot(nudges, charge2photons, charge2photons_coeff, min_, max_)
    p_mv2pe_cc.ax.set_ylabel("Charge (mVns) per photon")
    p_mv2pe_cc.save(join(output_dir, "charge2photons.pdf"))

    p_pde = PDEPlotter()
    p_pde.plot(nudges[mask], pde[mask])
    p_pde.save(join(output_dir, "pde.pdf"))

    output = dict(
        charge2photons_coeff=charge2photons_coeff.tolist(),
        nudge_min=-40,  # int(nudges.min()),
        nudge_max=int(nudges.max()),
    )
    outpath = get_calib_data('charge2photons.yml')
    with open(outpath, 'w') as outfile:
        yaml.dump(output, outfile, default_flow_style=False)
    print(f"Created charge2photons file: {outpath}")
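# Summary of the per-nudge conversion factors derived above (restating the code):
#   mv2pe          = cc2pe / cc2height
#   charge2photons = onsky_calib / illumination
#   pde            = (cc / illumination) / cc2pe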
def main():
    parser = argparse.ArgumentParser(
        description='Plot the contents of the SPE HDF5 file',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '-f', '--file', dest='input_path',
        help='path to the input SPE HDF5 file'
    )
    parser.add_argument(
        '-o', '--output', dest='output_dir',
        help='directory to save plots'
    )
    parser.add_argument(
        '-p', '--pixel', dest='plot_pixel', type=int,
        help='Pixel to plot the spectrum for'
    )
    args = parser.parse_args()
    input_path = args.input_path
    output_dir = args.output_dir
    plot_pixel = args.plot_pixel

    with HDF5Reader(input_path) as reader:
        df_values = reader.read('values')
        df_errors = reader.read('errors')
        df_arrays = reader.read('arrays')
        mapping = reader.get_mapping()
        metadata = reader.get_metadata()

    columns = df_values.columns
    for column in columns:
        p_camera = SPECamera(mapping)
        p_camera.set_image(df_values[column], df_errors[column])
        p_camera.fig.suptitle(column)
        p_camera.save(join(output_dir, f"camera_{column}.pdf"))

        p_hist = SPEHist()
        p_hist.plot(df_values[column])
        p_hist.fig.suptitle(column)
        p_hist.save(join(output_dir, f"hist_{column}.pdf"))

    n_illuminations = metadata['n_illuminations']
    fitter_name = metadata['fitter']
    initial = metadata['initial']
    pixel_values = df_values.loc[plot_pixel].to_dict()
    pixel_errors = df_errors.loc[plot_pixel].to_dict()
    pixel_arrays = df_arrays.loc[plot_pixel]

    p_spectrum_pixel = SpectrumFitPlotter(n_illuminations)
    p_spectrum_pixel.plot(
        pixel_arrays['charge_hist_x'],
        pixel_arrays['charge_hist_y'],
        pixel_arrays['charge_hist_edges'],
        pixel_arrays['fit_x'],
        pixel_arrays['fit_y'],
        pixel_values,
        pixel_errors,
        initial,
    )
    p_spectrum_pixel.fig.suptitle(
        f"{fitter_name}, {n_illuminations} Illuminations, Pixel={plot_pixel}",
        x=0.75
    )
    p_spectrum_pixel.save(join(output_dir, f"spectrum_p{plot_pixel}.pdf"))
import pandas as pd
from CHECLabPy.core.io import HDF5Reader, HDF5Writer

# events.txt is a tab-separated table with 'path' and 'iev' columns,
# selecting one event per input hillas file
path = "/Users/Jason/Software/sstcam_sandbox/sstcam_sandbox/d190506_astri_publicity/events.txt"
output_path = path.replace(".txt", "_hillas.h5")

with HDF5Writer(output_path) as writer:
    df_events = pd.read_csv(path, sep='\t')
    ifile = 0
    for _, row in df_events.iterrows():
        event_path = row['path']
        iev = row['iev']
        with HDF5Reader(event_path) as reader:
            df = reader.read("data")
            df = df.loc[df['iev'] == iev]
            if ifile == 0:
                writer.add_mapping(reader.get_mapping())
                writer.add_metadata(**reader.get_metadata())
            keys = ['data', 'pointing', 'mc', 'mcheader']
            for key in keys:
                if key not in reader.dataframe_keys:
                    continue
                df = reader.read(key)
                df = df.loc[df['iev'] == iev]
                writer.append(df, key=key)
        ifile += 1
def main(): path = get_data("d190520_charge_extraction/data/analysis.h5") output_dir = get_plot("d190520_charge_extraction/data") with HDF5Reader(path) as reader: df = reader.read("data") extractors = np.unique(df['extractor']) peak_extractors = [e for e in extractors if e.startswith("peak")] width = np.zeros(len(peak_extractors)) shift = np.zeros(len(peak_extractors)) sn_50 = np.zeros(len(peak_extractors)) sn_3 = np.zeros(len(peak_extractors)) for iex, extractor in enumerate(peak_extractors): width[iex] = int(extractor.split("_")[1]) shift[iex] = int(extractor.split("_")[2]) series = df.loc[df['extractor'] == extractor].iloc[0] sn_50[iex] = series['sn_on_50'] sn_3[iex] = series['sn_on_3'] p_image = Image(sidebyside=True) p_image.plot(width, shift, sn_50) p_image.ax.set_title("Mid Average Illumination") p_image.save(join(output_dir, f"peak_sn_50.pdf")) p_image = Image(sidebyside=True) p_image.plot(width, shift, sn_3) p_image.ax.set_title("Low Average Illumination") p_image.save(join(output_dir, f"peak_sn_3.pdf")) sliding_extractors = [e for e in extractors if e.startswith("sliding")] width = np.zeros(len(sliding_extractors)) sn_50 = np.zeros(len(sliding_extractors)) sn_3 = np.zeros(len(sliding_extractors)) sn_peak_50 = np.zeros(len(sliding_extractors)) sn_peak_3 = np.zeros(len(sliding_extractors)) for iex, extractor in enumerate(sliding_extractors): width[iex] = int(extractor.split("_")[1]) series = df.loc[df['extractor'] == extractor].iloc[0] sn_50[iex] = series['sn_on_50'] sn_3[iex] = series['sn_on_3'] shift = width[iex] // 2 peak_extractor = f"peak_{width[iex]:.0f}_{shift:.0f}" if peak_extractor in extractors: series = df.loc[df['extractor'] == peak_extractor].iloc[0] sn_peak_50[iex] = series['sn_on_50'] sn_peak_3[iex] = series['sn_on_3'] series = df.loc[df['extractor'] == "cc_nn"].iloc[0] cc_sn_50 = series['sn_on_50'] cc_sn_3 = series['sn_on_3'] p_curve = Curve(sidebyside=True) p_curve.plot(width, sn_50, "Sliding Window") p_curve.plot(width, sn_peak_50, "Peak Finding") p_curve.plot(width, np.full(width.size, cc_sn_50), "Cross Correlation") # p_curve.ax.axhline(cc_sn_50, label='Cross Correlation') p_curve.add_legend("best") p_curve.ax.set_title("Mid Average Illumination") p_curve.save(join(output_dir, f"sliding_sn_50.pdf")) p_curve = Curve(sidebyside=True) p_curve.plot(width, sn_3, "Sliding Window") p_curve.plot(width, sn_peak_3, "Peak Finding") p_curve.plot(width, np.full(width.size, cc_sn_3), "Cross Correlation") # p_curve.ax.axhline(cc_sn_3, label='Cross Correlation') p_curve.add_legend("best") p_curve.ax.set_title("Low Average Illumination") p_curve.save(join(output_dir, f"sliding_sn_3.pdf"))