def process(file):
    runlist_path = file.runlist_path
    output_path = file.charge_averages_path

    df_runs = open_runlist_dl1(runlist_path)
    df_runs['transmission'] = 1 / df_runs['fw_atten']
    n_runs = df_runs.index.size
    mapping = df_runs.iloc[0]['reader'].mapping
    n_pixels = df_runs.iloc[0]['reader'].n_pixels

    cs = ChargeStatistics()

    desc0 = "Looping over files"
    it = enumerate(df_runs.iterrows())
    for i, (_, row) in tqdm(it, total=n_runs, desc=desc0):
        reader = row['reader']
        transmission = row['transmission']
        n_rows = n_pixels * 1000
        pixel, charge = reader.select_columns(['pixel', 'charge'], stop=n_rows)
        cs.add(pixel, transmission, charge)
        reader.store.close()
    df_pixel, df_camera = cs.finish()

    df = df_pixel[["pixel", "amplitude", "mean", "std"]].copy()
    df = df.rename(columns={"amplitude": "transmission"})

    df_runs2 = df_runs[['transmission', 'pe_expected', 'fw_pos']].copy()
    df_runs2['run_number'] = df_runs2.index
    df = pd.merge(df, df_runs2, on='transmission')

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
def process(input_paths, data_path, poi):
    df_list = []

    n_files = len(input_paths)
    for ifile, f in enumerate(input_paths):
        print("Processing File {}/{}".format(ifile + 1, n_files))
        reader = TIOReader(f, max_events=1000)

        n_events = reader.n_events
        n_samples = reader.n_samples

        wfs = np.zeros((n_events, n_samples))

        desc = "Processing events"
        for wf in tqdm(reader, total=n_events, desc=desc):
            iev = wf.iev
            wfs[iev] = wf[poi]

        average_wf = wfs.mean(0)

        df_list.append(pd.DataFrame(dict(
            ifile=ifile,
            file=f,
            wf=average_wf,
            isam=np.arange(n_samples),
        )))

    df = pd.concat(df_list, ignore_index=True)

    with HDF5Writer(data_path) as writer:
        writer.write(data=df)
        writer.write_metadata(n_files=n_files)
def process(input_path, output_path, poi):
    r0_reader = TIOReader(input_path)
    n_events = r0_reader.n_events
    n_samples = r0_reader.n_samples
    samples = np.arange(n_samples, dtype=np.uint16)

    df_list = []

    desc = "Looping over events"
    for r0 in tqdm(r0_reader, total=n_events, desc=desc):
        iev = r0.iev
        fci = r0.first_cell_id[poi].item()

        df_list.append(pd.DataFrame(dict(
            iev=iev,
            fci=fci,
            isam=samples,
            r0=r0[poi],
        )))

    df = pd.concat(df_list, ignore_index=True)

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
        writer.write_metadata(poi=poi)
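# Hedged sketch (illustrative toy data, not from this codebase): the function above
# stores R0 waveforms in long format (one row per sample); a pandas pivot recovers
# an (n_events, n_samples) array for plotting or further analysis.
import numpy as np
import pandas as pd

toy = pd.DataFrame(dict(
    iev=np.repeat([0, 1], 4),          # two events
    isam=np.tile(np.arange(4), 2),     # four samples each
    r0=np.arange(8, dtype=float),      # toy ADC values
))
wf_2d = toy.pivot(index='iev', columns='isam', values='r0').values
print(wf_2d.shape)  # (2, 4)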
def process(file):
    runlist_path = file.runlist_path
    fw_path = file.fw_path
    ff_path = file.ff_path
    output_path = file.charge_resolution_path

    df_runs = open_runlist_dl1(runlist_path)
    df_runs['transmission'] = 1 / df_runs['fw_atten']
    n_runs = df_runs.index.size
    mapping = df_runs.iloc[0]['reader'].mapping
    n_pixels = df_runs.iloc[0]['reader'].n_pixels

    with HDF5Reader(fw_path) as reader:
        df = reader.read("data")
        fw_m = df['fw_m'].values
        fw_merr = df['fw_merr'].values

    with HDF5Reader(ff_path) as reader:
        df = reader.read("data")
        ff_m = df['ff_m'].values
        ff_c = df['ff_c'].values

    cr = ChargeResolution()
    cs = ChargeStatistics()

    desc0 = "Looping over files"
    it = enumerate(df_runs.iterrows())
    for i, (_, row) in tqdm(it, total=n_runs, desc=desc0):
        reader = row['reader']
        transmission = row['transmission']
        n_rows = n_pixels * 1000
        pixel, charge = reader.select_columns(['pixel', 'charge'], stop=n_rows)
        true = transmission * fw_m[pixel]
        measured = (charge - ff_c[pixel]) / ff_m[pixel]
        cr.add(pixel, true, measured)
        cs.add(pixel, true, measured)
        reader.store.close()
    df_cr_pixel, df_cr_camera = cr.finish()
    df_cs_pixel, df_cs_camera = cs.finish()

    def add_error(df):
        df['true_err'] = df['true'] / fw_m[df['pixel']] * fw_merr[df['pixel']]

    add_error(df_cr_pixel)

    with HDF5Writer(output_path) as writer:
        writer.write(
            charge_resolution_pixel=df_cr_pixel,
            charge_resolution_camera=df_cr_camera,
            charge_statistics_pixel=df_cs_pixel,
            charge_statistics_camera=df_cs_camera,
        )
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
def process(monitor_paths, output_path):
    df_list = []
    iev = 0

    desc0 = "Looping over files"
    desc1 = "Looping over events"
    for path in tqdm(monitor_paths, total=len(monitor_paths), desc=desc0):
        with open(path) as file:
            for line in file:
                if line and line != '\n':
                    try:
                        data = line.replace('\n', '').replace('\t', " ")
                        data = data.split(" ")

                        t_cpu = pd.to_datetime(
                            "{} {}".format(data[0], data[1]),
                            format="%Y-%m-%d %H:%M:%S:%f"
                        )
                        # TODO: store monitor ASCII with UTC timestamps
                        t_cpu = (t_cpu.tz_localize("Europe/Berlin")
                                 .tz_convert("UTC").tz_localize(None))

                        if 'Monitoring Event Done' in line:
                            iev += 1
                            continue

                        device = data[2]
                        measurement = data[3]
                        key = device + "_" + measurement

                        if key == "TM_SP_VOLTAGE":
                            imod = int(data[4])
                            sp = imod * 16 + np.arange(16)
                            values = np.array(data[5:21], dtype=float)
                            df_list.append(pd.DataFrame(dict(
                                iev=iev,
                                t_cpu=t_cpu,
                                superpixel=sp,
                                hv=values,
                            )))
                    except (ValueError, IndexError):
                        # Skip lines that do not parse as monitor entries
                        pass

    df = pd.concat(df_list, ignore_index=True)

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
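# Hedged sketch of the timezone conversion used above: the monitor timestamps are
# naive local (Europe/Berlin) times, which are localized, converted to UTC, and
# made naive again. The example timestamp below is illustrative.
import pandas as pd

t_local = pd.to_datetime("2019-06-01 12:00:00:123456",
                         format="%Y-%m-%d %H:%M:%S:%f")
t_utc = t_local.tz_localize("Europe/Berlin").tz_convert("UTC").tz_localize(None)
print(t_local, "->", t_utc)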
def process(file):
    input_path = file.dl1_path
    angular_response_path = file.angular_response_path
    illumination_profile_path = file.illumination_profile_path
    plot_dir = file.plot_dir

    ip = IlluminationProfile(angular_response_path)

    reader = DL1Reader(input_path)
    mapping = reader.mapping
    pixel, true = reader.select_columns(['pixel', 'mc_true'])
    xpix = mapping['xpix'].values
    ypix = mapping['ypix'].values
    dist = np.sqrt(xpix**2 + ypix**2)
    n_pixels = mapping.metadata['n_pixels']
    n_events = reader.n_events

    true_p = true.values.reshape((n_events, n_pixels)).mean(0)

    df = pd.DataFrame(dict(
        pixel=np.arange(n_pixels),
        distance=dist,
        true=true_p,
    ))

    pixel = df['pixel'].values
    true = df['true'].values
    dist = df['distance'].values

    params = polyfit(dist, true, [0, 2])
    params_norm = params / polyval(0, params)
    pixel_corrections = polyval(dist, params_norm)

    df_corr = pd.DataFrame(dict(
        pixel=pixel,
        correction=pixel_corrections,
    ))
    df_params = pd.DataFrame(params_norm)

    with HDF5Writer(illumination_profile_path) as writer:
        writer.write(correction=df_corr, params=df_params)
        writer.write_mapping(mapping)

    p_dvt = PixelScatter(ip)
    p_dvt.plot(dist, true, params)
    p_dvt.save(os.path.join(plot_dir, "illumination_profile.pdf"))

    p_f = CameraImage.from_mapping(mapping)
    p_f.image = pixel_corrections
    p_f.add_colorbar("Illumination Profile Correction")
    p_f.save(os.path.join(plot_dir, "illumination_profile_camera.pdf"))
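# Hedged sketch, assuming polyfit/polyval above are numpy.polynomial.polynomial:
# passing a list of degrees (e.g. [0, 2]) fits only those terms, and dividing by
# polyval(0, params) normalizes the profile to its value at the camera centre.
# The data below are toy values.
import numpy as np
from numpy.polynomial.polynomial import polyfit, polyval

dist_toy = np.linspace(0, 0.2, 50)                 # toy pixel distances
true_toy = 100 * (1 - 2.0 * dist_toy**2)           # toy illumination profile

params_toy = polyfit(dist_toy, true_toy, [0, 2])   # constant + quadratic only
params_norm_toy = params_toy / polyval(0, params_toy)
print(polyval(0, params_norm_toy))                 # 1.0 at zero distance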
def process_list(input_paths, amplitudes, output_path, poi):
    desc = "Looping over files"
    process_poi = partial(process, poi=poi)
    it = list(zip(input_paths, amplitudes))[::4]
    with Pool(int(os.cpu_count() - 2)) as pool:
        process_pool = pool.imap(process_poi, it)
        df_list = list(tqdm(process_pool, total=len(it), desc=desc))
    # for i in it:
    #     process_poi(i)

    df = pd.concat(df_list, ignore_index=True)

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
        writer.write_metadata(poi=poi)
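# Minimal, self-contained sketch of the functools.partial + Pool.imap pattern used
# above (the toy function and values are illustrative, not part of this codebase):
# imap takes a single-argument callable, so the fixed keyword (poi) is bound with
# partial and each work item is a (path, amplitude) tuple.
import os
from functools import partial
from multiprocessing import Pool


def _toy_process(item, poi):
    path, amplitude = item
    return "{}:{}:{}".format(path, amplitude, poi)


if __name__ == "__main__":
    toy_it = list(zip(["a.tio", "b.tio"], [50, 100]))
    toy_func = partial(_toy_process, poi=888)
    with Pool(max(os.cpu_count() - 2, 1)) as pool:
        results = list(pool.imap(toy_func, toy_it))
    print(results)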
def process(file):
    dl1_paths = file.dl1_paths
    pde = file.pde
    mc_calib_path = file.mc_calib_path
    output_path = file.intensity_resolution_path

    n_runs = len(dl1_paths)
    reader_list = [DL1Reader(p) for p in dl1_paths]
    mapping = reader_list[0].mapping
    n_pixels = reader_list[0].n_pixels
    n_rows = n_pixels * 1000

    with HDF5Reader(mc_calib_path) as reader:
        df = reader.read("data")
        mc_m = df['mc_m'].values

    cr = ChargeResolution(mc_true=True)
    cs = ChargeStatistics()

    desc0 = "Looping over files"
    for reader in tqdm(reader_list, total=n_runs, desc=desc0):
        pixel, charge, true = reader.select_columns(
            ['pixel', 'charge', 'mc_true'], stop=n_rows
        )
        true_photons = true / pde
        measured = charge / mc_m[pixel]

        f = true > 0
        pixel = pixel[f]  # keep the pixel array aligned with the filtered charges
        true_photons = true_photons[f]
        measured = measured[f]

        cr.add(pixel, true_photons, measured)
        cs.add(pixel, true_photons, measured)

        reader.store.close()
    df_cr_pixel, df_cr_camera = cr.finish()
    df_cs_pixel, df_cs_camera = cs.finish()

    with HDF5Writer(output_path) as writer:
        writer.write(
            charge_resolution_pixel=df_cr_pixel,
            charge_resolution_camera=df_cr_camera,
            charge_statistics_pixel=df_cs_pixel,
            charge_statistics_camera=df_cs_camera,
        )
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
def process(file):
    dl1_paths = file.dl1_paths
    pde = file.pde
    mc_calib_path = file.mc_calib_path

    with DL1Reader(dl1_paths[0]) as reader:
        n_pixels = reader.n_pixels
        mapping = reader.mapping
        cols = ['pixel', 'charge', 'mc_true']
        pixel, charge, true = reader.select_columns(cols)

    df = pd.DataFrame(dict(
        pixel=pixel,
        charge=charge,
        true=true,
    ))
    df_agg = df.groupby(['pixel', 'true']).agg(
        {'charge': ['mean', 'std']}).reset_index()

    pixels = np.where(df.groupby('pixel').sum()['true'].values > 1000)[0]

    m_array = np.full(n_pixels, np.nan)
    for p in pixels:
        df_p = df_agg.loc[(df_agg['pixel'] == p) & (df_agg['true'] > 0)]
        x = df_p['true'].values / pde
        y = df_p['charge']['mean'].values
        yerr = df_p['charge']['std'].values
        yerr[np.isnan(yerr)] = 1000
        yerr[yerr == 0] = 1000
        c, m = polyfit(x, y, [1], w=y/yerr)
        m_array[p] = m

    df_calib = pd.DataFrame(dict(
        pixel=np.arange(n_pixels),
        mc_m=m_array,
    ))
    df_calib_mean = df_calib.copy()
    df_calib_mean['mc_m'] = np.nanmean(m_array)
    print("Average Gradient = {}".format(np.nanmean(m_array)))

    with HDF5Writer(mc_calib_path) as writer:
        writer.write(data=df_calib)
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
def process(readers, output_path):
    df_list = []

    desc0 = "Looping over files"
    desc1 = "Looping over events"
    for reader in tqdm(readers, total=len(readers), desc=desc0):
        mapping = reader.mapping
        sp_arr = np.vstack(mapping.groupby("superpixel").pixel.apply(np.array))
        n_events = reader.n_events
        n_pixels = reader.n_pixels
        pixels = np.arange(n_pixels)
        for wfs in tqdm(reader, total=n_events, desc=desc1):
            iev = wfs.iev
            if iev % 10:
                continue
            t_cpu = wfs.t_cpu
            amplitude = wfs.max(axis=1)
            sum_wfs = wfs[sp_arr].sum(1)
            amplitude_sp = sum_wfs.max(axis=1)

            # plt.plot(wfs[sp_arr][372].T)
            # plt.ylim((-25, 75))
            # plt.pause(0.5)
            # plt.cla()

            df_list.append(pd.DataFrame(dict(
                iev=iev,
                t_cpu=t_cpu,
                pixel=pixels,
                amplitude=amplitude,
                amplitude_sp=np.repeat(amplitude_sp, 4),
            )))

    df = pd.concat(df_list, ignore_index=True)

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
        writer.write_mapping(readers[0].mapping)
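# Hedged sketch of the superpixel-summing trick used above (toy shapes): indexing
# an (n_pixels, n_samples) waveform array with an (n_superpixels, 4) array of
# pixel indices yields shape (n_superpixels, 4, n_samples); summing over axis 1
# gives one summed waveform per superpixel.
import numpy as np

n_pixels_toy, n_samples_toy = 8, 5
wfs_toy = np.arange(n_pixels_toy * n_samples_toy, dtype=float).reshape(
    n_pixels_toy, n_samples_toy)
sp_arr_toy = np.arange(n_pixels_toy).reshape(-1, 4)   # 2 superpixels of 4 pixels

sum_wfs_toy = wfs_toy[sp_arr_toy].sum(1)              # shape (2, n_samples_toy)
amplitude_sp_toy = sum_wfs_toy.max(axis=1)            # per-superpixel peak
print(sum_wfs_toy.shape, amplitude_sp_toy)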
def process(file):
    r0_paths = file.r0_paths
    tfnone_paths = file.tfnone_paths
    tfpoly_paths = file.tfpoly_paths
    vped_list = file.vped_list
    output_path = file.averages_path

    try:
        r0_df = get_df(r0_paths, vped_list)
        tfnone_df = get_df(tfnone_paths, vped_list)
        tfpoly_df = get_df(tfpoly_paths, vped_list)
    except Exception:
        # Drop into an interactive shell to inspect the failure
        embed()

    with HDF5Writer(output_path) as writer:
        writer.write(
            r0=r0_df,
            tfnone=tfnone_df,
            tfpoly=tfpoly_df,
        )
def process(file):
    runlist_path = file.runlist_path
    fw_path = file.fw_path
    ff_path = file.ff_path
    output_path = file.stats_path

    df_runs = open_runlist_dl1(runlist_path)
    df_runs['transmission'] = 1 / df_runs['fw_atten']
    n_runs = df_runs.index.size
    mapping = df_runs.iloc[0]['reader'].mapping
    n_pixels = df_runs.iloc[0]['reader'].n_pixels

    with HDF5Reader(fw_path) as reader:
        df = reader.read("data")
        fw_m = df['fw_m'].values
        fw_merr = df['fw_merr'].values

    with HDF5Reader(ff_path) as reader:
        df = reader.read("data")
        ff_m = df['ff_m'].values
        ff_c = df['ff_c'].values

    df_list = []

    desc0 = "Looping over files"
    it = enumerate(df_runs.iterrows())
    for i, (run, row) in tqdm(it, total=n_runs, desc=desc0):
        reader = row['reader']
        transmission = row['transmission']
        fw_pos = row['fw_pos']
        n_rows = n_pixels * 1000
        pixel, charge = reader.select_columns(['pixel', 'charge'], stop=n_rows)
        true = transmission * fw_m[pixel]

        df = pd.DataFrame(dict(
            pixel=pixel,
            charge=charge,
            measured=(charge - ff_c[pixel]) / ff_m[pixel],
            run=run,
            transmission=transmission,
            fw_pos=fw_pos,
            true=true,
        ))
        trans = df.groupby('pixel').transform('mean')
        df['charge_mean'] = trans['charge']
        df['measured_mean'] = trans['measured']

        gb = df.groupby('pixel')
        df_stats = gb.agg({
            'charge': ['mean', 'std'],
            'measured': ['mean', 'std']
        })
        df_stats['run'] = run
        df_stats['transmission'] = transmission
        df_stats['fw_pos'] = fw_pos
        df_stats['true'] = transmission * fw_m
        df_stats['pixel'] = df_stats.index
        df_stats.loc[:, ('measured', 'res')] = gb.apply(charge_resolution_df).values
        df_stats.loc[:, ('charge', 'rms')] = gb.apply(rms_charge_df).values
        df_stats.loc[:, ('measured', 'rms')] = gb.apply(rms_measured_df).values
        df_list.append(df_stats)

        reader.store.close()

    df = pd.concat(df_list, ignore_index=True)

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
def process(trigger_path, output_path):
    df = read_trigger_file(trigger_path)
    embed()  # drop into an interactive shell to inspect the trigger dataframe
    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
def process(file):
    runlist_path = file.runlist_path
    output_path = file.saturation_recovery_path
    fw_path = file.fw_path
    plot_path = file.saturation_recovery_plot_path
    poi = file.poi

    df_runs = open_runlist_dl1(runlist_path)
    df_runs['transmission'] = 1 / df_runs['fw_atten']
    n_runs = df_runs.index.size
    mapping = df_runs.iloc[0]['reader'].mapping
    n_pixels = df_runs.iloc[0]['reader'].n_pixels

    cs = ChargeStatistics()

    desc0 = "Looping over files"
    it = enumerate(df_runs.iterrows())
    for i, (_, row) in tqdm(it, total=n_runs, desc=desc0):
        reader = row['reader']
        transmission = row['transmission']
        n_rows = n_pixels * 1000
        pixel, charge = reader.select_columns(
            ['pixel', 'saturation_coeff'], stop=n_rows
        )
        cs.add(pixel, transmission, charge)
        reader.store.close()
    df_pixel, df_camera = cs.finish()

    df = df_pixel[["pixel", "amplitude", "mean", "std"]].copy()
    df = df.rename(columns={"amplitude": "transmission"})

    df_runs2 = df_runs[['transmission', 'pe_expected', 'fw_pos']].copy()
    df_runs2['run_number'] = df_runs2.index
    df = pd.merge(df, df_runs2, on='transmission')

    with HDF5Reader(fw_path) as reader:
        df_fw = reader.read("data")
        fw_m = df_fw['fw_m'].values
        fw_merr = df_fw['fw_merr'].values

    pixel = df['pixel'].values
    transmission = df['transmission'].values
    df['illumination'] = transmission * fw_m[pixel]
    df['illumination_err'] = transmission * fw_merr[pixel]

    d_list = []
    for pix in np.unique(df['pixel']):
        df_p = df.loc[df['pixel'] == pix]
        true = df_p['illumination'].values
        true_err = df_p['illumination_err'].values
        measured = df_p['mean'].values
        measured_std = df_p['std'].values

        # Fit only the five points with illumination closest to 2500
        flag = np.zeros(true.size, dtype=bool)
        flag[np.abs(true - 2500).argsort()[:5]] = True

        x = true[flag]
        y = measured[flag]
        y_err = measured_std[flag]
        p = polyfit(x, y, [1], w=1 / y_err)
        ff_c, ff_m = p

        d_list.append(dict(
            pixel=pix,
            ff_c=ff_c,
            ff_m=ff_m,
        ))

        if pix == poi:
            print("{:.3f}".format(ff_m))
            p_fit = FitPlotter()
            p_fit.plot(true, measured, true_err, measured_std, flag, p)
            p_fit.save(plot_path)

    df_calib = pd.DataFrame(d_list)
    df_calib = df_calib.sort_values('pixel')

    with HDF5Writer(output_path) as writer:
        writer.write(data=df_calib)
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
def process(file):
    charge_averages_path = file.charge_averages_path
    fw_path = file.fw_path
    ff_path = file.ff_path
    plot_dir = file.ff_plot_dir
    poi = file.poi

    with HDF5Reader(charge_averages_path) as reader:
        df_avg = reader.read("data")
        mapping = reader.read_mapping()
        metadata = reader.read_metadata()

    with HDF5Reader(fw_path) as reader:
        df_fw = reader.read("data")
        fw_m = df_fw['fw_m'].values
        fw_merr = df_fw['fw_merr'].values

    pixel = df_avg['pixel'].values
    transmission = df_avg['transmission'].values
    df_avg['illumination'] = transmission * fw_m[pixel]
    df_avg['illumination_err'] = transmission * fw_merr[pixel]

    d_list = []
    for pix in np.unique(df_avg['pixel']):
        df_p = df_avg.loc[df_avg['pixel'] == pix]
        true = df_p['illumination'].values
        true_err = df_p['illumination_err'].values
        measured = df_p['mean'].values
        measured_std = df_p['std'].values

        # Fit only the three points with illumination closest to 50
        flag = np.zeros(true.size, dtype=bool)
        flag[np.abs(true - 50).argsort()[:3]] = True

        x = true[flag]
        y = measured[flag]
        y_err = measured_std[flag]
        p, f = polyfit(x, y, [1], w=1 / y_err, full=True)
        ff_c, ff_m = p

        # n = x.size
        # sy = np.sqrt(np.sum((y - polyval(x, p))**2) / (n - 1))
        # sm = sy * np.sqrt(1/(np.sum((x - np.mean(x))**2)))
        # ff_merr = sm
        ff_merr = 0

        d_list.append(dict(
            pixel=pix,
            ff_c=ff_c,
            ff_m=ff_m,
            ff_merr=ff_merr,
        ))

        if pix == poi:
            print("{:.3f} ± {:.3f}".format(ff_m, ff_merr))
            p_fit = FitPlotter()
            p_fit.plot(true, measured, true_err, measured_std, flag,
                       ff_c, ff_m, ff_merr)
            p_fit.save(os.path.join(plot_dir, "flat_fielding.pdf"))

    df_calib = pd.DataFrame(d_list)
    df_calib = df_calib.sort_values('pixel')

    with HDF5Writer(ff_path) as writer:
        writer.write(data=df_calib)
        writer.write_mapping(mapping)
        writer.write_metadata(**metadata)

    p_hist2d = Hist2D()
    p_hist2d.plot(df_avg['illumination'].values, df_avg['mean'].values)
    p_hist2d.save(os.path.join(plot_dir, "pixel_averages.pdf"))
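# Hedged sketch (toy numbers) of how the ff_c/ff_m coefficients written above are
# applied in the charge-resolution and stats functions in this file:
# measured = (charge - ff_c[pixel]) / ff_m[pixel].
import numpy as np

ff_c_toy = np.array([1.0, 0.5, 0.0, 2.0])     # per-pixel intercepts
ff_m_toy = np.array([2.0, 2.1, 1.9, 2.0])     # per-pixel gradients

pixel_toy = np.array([0, 1, 2, 3])
charge_toy = np.array([11.0, 12.6, 9.5, 12.0])

measured_toy = (charge_toy - ff_c_toy[pixel_toy]) / ff_m_toy[pixel_toy]
print(measured_toy)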
def process(readers, output_path, superpixels):
    pix_dict = obtain_pixel_list(superpixels)

    df_list = []
    df_list_sum = []

    desc0 = "Looping over files"
    desc1 = "Looping over events"
    for reader in tqdm(readers, total=len(readers), desc=desc0):
        n_events = reader.n_events
        mapping = reader.tc_mapping
        mappingsp = MappingSP(mapping)
        for wfs in tqdm(reader, total=n_events, desc=desc1):
            iev = wfs.iev
            t_cpu = wfs.t_cpu

            for sp, p in pix_dict.items():
                wfs_pix = wfs[p]
                amplitude = wfs_pix.max(axis=1)
                baseline = wfs_pix[:, :20].mean(axis=1)

                # plt.plot(wfs_pix.T)
                # plt.title("iev = {}".format(iev))
                # plt.ylim((-60, 60))
                # plt.pause(1)
                # plt.cla()

                df_list.append(pd.DataFrame(dict(
                    iev=iev,
                    t_cpu=t_cpu,
                    pixel=p,
                    superpixel=sp,
                    amplitude=amplitude,
                    baseline=baseline,
                )))

                wfs_sum = wfs_pix.sum(0)
                amplitude = wfs_sum.max()
                baseline = wfs_sum[:20].mean()

                df_list_sum.append(pd.DataFrame(dict(
                    iev=iev,
                    t_cpu=t_cpu,
                    superpixel=sp,
                    amplitude=amplitude,
                    baseline=baseline,
                ), index=pd.Index([0])))

    df = pd.concat(df_list, ignore_index=True)
    df_sum = pd.concat(df_list_sum, ignore_index=True)

    with HDF5Writer(output_path) as writer:
        writer.write(data=df)
        writer.write(data_sum=df_sum)
        meta = {"sp{}".format(sp): l for sp, l in superpixels.items()}
        writer.write_metadata(**meta)
def process(file):
    runlist_path = file.spe_runlist_path
    spe_path = file.spe_path
    profile_path = file.illumination_profile_path
    dead = file.dead
    fw_path = file.fw_path
    plot_dir = file.fw_plot_dir
    pde = file.pde

    df_runs = open_runlist_dl1(runlist_path, False)
    df_runs['transmission'] = 1 / df_runs['fw_atten']

    store_spe = pd.HDFStore(spe_path)
    df_spe = store_spe['coeff_pixel']
    df_spe_err = store_spe['errors_pixel']
    mapping = store_spe['mapping']
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', UserWarning)
        mapping.metadata = store_spe.get_storer('mapping').attrs.metadata

    meta_spe = store_spe.get_storer('metadata').attrs.metadata
    n_spe_illuminations = meta_spe['n_illuminations']
    spe_files = meta_spe['files']
    n_pixels = meta_spe['n_pixels']

    mean_opct = df_spe['opct'].mean()
    if pde is None:
        pe2photons = PE2Photons().convert(mean_opct)
    else:
        pe2photons = 1 / pde
    print("PDE = {:.3f}".format(1 / pe2photons))
    print("OPCT = {:.3f}".format(mean_opct))

    spe_transmission = []
    pattern = '(.+?)/Run(.+?)_dl1.h5'
    for path in spe_files:
        reg_exp = re.search(pattern, path)
        if reg_exp:
            run = int(reg_exp.group(2))
            spe_transmission.append(df_runs.loc[run]['transmission'])
        else:
            print("Problem with Regular Expression, "
                  "{} does not match pattern {}".format(path, pattern))

    pix_lambda = np.zeros((n_spe_illuminations, n_pixels))
    pix_lambda_err = np.zeros((n_spe_illuminations, n_pixels))
    for ill in range(n_spe_illuminations):
        key = "lambda_" + str(ill)
        lambda_ = (df_spe[['pixel', key]].sort_values('pixel')[key].values
                   * pe2photons)
        lambda_err = df_spe_err[['pixel', key]].sort_values('pixel')[key].values
        pix_lambda[ill] = lambda_
        pix_lambda_err[ill] = lambda_err

    if profile_path:
        with HDF5Reader(profile_path) as reader:
            correction = reader.read("correction")['correction']
    else:
        correction = np.ones(n_pixels)

    df_list = []
    for i in range(n_spe_illuminations):
        df_list.append(pd.DataFrame(dict(
            pixel=np.arange(n_pixels),
            correction=correction,
            transmission=spe_transmission[i],
            lambda_=pix_lambda[i],
            lambda_err=pix_lambda_err[i],
        )))
    df = pd.concat(df_list)

    # Obtain calibration
    dead_mask = np.zeros(n_pixels, dtype=bool)
    dead_mask[dead] = True

    transmission = np.unique(df['transmission'].values)
    lambda_ = []
    lambda_err = []
    corrections = []
    for i in range(len(transmission)):
        df_t = df.loc[df['transmission'] == transmission[i]]
        lambda_.append(df_t['lambda_'].values)
        lambda_err.append(df_t['lambda_err'].values)
        corrections.append(df_t['correction'].values)
    correction = corrections[0]
    lambda_ = np.array(lambda_)
    lambda_err = np.array(lambda_err)

    c_list = []
    m_list = []
    merr_list = []
    for pix in range(n_pixels):
        x = transmission
        y = lambda_[:, pix]
        yerr = lambda_err[:, pix]
        w = 1 / yerr
        cp, mp = polyfit(x, y, 1, w=w)
        c_list.append(cp)
        m_list.append(mp)
        # Analytic uncertainty on the gradient of the weighted straight-line fit
        w2 = w**2
        merrp = np.sqrt(np.sum(w2) /
                        (np.sum(w2) * np.sum(w2 * x**2) - (np.sum(w2 * x))**2))
        merr_list.append(merrp)
    c = np.array(c_list)
    m = np.array(m_list)
    merr = np.array(merr_list)

    # Exclude low gradients (dead pixels)
    # dead_mask[m < 1000] = True

    merr_corrected = merr / correction
    merr_corrected_d = merr_corrected[~dead_mask]
    m_corrected = m / correction
    m_corrected_d = m_corrected[~dead_mask]
    w = 1 / merr_corrected_d
    m_avg = np.average(m_corrected_d, weights=w)
    m_pix = m_avg * correction
    m_avg_std = np.sqrt(np.average((m_corrected_d - m_avg) ** 2, weights=w))
    m_pix_std = m_avg_std * correction
    print("{:.3f} ± {:.3f}".format(m_avg, m_avg_std))

    df_calib = pd.DataFrame(dict(
        pixel=np.arange(n_pixels),
        fw_m=m_pix,
        fw_merr=m_pix_std,
    ))
    df_calib = df_calib.sort_values('pixel')
    with HDF5Writer(fw_path) as writer:
        writer.write(data=df_calib)
        writer.write_mapping(mapping)
        writer.write_metadata(
            n_pixels=n_pixels,
            fw_m_camera=m_avg,
            fw_merr_camera=m_avg_std,
        )

    p_fit = FitPlotter()
    l = np.s_[:5]
    p_fit.plot(transmission, lambda_[:, l], lambda_err[:, l], c[l], m[l])
    p_fit.save(os.path.join(plot_dir, "fw_calibration_fit.pdf"))

    p_line = LinePlotter()
    p_line.plot(m_avg, m_pix, m_avg_std)
    p_line.save(os.path.join(plot_dir, "fw_calibration.pdf"))

    p_hist = HistPlotter()
    p_hist.plot(m_corrected[~dead_mask])
    p_hist.save(os.path.join(plot_dir, "relative_pde.pdf"))
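# Hedged sketch of the weighted straight-line fit and the analytic gradient
# uncertainty used in the filter-wheel calibration above: with weights w = 1/yerr,
# merr = sqrt(sum(w^2) / (sum(w^2)*sum(w^2 x^2) - (sum(w^2 x))^2)).
# The data below are toy values.
import numpy as np
from numpy.polynomial.polynomial import polyfit

x_toy = np.linspace(0.01, 1.0, 10)
yerr_toy = np.full(x_toy.size, 0.05)
y_toy = 0.1 + 2.0 * x_toy + np.random.normal(0, yerr_toy)

w_toy = 1 / yerr_toy
c_toy, m_toy = polyfit(x_toy, y_toy, 1, w=w_toy)
w2 = w_toy**2
merr_toy = np.sqrt(np.sum(w2) /
                   (np.sum(w2) * np.sum(w2 * x_toy**2) - (np.sum(w2 * x_toy))**2))
print("{:.3f} ± {:.3f}".format(m_toy, merr_toy))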