Example #1
def main():
    path = "/Users/Jason/Downloads/tempdata/muon_search/Run13312_dl1.h5"
    output_dir = path.replace("_dl1.h5", "_muonsearch")
    threshold = 60
    number = 11

    with DL1Reader(path) as reader:
        n_events = reader.n_events
        p_images = ImagePlotter(reader.mapping)
        for df in tqdm(reader.iterate_over_events(), total=n_events):
            iev = df['iev'].values[0]
            t_cpu = df['t_cpu'].values[0]

            sp_max = df['sp_max'].values
            above_threshold = sp_max > threshold
            n_sp_above = np.sum(above_threshold) // 4
            if n_sp_above < number:
                continue
            sp_argmax = df['sp_argmax'].values
            image = df['photons'].values
            image_t = df['pulse_time'].values

            p_images.set_image(
                iev, t_cpu, above_threshold, sp_argmax, image, image_t, None
            )
            p_images.save(join(output_dir, f"e{iev}.png"))
Example #2
def process(file, fitter_class):
    name = file.__class__.__name__
    input_paths = file.dl1_paths
    spe_config = file.spe_config
    poi = file.poi
    output_dir = get_plot(f"d190313_spectra_fitting/{name}")
    fitter_class_name = fitter_class.__name__

    readers = [DL1Reader(path) for path in input_paths]
    n_illuminations = len(readers)
    fitter = fitter_class(n_illuminations, spe_config)

    charges = []
    for reader in readers:
        pixel, charge = reader.select_columns(['pixel', 'charge_cc'])
        if poi != -1:
            charge_p = charge[pixel == poi]
        else:
            charge_p = charge
        charges.append(charge_p)
    fitter.apply(*charges)

    fitx = np.linspace(fitter.range[0], fitter.range[1], 1000)

    p_spe = SPEPlotter()
    p_spe.plot(fitx, fitter)
    p_spe.save(os.path.join(output_dir, f"fit_{fitter_class_name}.pdf"))

    p_spe_table = SPEPlotterTable()
    p_spe_table.plot(fitx, fitter)
    p_spe_table.save(
        os.path.join(output_dir, f"fit_table_{fitter_class_name}.pdf"))
Example #3
def howmanyh5datasets(onlyone, maxdatain, datainlist, pathdir):
    # Count how many entries of datainlist are HDF5 (dl1) files, then open the
    # first one found to determine the maximum event index it contains.
    dataincounta = -1
    if onlyone == 0:
        stronly = 0  #OUTPUT
        for datain in range(maxdatain):
            fname = datainlist[datain]  #OUTPUT (avoid shadowing the builtin str)
            if fname.endswith("h5"):
                dataincounta += 1  #OUTPUT
    if onlyone == 1:
        dataincounta = 0  #OUTPUT
        fname = "data_Run030_dl1.h5"  #OUTPUT
        stronly = "data_Run030_dl1.h5"  #OUTPUT
    # Open the first h5 file in the list and read its event indices
    openfirst = 0
    datain = 0
    while openfirst == 0:
        if datainlist[datain].endswith("h5"):
            path = pathdir + '/' + datainlist[datain]
            readert = DL1Reader(path)  #OUTPUT
            ievt = readert.select_column('iev').values  #OUTPUT
            maxievt = max(ievt)  #OUTPUT
            openfirst = 1
        else:
            datain += 1
    return {
        'onlyone': onlyone,
        'dataincounta': dataincounta,
        'str': fname,
        'maxievt': maxievt,
        'ievt': ievt,
        'readert': readert,
        'stronly': stronly
    }
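The counting and first-file logic could also be written more compactly. A minimal sketch under the same assumptions (first_h5_and_count is a hypothetical name; datainlist holds file names and pathdir the directory, as above):

import os

def first_h5_and_count(datainlist, pathdir):
    # Count the entries ending in "h5" and open the first of them with DL1Reader.
    h5_names = [name for name in datainlist if name.endswith("h5")]
    reader = DL1Reader(os.path.join(pathdir, h5_names[0]))
    ievt = reader.select_column('iev').values
    return len(h5_names), reader, ievt.max()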
Example #4
def main():
    paths = [
        "/Volumes/gct-jason/astri_onsky_archive/d2019-05-15_simulations/proton/run1_dl1.h5",
    ]

    df_list = []

    for ipath, path in enumerate(paths):
        with DL1Reader(path) as reader:
            n_events = reader.get_metadata()['n_events']
            mapping = reader.get_mapping()
            geom = get_ctapipe_camera_geometry(mapping, plate_scale=37.56e-3)

            desc = "Looping over events"
            it = reader.iterate_over_events()
            for df in tqdm(it, total=n_events, desc=desc):
                iev = df['iev'].values[0]

                image = df['photons'].values
                time = df['pulse_time'].values

                mask = obtain_cleaning_mask(geom, image, time)
                if not mask.any():
                    continue

                image_m = image[mask]
                time_m = time[mask]
                geom_m = geom[mask]

                try:
                    hillas = hillas_parameters(geom_m, image_m)
                except HillasParameterizationError:
                    continue

                # timing_parameters(geom_m, image_m, time_m, hillas)

                gt0 = image_m > 0
                pix_x = geom_m.pix_x[gt0]
                pix_y = geom_m.pix_y[gt0]
                peakpos = time_m[gt0]
                intensity = image_m[gt0]

                longi, trans = camera_to_shower_coordinates(
                    pix_x, pix_y, hillas.x, hillas.y, hillas.psi)
                longi = longi.value
                trans = trans.value

                # df_list.append(pd.DataFrame(dict(
                #     ipath=ipath,
                #     iev=iev,
                #     longi=longi,
                #     peakpos=peakpos,
                # )))

                p_relation = RelationPlotter()
                p_relation.plot(longi, peakpos, intensity)
                p_relation.save(
                    get_plot(
                        f"d190524_time_gradient/relation/i{ipath}_e{iev}.pdf"))
def get_numpy():
    """
    Pandas dataframe columns are essentially numpy arrays.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    charge_numpy_array = df['charge'].values
    print(type(charge_numpy_array))
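A minimal sketch of the same idea with the more explicit accessor available in newer pandas versions (PATH is assumed to point at a dl1 file, as in the surrounding snippets):

def get_numpy_explicit():
    """
    `Series.to_numpy()` is the modern, explicit equivalent of `.values`.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    charge_numpy_array = df['charge'].to_numpy()
    print(type(charge_numpy_array))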
def select_subset():
    """
    A subset of the DataFrame can be selected to produce a new DataFrame
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_tm4 = df.loc[df['tm'] == 4]
    print(df_tm4)
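The same selection can be expressed with `DataFrame.query`; a minimal sketch under the same assumptions:

def select_subset_query():
    """
    `query` writes the boolean selection as a string expression.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_tm4 = df.query('tm == 4')
    print(df_tm4)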
def get_table_mean():
    """
    Pandas also has its own methods for obtaining many statistical results,
    which can be applied to the entire table at once efficiently.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    mean_series = df.mean()
    print(mean_series)
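If the table also contains non-numeric columns (timestamps, for example), newer pandas versions require being explicit about which columns to average; a minimal sketch:

def get_table_mean_numeric_only():
    """
    Restrict the column-wise mean to numeric columns.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    mean_series = df.mean(numeric_only=True)
    print(mean_series)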
def get_numpy_mean():
    """
    Pandas dataframe columns are essentially numpy array, and therefore can
    be operated on by any of the numpy methods.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    charge_mean = np.mean(df['charge'])
    print(charge_mean)
Example #9
def open_runlist_dl1(path, open_readers=True):
    from CHECLabPy.core.io import DL1Reader
    df = read_runlist(path)
    input_dir = os.path.dirname(path)
    input_run_path = os.path.join(input_dir, "Run{:05d}_dl1.h5")
    df['path'] = [input_run_path.format(i) for i in df.index]
    if open_readers:
        df['reader'] = [DL1Reader(fp) for fp in df['path'].values]
    return df
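A possible usage sketch for open_runlist_dl1 (the runlist path is hypothetical; the 'reader' column only exists when open_readers=True):

df_runs = open_runlist_dl1("/path/to/runlist.txt")
for run, row in df_runs.iterrows():
    n_events = row['reader'].n_events  # DL1Reader attribute used elsewhere in these examples
    print(run, row['path'], n_events)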
def main():
    description = ('Extract and fit the Single-Photoelectron spectrum '
                   'from N dl1 files simultaneously')
    parser = argparse.ArgumentParser(description=description,
                                     formatter_class=Formatter)
    parser.add_argument('-f', '--files', dest='input_paths', nargs='+',
                        help='path to the input dl1 run files')
    parser.add_argument('-o', '--output', dest='output_path', action='store',
                        required=True, help='path to store the output image')
    parser.add_argument('-s', '--fitter', dest='fitter', action='store',
                        default='GentileFitter',
                        choices=SpectrumFitterFactory.subclass_names,
                        help='SpectrumFitter to use')
    parser.add_argument('-c', '--config', dest='config', action='store',
                        default=None,
                        help='Path to SpectrumFitter configuration YAML file')
    parser.add_argument('-p', '--pixel', dest='plot_pixel', action='store',
                        default=None, type=int,
                        help='Pixel to plot. "-1" specifies the '
                             'entire camera')
    args = parser.parse_args()

    input_paths = args.input_paths
    output_path = args.output_path
    fitter_str = args.fitter
    config_path = args.config
    poi = args.plot_pixel

    readers = [DL1Reader(path) for path in input_paths]
    kwargs = dict(
        product_name=fitter_str,
        n_illuminations=len(readers),
        config_path=config_path
    )
    fitter = SpectrumFitterFactory.produce(**kwargs)

    output_dir = os.path.dirname(output_path)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
        print("Created directory: {}".format(output_dir))
    if os.path.exists(output_path):
        os.remove(output_path)

    charges = []
    for reader in readers:
        pixel, charge = reader.select_columns(['pixel', 'charge'])
        if poi != -1:
            charge_p = charge[pixel == poi]
        else:
            charge_p = charge
        charges.append(charge_p)
    fitter.apply(*charges)

    p_data = SPEPlotter()
    p_data.plot(fitter)
    p_data.save(output_path)
Example #11
def get_multiple_statistics():
    """
    The `aggregate` method allows multiple operations to be performed at once
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_stats = df[['tm', 'charge']].groupby('tm').agg(['mean', 'min', 'max'])
    print(df_stats)
    print(df_stats['charge']['mean'])
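`agg` with a list of operations returns a column MultiIndex, which is why the result is indexed as `df_stats['charge']['mean']` above. If flat column names are preferred, a minimal sketch:

def get_multiple_statistics_flat_columns():
    """
    Flatten the column MultiIndex produced by `agg` into single-level names.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_stats = df[['tm', 'charge']].groupby('tm').agg(['mean', 'min', 'max'])
    df_stats.columns = ['_'.join(col) for col in df_stats.columns]
    print(df_stats['charge_mean'])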
Example #12
def main():
    path = "/Volumes/gct-jason/astri_onsky_archive/d2019-10-03_simulations/gamma_1deg/run1_dl1.h5"
    # path = "/Volumes/gct-jason/astri_onsky_archive/d2019-05-15_simulations/gamma_1deg/run1_dl1_old.h5"
    reader = DL1Reader(path)
    df = reader.load_entire_table()
    image = df.groupby(['pixel']).sum()['photons']

    ci = CameraImage.from_mapping(reader.mapping)
    ci.image = image
    ci.save(get_plot("d190717_alpha/true_images.pdf"))
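Selecting the column before aggregating avoids summing every other column of the table; a minimal sketch of the same per-pixel image construction (the helper name is hypothetical):

def true_image_from_dl1(path):
    # Sum only the photons column per pixel, rather than the whole table.
    reader = DL1Reader(path)
    df = reader.load_entire_table()
    image = df.groupby('pixel')['photons'].sum()
    return reader.mapping, image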
Example #13
def get_mean_per_tm():
    """
    The Pandas groupby method can be used to calculate statistics per group
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_mean = df.groupby('tm').mean().reset_index()
    # reset_index() restores the tm column,
    # otherwise it will remain as the index
    print(df_mean)
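The `reset_index()` step can be avoided by asking `groupby` not to move the grouping key into the index; a minimal sketch:

def get_mean_per_tm_as_index_false():
    """
    `as_index=False` keeps `tm` as an ordinary column of the result.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    # numeric_only guards against non-numeric columns such as timestamps
    df_mean = df.groupby('tm', as_index=False).mean(numeric_only=True)
    print(df_mean)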
Example #14
def apply_different_statistic_to_different_column():
    """
    Passing a dict to `aggregate` allows you to specify a different operation
    depending on the column
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    f = dict(pixel='first', charge='std')
    df_stats = df[['tm', 'pixel', 'charge']].groupby('tm').agg(f)
    print(df_stats)
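The same per-column choice of statistic can be written with named aggregation (pandas 0.25+), which also gives the output columns readable names; a minimal sketch:

def apply_different_statistic_named_agg():
    """
    Named aggregation: output_name=(input column, operation).
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_stats = df.groupby('tm').agg(
        first_pixel=('pixel', 'first'),
        charge_std=('charge', 'std'),
    )
    print(df_stats)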
Example #15
def main():
    description = 'Plot DL1 images'
    parser = argparse.ArgumentParser(description=description,
                                     formatter_class=Formatter)
    parser.add_argument('-f', '--file', dest='input_path', required=True,
                        help='path to the HDF5 dl1 file')
    parser.add_argument('-o', '--output', dest='output_dir',
                        help='directory to save the plots')
    parser.add_argument('-t', dest='datetime', required=True,
                        help='Datetime to plot from (UTC)',
                        type=valid_datetime)
    parser.add_argument('-m', dest='max_events',
                        help='Max events', type=int)
    args = parser.parse_args()

    input_path = args.input_path
    output = args.output_dir
    dt = args.datetime
    max_events = args.max_events

    if output is None:
        output = input_path.replace("_dl1.h5", "_dl1images_dt")

    with DL1Reader(input_path) as reader:
        n_events = reader.n_events
        n_pixels = reader.n_pixels
        mapping = reader.mapping
        if max_events and max_events < n_events:
            n_events = max_events

        print(f"Ordering events from datetime: {dt}")
        t_cpu = reader.select_column("t_cpu").values.reshape(
            (n_events, n_pixels)
        )[:, 0]
        t_delta = t_cpu - np.datetime64(dt)
        indices = np.argsort(np.absolute(t_delta))
        if max_events:
            indices = indices[:max_events]

        p_image = ImagePlotter(mapping)
        desc = "Looping over events"
        for ientry, index in tqdm(enumerate(indices), desc=desc):
            df = reader.select_event_index(index)
            iev = df['iev'].values[0]
            t_cpu = df['t_cpu'].values[0]

            image_c = df['photons'].values
            image_t = df['pulse_time'].values
            image_h = df['pulse_height'].values

            p_image.set_image(iev, image_c, image_t, image_h)
            p_image.fig.suptitle(f"Event: {iev}, Time: {t_cpu}")
            p_image.save(join(output, f"i{ientry}_e{iev}.png"))
Example #16
def plot_imshow():
    """
    Plot the camera image using imshow (essentially a 2D histogram). Therefore
    does not include module gaps
    """
    r = DL1Reader("/Users/Jason/Software/CHECLabPy/refdata/Run17473_dl1.h5")
    camera = CameraImageImshow.from_mapping(r.mapping)
    camera.add_colorbar("Charge (mV ns)")
    for df in r.iterate_over_events():
        charge = df['charge'].values
        camera.image = charge
        plt.pause(0.1)
Example #17
def apply_custom_function_agg():
    """
    One can also apply a custom function inside the agg approach
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    f_camera_first_half = lambda g: df.loc[g.index].iloc[0]['tm'] < 32/2
    f = dict(pixel=f_camera_first_half, charge='std')
    df_stats = df[['tm', 'pixel', 'charge']].groupby('tm').agg(f)
    df_stats = df_stats.rename(columns={'pixel': 'camera_first_half'})
    print(df_stats)
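The lambda above reaches back into the full `df` through the group index; the same custom aggregation can be written with a named function, which is easier to read and reuse. A minimal sketch:

def apply_custom_function_agg_named():
    """
    Use a named function instead of a lambda inside `agg`.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64

    def camera_first_half(group):
        # True if the first row of this group lies in the first half of the camera
        return df.loc[group.index].iloc[0]['tm'] < 32 / 2

    f = dict(pixel=camera_first_half, charge='std')
    df_stats = df[['tm', 'pixel', 'charge']].groupby('tm').agg(f)
    df_stats = df_stats.rename(columns={'pixel': 'camera_first_half'})
    print(df_stats)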
Example #18
def main():
    poi = 1920

    files = [
        "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43520_dl1.h5",
        # "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43519_dl1.h5",
        "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43518_dl1.h5",
        # "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43517_dl1.h5",
        "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43516_dl1.h5",
        # "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43515_dl1.h5",
        "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43514_dl1.h5",
        # "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43513_dl1.h5",
        "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43512_dl1.h5",
        # "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43511_dl1.h5",
        "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43510_dl1.h5",
        # "/Volumes/gct-jason/data_checs/dynamicrange_180514/tf_poly/Run43509_dl1.h5",
    ]
    readers = [DL1Reader(i) for i in files]

    output_dir = os.path.abspath(os.path.dirname(__file__))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
        print("Created directory: {}".format(output_dir))

    charges = []
    desc = "Extracting charges from files"
    for r in tqdm(readers, desc=desc):
        p, c = r.select_columns(['pixel', 'charge'])
        c_pixel = c[p == poi]
        charges.append(c_pixel)
        # charges.append(c)

    fitter = GentileFitter(n_illuminations=len(charges))
    fitter.range = [-25, 170]

    fitter.apply(*charges)

    coeff = fitter.coeff
    print(yaml.dump(coeff, indent=3))

    p_spectrum = SpectrumPlotter()

    for i in range(len(readers)):
        hist = fitter.hist[i]
        edges = fitter.edges
        between = fitter.between
        fit_x = fitter.fit_x
        fit = fitter.fit[i]
        lambda_ = fitter.coeff['lambda_{}'.format(i)]
        p_spectrum.plot(hist, edges, between, fit_x, fit, lambda_)

    p_spectrum.save(os.path.join(output_dir, "multi_spectrum.pdf"))
Example #19
def process(file):
    input_path = file.dl1_path
    angular_response_path = file.angular_response_path
    illumination_profile_path = file.illumination_profile_path
    plot_dir = file.plot_dir

    ip = IlluminationProfile(angular_response_path)

    reader = DL1Reader(input_path)
    mapping = reader.mapping
    pixel, true = reader.select_columns(['pixel', 'mc_true'])
    xpix = mapping['xpix'].values
    ypix = mapping['ypix'].values
    dist = np.sqrt(xpix**2 + ypix**2)
    n_pixels = mapping.metadata['n_pixels']

    n_events = reader.n_events
    true_p = true.values.reshape((n_events, 2048)).mean(0)

    df = pd.DataFrame(
        dict(
            pixel=np.arange(n_pixels),
            distance=dist,
            true=true_p,
        ))

    pixel = df['pixel'].values
    true = df['true'].values
    dist = df['distance'].values

    params = polyfit(dist, true, [0, 2])
    params_norm = params / polyval(0, params)
    pixel_corrections = polyval(dist, params_norm)

    df_corr = pd.DataFrame(dict(
        pixel=pixel,
        correction=pixel_corrections,
    ))
    df_params = pd.DataFrame(params_norm)

    with HDF5Writer(illumination_profile_path) as writer:
        writer.write(correction=df_corr, params=df_params)
        writer.write_mapping(mapping)

    p_dvt = PixelScatter(ip)
    p_dvt.plot(dist, true, params)
    p_dvt.save(os.path.join(plot_dir, "illumination_profile.pdf"))

    p_f = CameraImage.from_mapping(mapping)
    p_f.image = pixel_corrections
    p_f.add_colorbar("Illumination Profile Correction")
    p_f.save(os.path.join(plot_dir, "illumination_profile_camera.pdf"))
Example #20
def main():
    description = 'Plot the charge spectrum from a dl1 file'
    parser = argparse.ArgumentParser(description=description,
                                     formatter_class=Formatter)
    parser.add_argument('-f',
                        '--file',
                        dest='input_path',
                        action='store',
                        required=True,
                        help='path to the dl1 HDF5 run file')
    parser.add_argument('-p',
                        '--pixel',
                        dest='pixel',
                        action='store',
                        type=int,
                        default=0,
                        help='pixel to plot the spectrum of')
    parser.add_argument('-C',
                        '--charge_col_name',
                        dest='charge_col_name',
                        action='store',
                        default='charge',
                        type=str,
                        help='The column name of the charge to '
                        'be used in the fit.')
    args = parser.parse_args()

    input_path = args.input_path
    pixel = args.pixel

    with DL1Reader(input_path) as reader:
        pixel_arr, charge = reader.select_columns(
            ['pixel', args.charge_col_name])
        charge_pix = charge[pixel_arr == pixel]

    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)

    _, edges, _ = ax.hist(charge,
                          bins=1000,
                          histtype='step',
                          density=True,  # `normed` was removed in newer matplotlib
                          label="All")
    ax.hist(charge_pix,
            bins=edges,
            histtype='step',
            density=True,
            label="Pixel {}".format(pixel))
    ax.legend(loc="upper right")
    plt.show()
Example #21
def plot_from_dl1():
    """
    Use the CHECLabPy mapping dataframe to plot an image
    """
    path = "/Users/Jason/Software/CHECLabPy/refdata/Run17473_dl1.h5"
    r = DL1Reader(path)
    camera = CameraImage.from_mapping(r.mapping)
    camera.add_colorbar("Charge (mV ns)")
    for i, df in enumerate(r.iterate_over_events()):
        if i > 10:
            break
        charge = df['charge'].values
        camera.image = charge
        plt.pause(0.1)
Example #22
def process(spe_paths, spe_config, output_path, poi):
    readers = [DL1Reader(path) for path in spe_paths]
    n_illuminations = len(readers)
    fitter = SiPMGentileFitter(n_illuminations, spe_config)

    charges = []
    for reader in readers:
        pixel, charge = reader.select_columns(['pixel', 'charge_cc'])
        charge_p = charge[pixel == poi]
        charges.append(charge_p)
    fitter.apply(*charges)

    p_spe = SPEPlotter()
    p_spe.plot(fitter)
    p_spe.save(output_path)
Example #23
def process(file):

    dl1_paths = file.dl1_paths
    pde = file.pde
    mc_calib_path = file.mc_calib_path
    output_path = file.intensity_resolution_path

    n_runs = len(dl1_paths)
    reader_list = [DL1Reader(p) for p in dl1_paths]
    mapping = reader_list[0].mapping
    n_pixels = reader_list[0].n_pixels
    n_rows = n_pixels * 1000

    with HDF5Reader(mc_calib_path) as reader:
        df = reader.read("data")
        mc_m = df['mc_m'].values

    cr = ChargeResolution(mc_true=True)
    cs = ChargeStatistics()

    desc0 = "Looping over files"
    for reader in tqdm(reader_list, total=n_runs, desc=desc0):
        pixel, charge, true = reader.select_columns(
            ['pixel', 'charge', 'mc_true'], stop=n_rows)
        true_photons = true / pde
        measured = charge / mc_m[pixel]

        f = true > 0
        true_photons = true_photons[f]
        measured = measured[f]

        cr.add(pixel, true_photons, measured)
        cs.add(pixel, true_photons, measured)
        reader.store.close()
    df_cr_pixel, df_cr_camera = cr.finish()
    df_cs_pixel, df_cs_camera = cs.finish()

    with HDF5Writer(output_path) as writer:
        writer.write(
            charge_resolution_pixel=df_cr_pixel,
            charge_resolution_camera=df_cr_camera,
            charge_statistics_pixel=df_cs_pixel,
            charge_statistics_camera=df_cs_camera,
        )
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
Example #24
def apply_custom_function():
    """
    Any function can be passed to the `apply` method, including numpy functions

    You will notice that the numpy std method produces a different result
    from the pandas result. That is because by default numpy computes the
    population standard deviation (ddof=0), whereas pandas applies Bessel's
    correction (ddof=1), giving an unbiased estimate of the
    population variance.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    df['tm'] = df['pixel'] // 64
    df_pd_std = df[['tm', 'charge']].groupby('tm').std()['charge']
    df_np_std = df[['tm', 'charge']].groupby('tm').apply(np.std)['charge']
    df_comparison = pd.DataFrame(dict(pd=df_pd_std, np=df_np_std))
    print(df_comparison)
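The difference described in the docstring comes down to the `ddof` argument: passing `ddof=1` to numpy reproduces the pandas value. A minimal sketch:

def compare_std_ddof():
    """
    numpy defaults to ddof=0 (population std); pandas defaults to ddof=1
    (Bessel-corrected sample std). With ddof=1 the two agree.
    """
    r = DL1Reader(PATH)
    df = r.load_entire_table()
    charge = df.loc[df['pixel'] // 64 == 0, 'charge']
    print(np.std(charge, ddof=1), charge.std())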
Example #25
def process(file):

    dl1_paths = file.dl1_paths
    pde = file.pde
    mc_calib_path = file.mc_calib_path

    with DL1Reader(dl1_paths[0]) as reader:
        n_pixels = reader.n_pixels
        mapping = reader.mapping
        cols = ['pixel', 'charge', 'mc_true']
        pixel, charge, true = reader.select_columns(cols)
        df = pd.DataFrame(dict(
            pixel=pixel,
            charge=charge,
            true=true,
        ))
        df_agg = df.groupby(['pixel', 'true']).agg({'charge': ['mean', 'std']}).reset_index()
        pixels = np.where(df.groupby('pixel').sum()['true'].values > 1000)[0]

    m_array = np.full(n_pixels, np.nan)
    for p in pixels:
        df_p = df_agg.loc[(df_agg['pixel'] == p) & (df_agg['true'] > 0)]
        x = df_p['true'].values / pde
        y = df_p['charge']['mean'].values
        yerr = df_p['charge']['std'].values
        yerr[np.isnan(yerr)] = 1000
        yerr[yerr == 0] = 1000
        c, m = polyfit(x, y, [1], w=y/yerr)
        m_array[p] = m

    df_calib = pd.DataFrame(dict(
        pixel=np.arange(n_pixels),
        mc_m=m_array,
    ))

    df_calib_mean = df_calib.copy()
    df_calib_mean['mc_m'] = np.nanmean(m_array)

    print("Average Gradient = {}".format(np.nanmean(m_array)))

    with HDF5Writer(mc_calib_path) as writer:
        writer.write(data=df_calib)
        writer.write_mapping(mapping)
        writer.write_metadata(n_pixels=n_pixels)
Example #26
def process(file):
    illumination = 50

    r1_path, _, _ = file.get_run_with_illumination(illumination, r1=True)
    dl1_path, _, _ = file.get_run_with_illumination(illumination, r1=False)
    poi = file.poi
    plot_dir = file.waveforms_plot_dir
    ff_path = file.ff_path

    with HDF5Reader(ff_path) as reader:
        df = reader.read("data")
        ff_m = df['ff_m'].values
        ff_c = df['ff_c'].values

    reader = TIOReader(r1_path)
    n_events = reader.n_events
    n_pixels = reader.n_pixels
    n_samples = reader.n_samples

    wfs = np.zeros((n_events, n_pixels, n_samples))
    desc = "Processing events"
    for wf in tqdm(reader, total=n_events, desc=desc):
        wfs[wf.iev] = wf

    reader = DL1Reader(dl1_path)
    iev, pixel, charge_1d = reader.select_columns(['iev', 'pixel', 'charge'])
    charge_1d = (charge_1d - ff_c[pixel]) / ff_m[pixel]
    charge = np.zeros((n_events, n_pixels))
    charge[iev, pixel] = charge_1d

    x = np.arange(n_samples)

    p_wf = WFPlotter(switch_backend=True)
    p_wf.plot(x, wfs[:, poi].T)
    p_wf.save(os.path.join(plot_dir, "p{}.pdf".format(poi)))

    p_wf = WFPlotter()
    p_wf.plot(x, wfs.mean(0).T)
    p_wf.save(get_plot(os.path.join(plot_dir, "average.pdf")))

    p_hist = HistPlotter(switch_backend=True)
    p_hist.plot(charge[:, poi])
    p_hist.save(os.path.join(plot_dir, "hist.pdf"))
Example #27
def datareadin(ieve):
    # path = r'C:\Users\Jamie Williams\Desktop\New folder\NSB200PE\data_Run030_dl1.h5'
    # A raw string avoids the invalid escape sequences produced by the
    # backslashes in the Windows path (the original used a bytes literal).
    path = (
        r'C:\Users\Jamie Williams\Desktop\New folder\Old data sets\Run17473_dl1.h5'
    )
    ievcount = 10
    reader = DL1Reader(path)
    reader.load_entire_table()
    charge = reader.select_column('charge').values
    iev = reader.select_column('iev').values
    charge = charge[iev == ievcount]
    #okpixel= okpixel[iev == ievcount]
    m = reader.mapping
    row = m['row'].values
    col = m['col'].values
    n_rows = m.metadata['n_rows']
    n_cols = m.metadata['n_columns']
    size = max(row + 1)
    okpixel = np.ones(len(charge))
    lencharge = len(charge)
    okpixel[ieve] = 0
    data = np.ma.zeros((n_rows, n_cols))
    data[row, col] = np.multiply(charge, okpixel)
    okpixel2 = np.ma.zeros((n_rows, n_cols))
    okpixel2[row, col] = okpixel
    if size == 480:
        (mu, sigma) = norm.fit(data)  #used to identify dead pixels
        for i1 in range(0, 48):
            for j1 in range(0, 48):
                if (data[i1, j1] < mu - sigma):
                    #if (data[row,col]<mu-10*sigma):   #do it without the loop?
                    okpixel2[i1, j1] = 0
    return {
        'data': data,
        'okpixel2': okpixel2,
        'size': size,
        'lencharge': lencharge
    }
Example #28
def main():
    description = ('Extract and fit the Single-Photoelectron spectrum '
                   'from N dl1 files simultaneously')
    parser = argparse.ArgumentParser(description=description,
                                     formatter_class=Formatter)
    parser.add_argument('-f',
                        '--files',
                        dest='input_paths',
                        nargs='+',
                        help='path to the input dl1 run files')
    parser.add_argument('-o',
                        '--output',
                        dest='output_path',
                        action='store',
                        help='path to store the output HDF5 file '
                        '(OPTIONAL, will be automatically set if '
                        'not specified)')
    parser.add_argument('-s',
                        '--fitter',
                        dest='fitter',
                        action='store',
                        default='GentileFitter',
                        choices=SpectrumFitterFactory.subclass_names,
                        help='SpectrumFitter to use')
    parser.add_argument('-c',
                        '--config',
                        dest='config',
                        action='store',
                        default=None,
                        help='Path to SpectrumFitter configuration YAML file')
    parser.add_argument('-p',
                        '--pixel',
                        dest='plot_pixel',
                        action='store',
                        default=None,
                        type=int,
                        help='Enter plot mode, and plot the spectrum and fit '
                        'for the pixel specified. "-1" specifies the '
                        'entire camera')
    args = parser.parse_args()

    input_paths = args.input_paths
    output_path = args.output_path
    fitter_str = args.fitter
    config_path = args.config
    plot_pixel = args.plot_pixel

    readers = [DL1Reader(path) for path in input_paths]
    kwargs = dict(product_name=fitter_str,
                  n_illuminations=len(readers),
                  config_path=config_path)
    fitter = SpectrumFitterFactory.produce(**kwargs)

    fit_processor = SpectrumFitProcessor(fitter, *readers)
    if plot_pixel is not None:
        p_fit = SpectrumFitPlotter()
        if plot_pixel == -1:
            charges = fit_processor.charges
            p_fit.plot_from_fitter(fitter, charges)
        else:
            charges = fit_processor.get_pixel_charges(plot_pixel)
            p_fit.plot_from_fitter(fitter, charges)

        if not output_path:
            name = '_spe_fit_p{}.pdf'.format(plot_pixel)
            if len(input_paths) == 1:
                output_path = input_paths[0].replace('_dl1.h5', name)
            else:
                output_dir = os.path.dirname(input_paths[0])
                output_path = os.path.join(output_dir, name)

        p_fit.save(output_path)
        exit()

    # fit_processor.process()
    fit_processor.multiprocess()

    if not output_path:
        if len(input_paths) == 1:
            output_path = input_paths[0].replace('_dl1.h5', '_spe.h5')
        else:
            output_dir = os.path.dirname(input_paths[0])
            output_path = os.path.join(output_dir, "spe.h5")
    output_dir = os.path.dirname(output_path)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
        print("Created directory: {}".format(output_dir))
    if os.path.exists(output_path):
        os.remove(output_path)

    print("Created HDFStore file: {}".format(output_path))
    with pd.HDFStore(output_path) as store:
        df_coeff, df_initial, df_array = fit_processor.get_df_result()
        store['coeff_pixel'] = df_coeff
        store['initial_pixel'] = df_initial
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', PerformanceWarning)
            store['array_pixel'] = df_array

        df_coeff, df_initial, df_array = fit_processor.get_df_result_camera()
        store['coeff_camera'] = df_coeff
        store['initial_camera'] = df_initial
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', PerformanceWarning)
            store['array_camera'] = df_array

        metadata = dict(files=input_paths,
                        fitter=fitter.__class__.__name__,
                        n_illuminations=fit_processor.n_readers,
                        n_pixels=fit_processor.n_pixels)
        store['metadata'] = pd.DataFrame()
        store.get_storer('metadata').attrs.metadata = metadata

        store['mapping'] = readers[0].mapping
        mapping_meta = readers[0].mapping.metadata
        store.get_storer('mapping').attrs.metadata = mapping_meta
Example #29
def poisson(k, lamb):
    """poisson pdf, parameter lamb is the fit parameter"""
    return (lamb**k / factorial(k)) * np.exp(-lamb)


def negLogLikelihood(params, data):
    """ the negative log-Likelohood-Function"""
    lnl = -np.sum(np.log(poisson(data, params[0])))
    return lnl


# get Poisson-distributed data: instead of random numbers
# (data = np.random.poisson(2, 1000)), read the 'mc_true' column from a dl1 file
r = DL1Reader(
    "/Volumes/gct-jason/mc_checs/dynamic_range_firstRun/sim_tel/Run43516_dl1.h5"
)
data = r.select_column('mc_true').values

# minimize the negative log-Likelihood

result = minimize(
    negLogLikelihood,  # function to minimize
    x0=np.ones(1),  # start value
    args=(data, ),  # additional arguments for function
    method='Powell',  # minimization method, see docs
)
# result is a scipy optimize result object, the fit parameters
# are stored in result.x
print(result)
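For a Poisson model the maximum-likelihood estimate of `lamb` has a closed form, the sample mean, which provides a quick cross-check of the minimiser:

# Analytic MLE for a Poisson rate: lambda_hat equals the mean of the data,
# so result.x should agree with data.mean()
print("fitted lambda:", result.x)
print("sample mean:  ", data.mean())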
def obtain_readers(files):
    return [DL1Reader(fp) for fp in files]