Ejemplo n.º 1
0
def get_MCtables(filename, config, start_id=0):
    """Build the event, bins and hits tables for one MC file.

    Parameters
    ----------
    filename : str
        Input MC file; its path and basename are stored in the event table.
    config : object
        Configuration providing xlim/ylim/zlim, nbins_x/y/z, Rmax,
        classification, segmentation, blob_delta_t and blob_delta_e.
    start_id : int, optional
        Offset added to the sequential dataset_id assigned to the events.

    Returns
    -------
    eventInfo : pd.DataFrame
        One row per event: event_id, binclass, pathname, basename and the
        new unique dataset_id.
    binsInfo : pd.DataFrame
        Single-row frame describing the binning limits/counts and Rmax.
    hits : pd.DataFrame
        Binned ACTIVE hits, labelled per config, keyed by dataset_id.
    """
    pathname, basename = os.path.split(filename)
    min_x, max_x = config.xlim
    min_y, max_y = config.ylim
    min_z, max_z = config.zlim
    bins_x = np.linspace(min_x, max_x, config.nbins_x)
    bins_y = np.linspace(min_y, max_y, config.nbins_y)
    bins_z = np.linspace(min_z, max_z, config.nbins_z)
    bins = (bins_x, bins_y, bins_z)

    hits = mio.load_mchits_df(filename)
    # Select ACTIVE hits only.
    hits = hits[hits.label == 'ACTIVE']
    particles = mio.load_mcparticles_df(filename)

    # Attach the labels requested by the configuration.
    if config.classification and config.segmentation:
        hits = utils.add_clf_seg_labels(hits,
                                        particles,
                                        delta_t=config.blob_delta_t,
                                        delta_e=config.blob_delta_e)
    elif config.classification:
        hits = utils.add_clf_labels(hits, particles)
    elif config.segmentation:
        hits = utils.add_seg_labels(hits,
                                    particles,
                                    delta_t=config.blob_delta_t,
                                    delta_e=config.blob_delta_e)
    else:
        hits = hits.reset_index()[['event_id', 'x', 'y', 'z', 'energy']]
    hits = utils.get_bin_indices(hits, bins, Rmax=config.Rmax)
    hits = hits.sort_values('event_id')
    eventInfo = hits[['event_id',
                      'binclass']].drop_duplicates().reset_index(drop=True)
    # Create a new unique identifier, sequential and offset by start_id.
    # Enumerating the column directly avoids the O(n) positional .iloc
    # lookup per row of the previous implementation (O(n^2) overall);
    # the mapping is identical because row order is preserved.
    dct_map = {
        event: i + start_id
        for i, event in enumerate(eventInfo.event_id)
    }
    # Add dataset_id, pathname and basename to eventInfo.
    eventInfo = eventInfo.assign(pathname=pathname,
                                 basename=basename,
                                 dataset_id=eventInfo.event_id.map(dct_map))
    # Add dataset_id to hits and drop event_id.
    hits = hits.assign(dataset_id=hits.event_id.map(dct_map))
    hits = hits.drop('event_id', axis=1)

    # Single-row summary of the binning, to be stored alongside the hits.
    binsInfo = pd.Series({
        'min_x': min_x,
        'max_x': max_x,
        'nbins_x': config.nbins_x,
        'min_y': min_y,
        'max_y': max_y,
        'nbins_y': config.nbins_y,
        'min_z': min_z,
        'max_z': max_z,
        'nbins_z': config.nbins_z,
        'Rmax': config.Rmax
    }).to_frame().T

    return eventInfo, binsInfo, hits
Ejemplo n.º 2
0
def plot_mc_event(event_id: int, fnames: str, event_type: str) -> None:
    """
    Plots the MC information of the event_id.
    """

    # Locate the input file that contains the requested event.
    fname = get_fname_with_event(event_id, fnames)
    if fname == '':
        print(f"\nEvent id: {event_id} NOT FOUND in input mc files.")
        return
    print(f"\nEvent id: {event_id}  contained in {fname}\n")

    # Select the MC particles and hits belonging to this event.
    mcParts = load_mcparticles_df(fname).loc[event_id]
    mcHits = load_mchits_df(fname).loc[event_id]

    # 3D scatter of the hits, colour-coded by energy (in keV).
    fig = plt.figure(figsize=(12, 9))
    ax = fig.add_subplot(111, projection='3d')
    ax.set_title(f"MC event {event_id}")
    ax.set_xlabel('X (mm)')
    ax.set_ylabel('Y (mm)')
    ax.set_zlabel('Z (mm)')
    p = ax.scatter(mcHits.x, mcHits.y, mcHits.z, c=(mcHits.energy / units.keV))
    cb = fig.colorbar(p, ax=ax)

    # Overlay the two true extrema as black stars.
    for extremum in get_true_extrema(mcParts, event_type):
        ax.scatter3D(extremum[0],
                     extremum[1],
                     extremum[2],
                     marker="*",
                     lw=2,
                     s=100,
                     color='black')

    cb.set_label('Energy (keV)')
    plt.show()
    return
Ejemplo n.º 3
0
def print_mc_event(event_id: int,
                   ifnames: str,
                   with_hits: bool = False) -> None:
    """Prints the information of the event corresponding to event_id."""

    # Locate the file that stores this event; bail out if none does.
    ifname = get_fname_with_event(event_id, ifnames)
    if ifname == '':
        print(f"\nEvent id: {event_id} NOT FOUND in input mc files.")
        return
    print(f"\nEvent Id: {event_id}  contained in {ifname}\n")

    # Fetch the particles and hits of the event and dump them.
    event_particles = load_mcparticles_df(ifname).loc[event_id]
    event_hits = load_mchits_df(ifname).loc[event_id]

    print_mc_particles(event_particles, event_hits, with_hits)

    return
Ejemplo n.º 4
0
def run_bb_analysis(detector     : Detector,
                    input_fnames : List[str],
                    output_fname : str,
                    params       : BBAnalysisParams
                   )            -> Tuple[pd.DataFrame,    # Event Counter
                                         pd.DataFrame,    # Event Data
                                         pd.DataFrame,    # Track Data
                                         pd.DataFrame] :  # Voxel Data:
    """Run the BB analysis over every event of every input file.

    For each input file the simulated/stored event counters are read from
    the '/MC/configuration' table, every event is analyzed with
    analyze_bb_event, and the accumulated event, track and voxel data are
    stored in `output_fname` (group 'FANAL') together with the counters.

    Parameters
    ----------
    detector     : Detector description; provides the fiducial checker.
    input_fnames : MC input file names to process.
    output_fname : Output file where all the results are stored.
    params       : Analysis parameters (veto_width, ...).

    Returns
    -------
    Tuple of four DataFrames: event counters, event data, track data,
    voxel data.
    """

    ### Data to collect
    all_events    = EventList()
    all_tracks    = TrackList()
    all_voxels    = VoxelList()
    event_counter = EventCounter()

    ### Obtaining the fiducial_checker
    fiducial_checker = detector.get_fiducial_checker(params.veto_width)

    ### Looping through all the input files
    # Progress is printed every `verbose_every` events; the step grows x10
    # each time 10 messages at the current step have been printed
    # (1, 10, 100, ...), so output stays bounded for large samples.
    verbose_every = 1
    for input_fname in input_fnames:

        # Updating simulated and stored event counters
        # (read from the MC configuration table of the input file).
        configuration_df = pd.read_hdf(input_fname, '/MC/configuration', mode='r')
        event_counter.simulated += \
            int(configuration_df[configuration_df.param_key == 'num_events'].param_value)
        event_counter.stored    += \
            int(configuration_df[configuration_df.param_key == 'saved_events'].param_value)

        # Getting event ids
        event_ids = get_event_numbers_in_file(input_fname)
        print(f'\n*** Processing {input_fname}  ({len(event_ids)} events) ...\n')

        # Getting mc hits & particles (whole file, sliced per event below)
        file_mcHits  = load_mchits_df(input_fname)
        file_mcParts = load_mcparticles_df(input_fname)

        # Looping through all the events in current input file
        for event_id in event_ids:

            # Updating counter of analyzed events
            event_counter.analyzed += 1
            logger.info(f"*** Analyzing event Id: {event_id} ...")

            # Analyze event, passing only this event's particles and hits
            event_data, event_tracks, event_voxels = \
                analyze_bb_event(detector, int(event_id),
                                 params, fiducial_checker,
                                 file_mcParts.loc[event_id, :],
                                 file_mcHits .loc[event_id, :])

            # Storing event data
            all_events.add(event_data)
            all_tracks.add(event_tracks)
            all_voxels.add(event_voxels)

            # Verbosing num analyzed events
            if (not(event_counter.analyzed % verbose_every)):
                print(f'* Num analyzed events: {event_counter.analyzed}')
            if (event_counter.analyzed == (10 * verbose_every)): verbose_every *= 10

    print(f'\n* Total analyzed events: {event_counter.analyzed}')

    # Filling filtered event counters
    event_counter.fill_filter_counters(all_events)

    ### Storing global analysis data
    print(f'\n* Storing results in output file ...\n  {output_fname}\n')
    all_events   .store(output_fname, 'FANAL')
    all_tracks   .store(output_fname, 'FANAL')
    all_voxels   .store(output_fname, 'FANAL')
    event_counter.store(output_fname, 'FANAL')

    ### Ending ...
    print('\n* BB analysis done !!\n')
    print(event_counter)

    return (event_counter.df(),
            all_events   .df(),
            all_tracks   .df(),
            all_voxels   .df())
Ejemplo n.º 5
0
def kr_dst(ifnames,
           sipm_map,
           key_sensor_fibres=100000,
           s1_time=1. * units.mus,
           verbose=False,
           ic=100):
    """Prepares an analysis dst for Krypton, including:

    1. True positions from the MC
    2. Computed positions from Barycenter (around SiPM with max charge) -- after integrating all time bins
    3. S1e from fibers --- MC response
    4. S2e from MC     --- MC response
    5. charge in the SiPMs (max, left, right, up, down)

    One csv file per readable input file is written; returns (GF, BF),
    the lists of successfully read and unreadable (mcparticles) files.
    """
    def get_file_name(ifname):
        # Derive the output csv name from the input name.
        # NOTE(review): for multi-dot names ('a.b.h5' -> 'ab.csv') the
        # intermediate dot is dropped; looks intentional but unverified.
        lname = ifname.split('.')
        t1 = ".".join(lname[1:-1])
        t = "".join([lname[0], t1])
        f = f"{t}.csv"
        return f

    def sipm_time_integral():
        # Total SiPM charge per (event_id, sensor_id), summed over all
        # time bins of `sipm_response` (from the enclosing scope).
        grouped_multiple = sipm_response.groupby(['event_id', 'sensor_id'
                                                  ]).agg({'charge': ['sum']})
        grouped_multiple.columns = ['tot_charge']
        return grouped_multiple.reset_index()

    def get_q(evt, ix):
        # Charge of sensor `ix` in event frame `evt`; 0 when the sensor
        # recorded nothing or `ix` equals the NN sentinel.
        # NOTE(review): `NN` is defined elsewhere in the module — confirm.
        if ix != NN:
            try:
                q = evt[evt.sensor_id == ix].tot_charge.values[0]
            except IndexError:
                print(
                    f'Warning no charge in SiPM adyacent to qmax, index ={ix}')
                q = 0
        else:
            q = 0
        return q

    def get_pos(vz, vq):
        # Charge-weighted barycenter of positions `vz` with charges `vq`.
        return np.dot(vz, vq) / np.sum(vq)

    def get_krdf():
        # Build the per-event Krypton dst for the current file, using
        # mcParts, fibers_response and sipm_response from enclosing scope.
        # Returns (krdf, number_of_events_processed).
        sipmdf = sipm_time_integral()

        if verbose:
            print(sipmdf)

        krdf = get_evt_true_positions_and_energy(mcParts)
        # S1 = fiber charge before s1_time; S2 = fiber charge after it.
        krdf['S1e'] = fibers_response[fibers_response.time < s1_time].groupby(
            'event_id').charge.sum()
        krdf['S2e'] = fibers_response[fibers_response.time > s1_time].groupby(
            'event_id').charge.sum()

        if verbose:
            print(krdf)

        xMax = []
        xPos = []
        yMax = []
        yPos = []

        qMax = []
        qL = []
        qR = []
        qU = []
        qD = []

        if verbose:
            print(krdf.index)

        ii = 0
        for i in krdf.index:
            # Print progress every `ic` events.
            if ii % ic == 0:
                print(f' event = {ii} event number = {i}')

            ii += 1

            # SiPM with the maximum integrated charge in this event.
            evt = sipmdf[sipmdf.event_id == i]
            qmax = evt.tot_charge.max()
            iqmax = evt[evt.tot_charge == qmax].sensor_id.values[0]

            # Its position and the positions/ids of its 4 neighbours
            # (left/right in x, up/down in y) from the SiPM map.
            qmaxdf = sipm_map[sipm_map.sensor_id == iqmax]
            xqmax, yqmax = qmaxdf.x.values[0], qmaxdf.y.values[0]
            xl, xr = qmaxdf.xl.values[0], qmaxdf.xr.values[0]
            yu, yd = qmaxdf.yu.values[0], qmaxdf.yd.values[0]

            ql = get_q(evt, qmaxdf.id_xl.values[0])
            qr = get_q(evt, qmaxdf.id_xr.values[0])
            qu = get_q(evt, qmaxdf.id_yu.values[0])
            qd = get_q(evt, qmaxdf.id_yd.values[0])

            # Barycenter position over {max, left, right} and {max, up, down}.
            xp = get_pos(np.array([xqmax, xl, xr]), np.array([qmax, ql, qr]))
            yp = get_pos(np.array([yqmax, yu, yd]), np.array([qmax, qu, qd]))

            xMax.append(xqmax)
            xPos.append(xp)
            yMax.append(yqmax)
            yPos.append(yp)
            qMax.append(qmax)
            qL.append(ql)
            qR.append(qr)
            qU.append(qu)
            qD.append(qd)

        krdf['xmax'] = xMax
        krdf['ymax'] = yMax
        krdf['xpos'] = xPos
        krdf['ypos'] = yPos
        krdf['qmax'] = qMax
        krdf['ql'] = qL
        krdf['qr'] = qR
        krdf['qu'] = qU
        krdf['qd'] = qD

        return krdf, ii

    # Glue files

    GF = []
    BF = []
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        for ifname in ifnames:
            print(f'reading file {ifname}')

            # Narrowed from a bare `except:` so KeyboardInterrupt /
            # SystemExit are no longer swallowed while skipping bad files.
            try:
                mcParts = load_mcparticles_df(ifname)
            except Exception:
                print(f'Failed reading mcparticles ={ifname}')
                BF.append(ifname)
                continue

            try:
                sns_response = load_mcsensor_response_df(ifname)
            except Exception:
                # NOTE(review): unlike the mcparticles failure above, this
                # path does not record the file in BF — confirm intended.
                print(f'Failed reading sns_response ={ifname}')
                continue

            GF.append(ifname)
            # Split the sensor response: fibers (high sensor ids) vs SiPMs.
            fibers_response = sns_response[sns_response.index.get_level_values(
                "sensor_id") >= key_sensor_fibres]
            sipm_response = sns_response[sns_response.index.get_level_values(
                "sensor_id") < key_sensor_fibres]

            krdf, nof = get_krdf()
            file = get_file_name(ifname)
            print(f'saving file {file}, with {nof} events')
            krdf.to_csv(file)

    return GF, BF