Example #1
def get_skel_extremes(vox_size, energy_type, strict_vox_size, hitc):

    voxels = plf.voxelize_hits(hitc.hits, vox_size, strict_vox_size,
                               energy_type)
    tracks = plf.make_track_graphs(voxels)

    df = pd.DataFrame(columns=[
        'event', 'trackID', 'energy', 'skel_extr1_x', 'skel_extr1_y',
        'skel_extr1_z', 'skel_extr2_x', 'skel_extr2_y', 'skel_extr2_z'
    ])

    if (len(voxels) == 0):
        return df

    vox_size_x = voxels[0].size[0]
    vox_size_y = voxels[0].size[1]
    vox_size_z = voxels[0].size[2]

    def get_track_energy(track):
        return sum([vox.Ehits for vox in track.nodes()])

    # sort tracks by energy, most energetic first
    tracks = sorted(tracks, key=get_track_energy, reverse=True)

    for c, t in enumerate(tracks, 0):
        tID = c
        energy = get_track_energy(t)

        extr1, extr2 = plf.find_extrema(t)
        extr1_pos = extr1.XYZ
        extr2_pos = extr2.XYZ

        list_of_vars = [
            hitc.event, tID, energy, extr1_pos[0], extr1_pos[1], extr1_pos[2],
            extr2_pos[0], extr2_pos[1], extr2_pos[2]
        ]

        df.loc[c] = list_of_vars
        try:
            types_dict
        except NameError:
            types_dict = dict(zip(df.columns, [type(x) for x in list_of_vars]))

    # change the dtype of each column to match the type of its variable
    # (done once, after the loop, rather than on every iteration)
    df = df.apply(lambda x: x.astype(types_dict[x.name]))

    return df
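The try/except NameError trick above infers the column dtypes from the first row, but it silently breaks if a types_dict already exists in an enclosing scope. A minimal alternative sketch (hypothetical helper name; same plf and pandas interfaces as above) collects plain dicts and lets pandas infer dtypes in one pass:

import pandas as pd
from invisible_cities.reco import paolina_functions as plf

def get_skel_extremes_rows(vox_size, energy_type, strict_vox_size, hitc):
    # Same logic as get_skel_extremes, but the frame is built once at the
    # end, so no per-column astype bookkeeping is needed.
    voxels = plf.voxelize_hits(hitc.hits, vox_size, strict_vox_size, energy_type)
    tracks = plf.make_track_graphs(voxels)

    def track_energy(t):
        return sum(vox.Ehits for vox in t.nodes())

    rows = []
    for tID, t in enumerate(sorted(tracks, key=track_energy, reverse=True)):
        extr1, extr2 = plf.find_extrema(t)
        rows.append(dict(event=hitc.event, trackID=tID, energy=track_energy(t),
                         skel_extr1_x=extr1.XYZ[0], skel_extr1_y=extr1.XYZ[1],
                         skel_extr1_z=extr1.XYZ[2],
                         skel_extr2_x=extr2.XYZ[0], skel_extr2_y=extr2.XYZ[1],
                         skel_extr2_z=extr2.XYZ[2]))
    return pd.DataFrame(rows)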
Example #2
                # Creating the smHits with the smeared energies and translated positions
                active_smHits = []
                for i in range(num_hits):
                    smHit = MCHit(hits_transPositions[i],
                                  active_mcHits[i].time, hits_smE[i], 'ACTIVE')
                    active_smHits.append(smHit)

                # Filtering hits outside the ACTIVE region (due to translation)
                active_smHits = [
                    hit for hit in active_smHits if hit.Z < ACTIVE_ZMAX
                ]

                # Voxelizing using the active_smHits ...
                event_voxels = voxelize_hits(active_smHits,
                                             voxel_size,
                                             strict_voxel_size=True)
                eff_voxel_size = event_voxels[0].size

                # Storing voxels info
                for voxel in event_voxels:
                    extend_voxels_reco_data(voxels_dict, event_number, voxel)

                # Check fiduciality
                voxels_minZ, voxels_maxZ, voxels_maxRad, veto_energy, fiducial_filter = \
                    check_event_fiduciality(event_voxels, FID_minZ, FID_maxZ,
                                            FID_maxRAD, MIN_VETO_ENERGY)

                # Storing data of events passing the smE filter
                extend_events_reco_data(events_dict,
                                        event_number,
Example #3
with tb.open_file(the_file) as h5in:
    table = h5in.root.CHITS.highTh.read()
    hits_df = pd.DataFrame.from_records(table)
    this_evt_df = hits_df[hits_df.event == evt_number]
    the_hits = []

    xs = this_evt_df.X
    ys = this_evt_df.Y
    zs = this_evt_df.Z
    es = this_evt_df.Ec

    for x, y, z, e in zip(xs, ys, zs, es):
        h = Hit(0, Cluster(0, xy(x, y), xy(0, 0), 0), z, e * 1000, xy(0, 0))
        the_hits.append(h)

voxels = voxelize_hits(the_hits, np.array([base_vsize, base_vsize,
                                           base_vsize]), False)

vsizex = voxels[0].size[0]
vsizey = voxels[0].size[1]
vsizez = voxels[0].size[2]

min_corner_x = min(v.X for v in voxels) - vsizex / 2.
min_corner_y = min(v.Y for v in voxels) - vsizey / 2.
min_corner_z = min(v.Z for v in voxels) - vsizez / 2.

x = [np.round(v.X / vsizex) for v in voxels]
y = [np.round(v.Y / vsizey) for v in voxels]
z = [np.round(v.Z / vsizez) for v in voxels]
e = [v.E for v in voxels]

x_min = int(min(x))
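The fragment is truncated here. A hypothetical continuation (an assumption about intent, following the x_min pattern above) shifts the rounded voxel coordinates to 0-based grid indices and accumulates the energies on a dense 3D array:

y_min = int(min(y))
z_min = int(min(z))

nx = int(max(x)) - x_min + 1
ny = int(max(y)) - y_min + 1
nz = int(max(z)) - z_min + 1

# accumulate voxel energies on a dense 3D grid
grid = np.zeros((nx, ny, nz))
for xi, yi, zi, ei in zip(x, y, z, e):
    grid[int(xi) - x_min, int(yi) - y_min, int(zi) - z_min] += ei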
Example #4
    table = h5in.root.RECO.Events.read()
    hits_df = pd.DataFrame.from_records(table)
    this_evt_df = hits_df[hits_df.event == evt_number]
    the_hits = []
    ## exclude NN hits from plot
    xs = this_evt_df[this_evt_df.Q >= 0].X
    ys = this_evt_df[this_evt_df.Q >= 0].Y
    zs = this_evt_df[this_evt_df.Q >= 0].Z
    es = this_evt_df[this_evt_df.Q >= 0].E

    for x, y, z, e in zip(xs, ys, zs, es):
        h = Hit(0, Cluster(0, xy(x, y), xy(0, 0), 0), z * drift_velocity, e,
                xy(0, 0))
        the_hits.append(h)

voxels = voxelize_hits(the_hits, np.array([10., 10., 10.]), False)

fig = plt.figure()  #figsize=(20, 10))
ax = fig.add_subplot(111, projection='3d')
#ax = fig.gca(projection='3d')
#ax.set_aspect("equal")

energies = [v.E for v in voxels]
energies = np.array(energies)
min_energy = energies.min()
max_energy = energies.max()
print('Minimum energy = {}, maximum energy = {}'.format(
    min_energy, max_energy))

max_x = max_y = max_z = -1.e6
min_x = min_y = min_z = 1.e6
Example #5
from invisible_cities.reco import paolina_functions as plf

df = pd.read_csv("data_1000keV_local.csv",
                 usecols=["event_number", "x", "y", "z"])

mask = df.event_number < 20
df = df[mask]

trk_lengths = []
for i in range(df.event_number.max() + 1):
    hit_list = [
        BHit(row.x, row.y, row.z, 1.)
        for indx, row in df[df.event_number == i].iterrows()
    ]
    if len(hit_list) == 0:  # guard: some event numbers may be absent
        continue
    voxels = plf.voxelize_hits(hit_list, np.array((1., 1., 1.)))
    tracks = plf.make_track_graphs(voxels)

    all_lengths = [plf.length(t) for t in tracks]
    trk_lengths.append(max(all_lengths))

plt.figure(0)
plt.hist(trk_lengths, range=(0, 200), bins=70)
plt.xlabel("Track length (mm)")
plt.savefig(
    "/data5/users/miryam/temp/voxel/img/voxel_track_length_1000keV_local_20events.pdf"
)
plt.close(0)
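An equivalent formulation with groupby avoids rescanning the full DataFrame once per event id and visits only the event numbers actually present; a sketch under the same plf/BHit assumptions:

trk_lengths = []
for evt, df_i in df.groupby("event_number"):
    # one BHit of unit energy per row of this event
    hit_list = [BHit(row.x, row.y, row.z, 1.) for row in df_i.itertuples()]
    voxels = plf.voxelize_hits(hit_list, np.array((1., 1., 1.)))
    tracks = plf.make_track_graphs(voxels)
    trk_lengths.append(max(plf.length(t) for t in tracks))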
Example #6
def create_tracks_with_skel_extremes(vox_size, energy_type, strict_vox_size,
                                     blob_radius, df_extr, hitc):
    voxels = plf.voxelize_hits(hitc.hits, vox_size, strict_vox_size,
                               energy_type)
    tracks = plf.make_track_graphs(voxels)

    df = pd.DataFrame(columns=[
        'event', 'trackID', 'energy', 'length', 'numb_of_voxels',
        'numb_of_hits', 'numb_of_tracks', 'x_min', 'y_min', 'z_min', 'x_max',
        'y_max', 'z_max', 'r_max', 'x_ave', 'y_ave', 'z_ave', 'extreme1_x',
        'extreme1_y', 'extreme1_z', 'extreme2_x', 'extreme2_y', 'extreme2_z',
        'blob1_x', 'blob1_y', 'blob1_z', 'blob2_x', 'blob2_y', 'blob2_z',
        'eblob1', 'eblob2', 'ovlp_blob_energy', 'vox_size_x', 'vox_size_y',
        'vox_size_z'
    ])

    if (len(voxels) == 0):
        return df

    vox_size_x = voxels[0].size[0]
    vox_size_y = voxels[0].size[1]
    vox_size_z = voxels[0].size[2]

    def get_track_energy(track):
        return sum([vox.Ehits for vox in track.nodes()])

    # sort tracks by energy, most energetic first
    tracks = sorted(tracks, key=get_track_energy, reverse=True)

    for c, t in enumerate(tracks, 0):
        tID = c
        energy = get_track_energy(t)
        length = plf.length(t)
        numb_of_hits = len([h for vox in t.nodes() for h in vox.hits])
        numb_of_voxels = len(t.nodes())
        numb_of_tracks = len(tracks)

        min_x = min([h.X for v in t.nodes() for h in v.hits])
        max_x = max([h.X for v in t.nodes() for h in v.hits])
        min_y = min([h.Y for v in t.nodes() for h in v.hits])
        max_y = max([h.Y for v in t.nodes() for h in v.hits])
        min_z = min([h.Z for v in t.nodes() for h in v.hits])
        max_z = max([h.Z for v in t.nodes() for h in v.hits])
        max_r = max([
            np.sqrt(h.X * h.X + h.Y * h.Y) for v in t.nodes() for h in v.hits
        ])

        pos = [h.pos for v in t.nodes() for h in v.hits]
        e = [getattr(h, energy_type.value) for v in t.nodes() for h in v.hits]
        ave_pos = np.average(pos, weights=e, axis=0)

        # classic paolina extremes
        extr1, extr2 = plf.find_extrema(t)
        extr1_pos = extr1.XYZ
        extr2_pos = extr2.XYZ

        t_extr = df_extr[df_extr.trackID == tID]
        if len(t_extr) == 0:
            blob_pos1 = np.array([1.e6, 1.e6, 1.e6])
            blob_pos2 = np.array([1.e6, 1.e6, 1.e6])
            e_blob1 = -1
            e_blob2 = -1
            overlap = -1
        else:
            blob_pos1 = np.array([
                t_extr.skel_extr1_x.values[0], t_extr.skel_extr1_y.values[0],
                t_extr.skel_extr1_z.values[0]
            ])
            blob_pos2 = np.array([
                t_extr.skel_extr2_x.values[0], t_extr.skel_extr2_y.values[0],
                t_extr.skel_extr2_z.values[0]
            ])
            e_blob1, e_blob2, hits_blob1, hits_blob2, blob_pos1, blob_pos2 = blob_energies_hits(
                t, blob_radius, blob_pos1, blob_pos2)
            overlap = sum(
                [h.E for h in set(hits_blob1).intersection(set(hits_blob2))])

        list_of_vars = [
            hitc.event, tID, energy, length, numb_of_voxels, numb_of_hits,
            numb_of_tracks, min_x, min_y, min_z, max_x, max_y, max_z, max_r,
            ave_pos[0], ave_pos[1], ave_pos[2], extr1_pos[0], extr1_pos[1],
            extr1_pos[2], extr2_pos[0], extr2_pos[1], extr2_pos[2],
            blob_pos1[0], blob_pos1[1], blob_pos1[2], blob_pos2[0],
            blob_pos2[1], blob_pos2[2], e_blob1, e_blob2, overlap, vox_size_x,
            vox_size_y, vox_size_z
        ]
        df.loc[c] = list_of_vars
        try:
            types_dict
        except NameError:
            types_dict = dict(zip(df.columns, [type(x) for x in list_of_vars]))

    #change dtype of columns to match type of variables
    df = df.apply(lambda x: x.astype(types_dict[x.name]))

    return df
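The two helpers are meant to be chained: the skeleton extremes returned by get_skel_extremes (Example #1) seed the blob positions used here. An illustrative call sequence, assuming the same configuration variables as above:

# df_extr carries the skeleton extrema per trackID; the track table then
# re-uses them as blob seeds.
df_extr   = get_skel_extremes(vox_size, energy_type, strict_vox_size, hitc)
df_tracks = create_tracks_with_skel_extremes(vox_size, energy_type,
                                             strict_vox_size, blob_radius,
                                             df_extr, hitc)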
Example #7
def fanal_reco(det_name,    # Detector name: 'new', 'next100', 'next500'
               event_type,  # Event type: 'bb0nu', 'Tl208', 'Bi214'
               fwhm,        # FWHM at Qbb
               e_min,       # Minimum smeared energy for energy filtering
               e_max,       # Maximum smeared energy for energy filtering
               voxel_size,  # Voxel size (x, y, z)
               voxel_Eth,   # Voxel energy threshold
               veto_width,  # Veto width for fiducial filtering
               min_veto_e,  # Minimum energy in veto for fiducial filtering
               files_in,    # Input files
               event_range, # Range of events to analyze: all, ... ??
               file_out,    # Output file
               compression, # Compression of output file: 'ZLIB1', 'ZLIB4',
                            # 'ZLIB5', 'ZLIB9', 'BLOSC5', 'BLZ4HC5'
               verbosity_level):


    ### LOGGER
    logger = get_logger('FanalReco', verbosity_level)


    ### DETECTOR NAME & its ACTIVE dimensions
    det_name = getattr(DetName, det_name)
    ACTIVE_dimensions = get_active_size(det_name)
    fid_dimensions    = get_fiducial_size(det_name, veto_width)


    ### RECONSTRUCTION DATA
    # Smearing energy settings
    fwhm_Qbb  = fwhm * Qbb
    sigma_Qbb = fwhm_Qbb / 2.355
    assert e_max > e_min, 'SmE_filter settings not valid. e_max must be higher than e_min.'


    ### PRINTING GENERAL INFO
    print('\n***********************************************************************************')
    print('***** Detector: {}'.format(det_name.name))
    print('***** Reconstructing {} events'.format(event_type))
    print('***** Energy Resolution: {:.2f}% fwhm at Qbb'.format(fwhm / units.perCent))
    print('***** Voxel Size: ({}, {}, {}) mm'.format(voxel_size[0] / units.mm,
                                                     voxel_size[1] / units.mm,
                                                     voxel_size[2] / units.mm))
    print('***********************************************************************************\n')

    print('* Sigma at Qbb: {:.3f} keV.\n'.format(sigma_Qbb / units.keV))

    print('* Voxel_size: ({}, {}, {}) mm'.format(voxel_size[0] / units.mm,
                                                 voxel_size[1] / units.mm,
                                                 voxel_size[2] / units.mm))
    print('  Voxel Eth:  {:4.1f} keV\n'.format(voxel_Eth/units.keV))

    print('* Detector-Active dimensions [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}'
          .format(ACTIVE_dimensions.z_min, ACTIVE_dimensions.z_max,
                  ACTIVE_dimensions.rad))
    print('         ... fiducial limits [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}\n'
          .format(fid_dimensions.z_min, fid_dimensions.z_max, fid_dimensions.rad))

    print('* {0} {1} input files:'.format(len(files_in), event_type))
    for iFileName in files_in:
        print(' ', iFileName)


    ### OUTPUT FILE, ITS GROUPS & ATTRIBUTES
    # Output file
    oFile = tb.open_file(file_out, 'w', filters = tbl_filters(compression))

    # Reco group Name
    reco_group_name = get_reco_group_name(fwhm/units.perCent, voxel_size)
    oFile.create_group('/', 'FANALIC')
    oFile.create_group('/FANALIC', reco_group_name[9:])

    print('\n* Output file name:', file_out)
    print('  Reco group name:  {}\n'.format(reco_group_name))

    # Attributes
    oFile.set_node_attr(reco_group_name, 'input_sim_files',           files_in)
    oFile.set_node_attr(reco_group_name, 'event_type',                event_type)
    oFile.set_node_attr(reco_group_name, 'energy_resolution',         fwhm/units.perCent)
    oFile.set_node_attr(reco_group_name, 'voxel_sizeX',               voxel_size[0])
    oFile.set_node_attr(reco_group_name, 'voxel_sizeY',               voxel_size[1])
    oFile.set_node_attr(reco_group_name, 'voxel_sizeZ',               voxel_size[2])
    oFile.set_node_attr(reco_group_name, 'voxel_Eth',                 voxel_Eth)
    oFile.set_node_attr(reco_group_name, 'smE_filter_Emin',           e_min)
    oFile.set_node_attr(reco_group_name, 'smE_filter_Emax',           e_max)
    oFile.set_node_attr(reco_group_name, 'fiducial_filter_VetoWidth', veto_width)
    oFile.set_node_attr(reco_group_name, 'fiducial_filter_MinVetoE',  min_veto_e)


    ### DATA TO STORE
    # Event counters
    simulated_events = 0
    stored_events    = 0
    analyzed_events  = 0
    toUpdate_events  = 1

    # Dictionaries for events & voxels data
    events_dict = get_events_reco_dict()
    voxels_dict = get_voxels_reco_dict()


    ### RECONSTRUCTION PROCEDURE
    # Looping through all the input files
    for iFileName in files_in:
        # Updating simulated and stored event counters
        configuration_df  = pd.read_hdf(iFileName, '/MC/configuration', mode='r')
        simulated_events += int(configuration_df[configuration_df.param_key == 'num_events'].param_value)
        stored_events    += int(configuration_df[configuration_df.param_key == 'saved_events'].param_value)

        # Getting event numbers
        file_extents = pd.read_hdf(iFileName, '/MC/extents', mode='r')
        file_event_numbers = file_extents.evt_number

        print('* Processing {0}  ({1} events) ...'.format(iFileName, len(file_event_numbers)))

        # Getting mc hits
        file_mcHits = load_mc_hits(iFileName)

        # Looping through all the events in iFile
        for event_number in file_event_numbers:

            # Updating counter of analyzed events
            analyzed_events += 1
            logger.info('Reconstructing event Id: {0} ...'.format(event_number))

            # Getting event data
            event_data = get_event_reco_data()
            event_data['event_id'] = event_number
            
            event_mcHits  = file_mcHits.loc[event_number, :]
            active_mcHits = event_mcHits[event_mcHits.label == 'ACTIVE'].copy()

            event_data['num_MCparts'] = get_num_mc_particles(file_extents, event_number)
            event_data['num_MChits']  = len(active_mcHits)
            
            # The event MC energy is the sum of the energies of all the hits,
            # except for Bi214 events, where the number of S1s in the event
            # is also taken into account
            if (event_type == 'Bi214'):
                event_data['mcE'] = get_mc_energy(active_mcHits)
            else:
                event_data['mcE'] = active_mcHits.E.sum()
                
            # Smearing the event energy
            event_data['smE'] = smear_evt_energy(event_data['mcE'], sigma_Qbb, Qbb)

            # Applying the smE filter
            event_data['smE_filter'] = (e_min <= event_data['smE'] <= e_max)

            # Verbosing
            logger.info('  Num mcHits: {0:3}   mcE: {1:.1f} keV   smE: {2:.1f} keV   smE_filter: {3}' \
                        .format(event_data['num_MChits'], event_data['mcE']/units.keV,
                                event_data['smE']/units.keV, event_data['smE_filter']))
                
            # For those events passing the smE filter:
            if event_data['smE_filter']:

                # Smearing hit energies
                smearing_factor = event_data['smE'] / event_data['mcE']
                active_mcHits['smE'] = active_mcHits['E'] * smearing_factor

                # Translating hit Z positions from delayed hits
                translate_hit_positions(det_name, active_mcHits, DRIFT_VELOCITY)

                # Creating the IChits with the smeared energies and translated Z positions
                # to be passed to paolina functions
                #IChits = []
                #for i, hit in active_mcHits[active_mcHits.shifted_z < ACTIVE_dimensions.z_max].iterrows():
                #    IChit = MCHit((hit.x, hit.y, hit.shifted_z), hit.time, hit.smE, 'ACTIVE')
                #    IChits.append(IChit)
                IChits = active_mcHits[(active_mcHits.shifted_z < ACTIVE_dimensions.z_max) &
                                       (active_mcHits.shifted_z > ACTIVE_dimensions.z_min)] \
                    .apply(lambda hit: MCHit((hit.x, hit.y, hit.shifted_z),
                                             hit.time, hit.smE, 'ACTIVE'), axis=1).tolist()

                # Voxelizing using the IChits ...
                event_voxels = voxelize_hits(IChits, voxel_size, strict_voxel_size=False)
                event_data['num_voxels'] = len(event_voxels)

                eff_voxel_size = event_voxels[0].size
                event_data['voxel_sizeX'] = eff_voxel_size[0]
                event_data['voxel_sizeY'] = eff_voxel_size[1]
                event_data['voxel_sizeZ'] = eff_voxel_size[2]
    
                # Storing voxels info
                for voxel_id in range(len(event_voxels)):
                    extend_voxels_reco_dict(voxels_dict, event_number, voxel_id,
                                            event_voxels[voxel_id], voxel_Eth)
                    
                # Check fiduciality
                event_data['voxels_minZ'], event_data['voxels_maxZ'], \
                event_data['voxels_maxRad'], event_data['veto_energy'], \
                event_data['fid_filter'] = \
                check_event_fiduciality(det_name, veto_width, min_veto_e, event_voxels)
                   
                # Verbosing
                logger.info('  NumVoxels: {:3}   minZ: {:.1f} mm   maxZ: {:.1f} mm   maxR: {:.1f} mm   veto_E: {:.1f} keV   fid_filter: {}' \
                            .format(event_data['num_voxels'], event_data['voxels_minZ'],
                                    event_data['voxels_maxZ'], event_data['voxels_maxRad'],
                                    event_data['veto_energy'] / units.keV,
                                    event_data['fid_filter']))
                
                for voxel in event_voxels:
                    logger.debug('    Voxel pos: ({:5.1f}, {:5.1f}, {:5.1f}) mm   E: {:5.1f} keV'\
                                 .format(voxel.X/units.mm, voxel.Y/units.mm,
                                         voxel.Z/units.mm, voxel.E/units.keV))

            # Storing event_data
            extend_events_reco_dict(events_dict, event_data)

            # Verbosing
            if not analyzed_events % toUpdate_events:
                print('* Num analyzed events: {}'.format(analyzed_events))
            if analyzed_events == 10 * toUpdate_events:
                toUpdate_events *= 10
            

    ### STORING RECONSTRUCTION DATA
    # Storing events and voxels dataframes
    print('\n* Storing data in the output file: {}'.format(file_out))
    store_events_reco_dict(file_out, reco_group_name, events_dict)
    store_voxels_reco_dict(file_out, reco_group_name, voxels_dict)

    # Storing event counters as attributes
    smE_filter_events = sum(events_dict['smE_filter'])
    fid_filter_events = sum(events_dict['fid_filter'])
    store_events_reco_counters(oFile, reco_group_name, simulated_events,
                               stored_events, smE_filter_events, fid_filter_events)

    oFile.close()
    print('* Reconstruction done !!\n')

    # Printing reconstruction numbers
    print('* Event counters ...')
    print('''  Simulated events:  {0:9}
  Stored events:     {1:9}
  smE_filter events: {2:9}
  fid_filter events: {3:9}\n'''
        .format(simulated_events, stored_events, smE_filter_events, fid_filter_events))
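For orientation, an illustrative invocation is sketched below; every value is an assumption (only 'all' and the compression labels appear in the signature comments above):

# Hypothetical call; numbers are placeholders, not documented defaults.
fanal_reco(det_name        = 'next100',
           event_type      = 'bb0nu',
           fwhm            = 0.7  * units.perCent,
           e_min           = 2.40 * units.MeV,
           e_max           = 2.50 * units.MeV,
           voxel_size      = (10. * units.mm, 10. * units.mm, 10. * units.mm),
           voxel_Eth       = 2.   * units.keV,
           veto_width      = 20.  * units.mm,
           min_veto_e      = 10.  * units.keV,
           files_in        = ['sim_0.h5'],
           event_range     = 'all',
           file_out        = 'reco_0.h5',
           compression     = 'ZLIB4',
           verbosity_level = 'INFO')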
Example #8
            reco_["hit_geocorr"] = XYcorr(hh.X,hh.Y).value
            reco_["hit_ltcorr"] = LTcorr(hh.Z,hh.X,hh.Y).value**(ftlife)
            
            reco_.append()

A_evtnum = np.array(A_evtnum)

# Run Paolina for many events: note we now assume that all events have >= 2 hits.
A_eblob1 = []; A_eblob2 = []; A_emtrk = []; A_ntrks = []; A_lmtrk = []; A_nvox = []
for nevt in range(len(hitc_cevt)):
    
    hitc = hitc_cevt[nevt]
    print("Track {0} with {1} hits...".format(nevt, len(hitc)))

    # Make the tracks.
    voxels = plf.voxelize_hits(hitc, vox_size)
    trks = plf.make_track_graphs(voxels)
    l_etrks_all = []
    for t in trks:
        if len(t.nodes()) < 1:
            etrk = 0
        else:
            etrk = sum([vox.E for vox in t.nodes()])
        l_etrks_all.append(etrk)
    
    Eblob1 = -1; Eblob2 = -1; etmax = -1; ltrk = -1
    if len(l_etrks_all) > 0:

        # The "max" track is the one with the most energy.
        itmax = np.argmax(l_etrks_all)
        etmax = sum([vox.E for vox in trks[itmax].nodes()])
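The fragment stops right after selecting the most energetic track. The remaining placeholders (ltrk, Eblob1, Eblob2) would typically be filled with the paolina calls used in the other examples; a sketch, assuming blob_radius is a pre-defined analysis parameter:

        # Sketch of the usual continuation (paolina API as in the other
        # examples; blob_radius is an assumed analysis parameter).
        ltrk = plf.length(trks[itmax])
        Eblob1, Eblob2 = plf.blob_energies(trks[itmax], blob_radius)
        if Eblob2 > Eblob1:                 # keep blob 1 as the more energetic
            Eblob1, Eblob2 = Eblob2, Eblob1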
Example #9
    def create_extract_track_blob_info(hitc):
        df = pd.DataFrame(columns=list(types_dict_tracks.keys()))
        if len(hitc.hits) > max_num_hits:
            return df, [], hitc, True  # match the 4-tuple returned below
        # track_hitc is a new HitCollection object that contains hits belonging to tracks, plus hits that could not be corrected
        track_hitc = evm.HitCollection(hitc.event, hitc.time)
        out_of_map = np.any(np.isnan([h.Ep for h in hitc.hits]))
        if out_of_map:
            # add NaN hits to track_hitc; their track_id will be -1
            track_hitc.hits.extend([h for h in hitc.hits if np.isnan(h.Ep)])
            hits_without_nan = [h for h in hitc.hits if np.isfinite(h.Ep)]
            #create new Hitcollection object but keep the name hitc
            hitc = evm.HitCollection(hitc.event, hitc.time)
            hitc.hits = hits_without_nan

        mod_voxels = []  # avoid a NameError in the return when no hits survive
        if len(hitc.hits) > 0:
            voxels = plf.voxelize_hits(hitc.hits, vox_size, strict_vox_size,
                                       evm.HitEnergy.Ep)
            (mod_voxels, dropped_voxels) = plf.drop_end_point_voxels(
                voxels, energy_threshold, min_voxels)
            tracks = plf.make_track_graphs(mod_voxels)

            for v in dropped_voxels:
                track_hitc.hits.extend(v.hits)

            vox_size_x = voxels[0].size[0]
            vox_size_y = voxels[0].size[1]
            vox_size_z = voxels[0].size[2]
            del voxels
            # sort tracks by energy, most energetic first
            tracks = sorted(tracks, key=plf.get_track_energy, reverse=True)

            track_hits = []
            for c, t in enumerate(tracks, 0):
                tID = c
                energy = plf.get_track_energy(t)
                length = plf.length(t)
                numb_of_hits = len([h for vox in t.nodes() for h in vox.hits])
                numb_of_voxels = len(t.nodes())
                numb_of_tracks = len(tracks)
                pos = [h.pos for v in t.nodes() for h in v.hits]
                x, y, z = map(np.array, zip(*pos))
                r = np.sqrt(x**2 + y**2)

                e = [h.Ep for v in t.nodes() for h in v.hits]
                ave_pos = np.average(pos, weights=e, axis=0)
                ave_r = np.average(r, weights=e, axis=0)
                extr1, extr2 = plf.find_extrema(t)
                extr1_pos = extr1.XYZ
                extr2_pos = extr2.XYZ

                blob_pos1, blob_pos2 = plf.blob_centres(t, blob_radius)

                e_blob1, e_blob2, hits_blob1, hits_blob2 = plf.blob_energies_and_hits(
                    t, blob_radius)
                overlap = float(
                    sum(h.Ep for h in set(hits_blob1).intersection(
                        set(hits_blob2))))
                list_of_vars = [
                    hitc.event, tID, energy, length, numb_of_voxels,
                    numb_of_hits, numb_of_tracks,
                    min(x),
                    min(y),
                    min(z),
                    min(r),
                    max(x),
                    max(y),
                    max(z),
                    max(r), *ave_pos, ave_r, *extr1_pos, *extr2_pos,
                    *blob_pos1, *blob_pos2, e_blob1, e_blob2, overlap,
                    vox_size_x, vox_size_y, vox_size_z
                ]

                df.loc[c] = list_of_vars

                for vox in t.nodes():
                    for hit in vox.hits:
                        hit.track_id = tID
                        track_hits.append(hit)

            #change dtype of columns to match type of variables
            df = df.apply(lambda x: x.astype(types_dict_tracks[x.name]))
            track_hitc.hits.extend(track_hits)
        return df, mod_voxels, track_hitc, out_of_map
Example #10
    def create_extract_track_blob_info(hitc):
        voxels = plf.voxelize_hits(hitc.hits, vox_size, strict_vox_size,
                                   energy_type)
        tracks = plf.make_track_graphs(voxels)

        df = pd.DataFrame(columns=[
            'event', 'trackID', 'energy', 'length', 'numb_of_voxels',
            'numb_of_hits', 'numb_of_tracks', 'x_min', 'y_min', 'z_min',
            'x_max', 'y_max', 'z_max', 'r_max', 'x_ave', 'y_ave', 'z_ave',
            'extreme1_x', 'extreme1_y', 'extreme1_z', 'extreme2_x',
            'extreme2_y', 'extreme2_z', 'blob1_x', 'blob1_y', 'blob1_z',
            'blob2_x', 'blob2_y', 'blob2_z', 'eblob1', 'eblob2',
            'ovlp_blob_energy', 'vox_size_x', 'vox_size_y', 'vox_size_z'
        ])

        if (len(voxels) == 0):
            return df, None

        vox_size_x = voxels[0].size[0]
        vox_size_y = voxels[0].size[1]
        vox_size_z = voxels[0].size[2]

        def get_track_energy(track):
            return sum([vox.Ehits for vox in track.nodes()])

        # sort tracks by energy, most energetic first
        tracks = sorted(tracks, key=get_track_energy, reverse=True)

        track_hits = []

        for c, t in enumerate(tracks, 0):
            tID = c
            energy = get_track_energy(t)
            length = plf.length(t)
            numb_of_hits = len([h for vox in t.nodes() for h in vox.hits])
            numb_of_voxels = len(t.nodes())
            numb_of_tracks = len(tracks)

            min_x = min([h.X for v in t.nodes() for h in v.hits])
            max_x = max([h.X for v in t.nodes() for h in v.hits])
            min_y = min([h.Y for v in t.nodes() for h in v.hits])
            max_y = max([h.Y for v in t.nodes() for h in v.hits])
            min_z = min([h.Z for v in t.nodes() for h in v.hits])
            max_z = max([h.Z for v in t.nodes() for h in v.hits])
            max_r = max([
                np.sqrt(h.X * h.X + h.Y * h.Y) for v in t.nodes()
                for h in v.hits
            ])

            pos = [h.pos for v in t.nodes() for h in v.hits]
            e = [
                getattr(h, energy_type.value) for v in t.nodes()
                for h in v.hits
            ]
            ave_pos = np.average(pos, weights=e, axis=0)

            extr1, extr2 = plf.find_extrema(t)
            extr1_pos = extr1.XYZ
            extr2_pos = extr2.XYZ

            blob_pos1, blob_pos2 = plf.blob_centres(t, blob_radius)

            e_blob1, e_blob2, hits_blob1, hits_blob2 = plf.blob_energies_and_hits(
                t, blob_radius)
            overlap = sum(
                [h.E for h in set(hits_blob1).intersection(hits_blob2)])
            list_of_vars = [
                hitc.event, tID, energy, length, numb_of_voxels, numb_of_hits,
                numb_of_tracks, min_x, min_y, min_z, max_x, max_y, max_z,
                max_r, ave_pos[0], ave_pos[1], ave_pos[2], extr1_pos[0],
                extr1_pos[1], extr1_pos[2], extr2_pos[0], extr2_pos[1],
                extr2_pos[2], blob_pos1[0], blob_pos1[1], blob_pos1[2],
                blob_pos2[0], blob_pos2[1], blob_pos2[2], e_blob1, e_blob2,
                overlap, vox_size_x, vox_size_y, vox_size_z
            ]

            df.loc[c] = list_of_vars
            try:
                types_dict
            except NameError:
                types_dict = dict(
                    zip(df.columns, [type(x) for x in list_of_vars]))

            for vox in t.nodes():
                for hit in vox.hits:
                    hit.track_id = tID
                    track_hits.append(hit)

        track_hitc = evm.HitCollection(hitc.event, hitc.time)
        track_hitc.hits = track_hits
        #change dtype of columns to match type of variables
        df = df.apply(lambda x: x.astype(types_dict[x.name]))

        return df, track_hitc
Example #11
bad_evt = False
corr_hits = []

for hh in good_hits[evt_number].hits:
    if XYcorrection(hh.X, hh.Y).value == 0:
        bad_evt = True
        break
    e_corr = hh.E * LTcorrection(hh.Z, hh.X, hh.Y).value * XYcorrection(
        hh.X, hh.Y).value
    z_corr = hh.Z * drift_velocity
    hcorr = Hit(0, Cluster(0, xy(hh.X, hh.Y), xy(0, 0), 0), z_corr, e_corr,
                xy(0, 0))
    corr_hits.append(hcorr)

voxels = voxelize_hits(corr_hits, np.array([vxl_size, vxl_size, vxl_size]),
                       False)

fig = plt.figure()  #figsize=(20, 10))
ax = fig.add_subplot(111, projection='3d')
#ax = fig.gca(projection='3d')
#ax.set_aspect("equal")

energies = [v.E for v in voxels]
energies = np.array(energies)
min_energy = energies.min()
max_energy = energies.max()
print('Minimum energy = {}, maximum energy = {}'.format(
    min_energy, max_energy))

max_x = max_y = max_z = -1.e6
min_x = min_y = min_z = 1.e6
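Both truncated plotting fragments (this one and Example #4) stop before anything is drawn. A minimal continuation sketch, with colormap, marker size and labels as assumptions, renders the voxels as an energy-coloured 3D scatter on the ax created above:

# Hypothetical continuation: scatter the voxel centres, coloured by energy.
xs = [v.X for v in voxels]
ys = [v.Y for v in voxels]
zs = [v.Z for v in voxels]
p = ax.scatter(xs, ys, zs, c=energies, cmap='viridis',
               vmin=min_energy, vmax=max_energy, s=40)
fig.colorbar(p, ax=ax, label='voxel energy')
ax.set_xlabel('X (mm)'); ax.set_ylabel('Y (mm)'); ax.set_zlabel('Z (mm)')
plt.show()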
Example #12
def fanal_reco(
    det_name,  # Detector name: 'new', 'next100', 'next500'
    event_type,  # Event type: 'bb0nu', 'Tl208', 'Bi214'
    fwhm,  # FWHM at Qbb
    e_min,  # Minimum smeared energy for energy filtering
    e_max,  # Maximum smeared energy for energy filtering
    spatial_def,  # Spatial definition: 'low', 'high'
    veto_width,  # Veto width for fiducial filtering
    min_veto_e,  # Minimum energy in veto for fiducial filtering
    files_in,  # Input files
    event_range,  # Range of events to analyze: all, ... ??
    file_out,  # Output file
    compression,  # Compression of output file: 'ZLIB1', 'ZLIB4',
    # 'ZLIB5', 'ZLIB9', 'BLOSC5', 'BLZ4HC5'
    verbosity_level):

    ### LOGGER
    logger = get_logger('FanalReco', verbosity_level)

    ### DETECTOR NAME & its ACTIVE dimensions
    det_name = getattr(DetName, det_name)
    ACTIVE_dimensions = get_active_size(det_name)

    ### RECONSTRUCTION DATA
    # Smearing energy settings
    fwhm_Qbb = fwhm * Qbb
    sigma_Qbb = fwhm_Qbb / 2.355
    assert e_max > e_min, 'SmE_filter settings not valid. e_max must be higher than e_min.'

    # Spatial definition
    spatial_def = getattr(SpatialDef, spatial_def)

    # Voxel size
    voxel_size = get_voxel_size(spatial_def)

    # Fiducial limits
    fid_dimensions = get_fiducial_size(ACTIVE_dimensions, veto_width)

    ### PRINTING GENERAL INFO
    print(
        '\n***********************************************************************************'
    )
    print('***** Detector: {}'.format(det_name.name))
    print('***** Reconstructing {} events'.format(event_type))
    print('***** Energy Resolution: {:.2f}% FWHM at Qbb'.format(fwhm /
                                                                units.perCent))
    print('***** Spatial definition: {}'.format(spatial_def.name))
    print(
        '***********************************************************************************\n'
    )

    print(
        '* Detector-Active dimensions [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}'
        .format(ACTIVE_dimensions.z_min, ACTIVE_dimensions.z_max,
                ACTIVE_dimensions.rad))
    print(
        '         ... fiducial limits [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}\n'
        .format(fid_dimensions.z_min, fid_dimensions.z_max,
                fid_dimensions.rad))
    print('* Sigma at Qbb: {:.3f} keV.\n'.format(sigma_Qbb / units.keV))
    print('* Voxel_size: {} mm.\n'.format(voxel_size))

    print('* {0} {1} input files:'.format(len(files_in), event_type))
    for iFileName in files_in:
        print(' ', iFileName)

    ### OUTPUT FILE, ITS GROUPS & ATTRIBUTES
    # Output file
    oFile = tb.open_file(file_out, 'w', filters=tbl_filters(compression))

    # Reco group Name
    reco_group_name = get_reco_group_name(fwhm / units.perCent, spatial_def)
    oFile.create_group('/', 'FANALIC')
    oFile.create_group('/FANALIC', reco_group_name[9:])

    print('\n* Output file name:', file_out)
    print('  Reco group name:  {}\n'.format(reco_group_name))

    # Attributes
    oFile.set_node_attr(reco_group_name, 'input_sim_files', files_in)
    oFile.set_node_attr(reco_group_name, 'event_type', event_type)
    oFile.set_node_attr(reco_group_name, 'energy_resolution',
                        fwhm / units.perCent)
    oFile.set_node_attr(reco_group_name, 'smE_filter_Emin', e_min)
    oFile.set_node_attr(reco_group_name, 'smE_filter_Emax', e_max)
    oFile.set_node_attr(reco_group_name, 'fiducial_filter_VetoWidth',
                        veto_width)
    oFile.set_node_attr(reco_group_name, 'fiducial_filter_MinVetoE',
                        min_veto_e)

    ### DATA TO STORE
    # Event counters
    simulated_events = 0
    stored_events = 0
    analyzed_events = 0

    # Dictionaries for events & voxels data
    events_dict = get_events_reco_dict()
    voxels_dict = get_voxels_reco_dict()

    ### RECONSTRUCTION PROCEDURE
    # Looping through all the input files
    for iFileName in files_in:
        # Updating simulated and stored event counters
        configuration_df = pd.read_hdf(iFileName,
                                       '/MC/configuration',
                                       mode='r')
        simulated_events += int(configuration_df[configuration_df.param_key ==
                                                 'num_events'].param_value)
        stored_events += int(configuration_df[configuration_df.param_key ==
                                              'saved_events'].param_value)

        with tb.open_file(iFileName, mode='r') as iFile:
            file_event_numbers = iFile.root.MC.extents.cols.evt_number
            print('* Processing {0}  ({1} events) ...'.format(
                iFileName, len(file_event_numbers)))

            # Loading into memory all the particles & hits in the file
            file_mcParts = load_mcparticles(iFileName)
            file_mcHits = load_mchits(iFileName)

            # Looping through all the events in the file
            for event_number in file_event_numbers:

                # Updating counter of analyzed events
                analyzed_events += 1
                #if not int(str(analyzed_events)[-int(math.log10(analyzed_events)):]):
                #    print('* Num analyzed events: {}'.format(analyzed_events))

                # Verbosing
                logger.info(
                    'Reconstructing event Id: {0} ...'.format(event_number))

                # Getting mcParts of the event, using the event_number as the key
                event_mcParts = file_mcParts[event_number]
                num_parts = len(event_mcParts)

                # Getting mcHits of the event, using the event_number as the key
                event_mcHits = file_mcHits[event_number]
                active_mcHits = [
                    hit for hit in event_mcHits if hit.label == 'ACTIVE'
                ]
                num_hits = len(active_mcHits)

                # The event mc energy is the sum of the energy of all the hits
                event_mcE = sum([hit.E for hit in active_mcHits])

                # Smearing the event energy
                event_smE = smear_evt_energy(event_mcE, sigma_Qbb, Qbb)

                # Applying the smE filter
                event_smE_filter = (e_min <= event_smE <= e_max)

                # Verbosing
                logger.info(
                    '  Num mcHits: {0:3}   mcE: {1:.1f} keV   smE: {2:.1f} keV   smE_filter: {3}'
                    .format(num_hits, event_mcE / units.keV,
                            event_smE / units.keV, event_smE_filter))

                # For those events NOT passing the smE filter:
                # Storing data of NON smE_filter events
                if not event_smE_filter:
                    extend_events_reco_data(events_dict,
                                            event_number,
                                            evt_num_MCparts=num_parts,
                                            evt_num_MChits=num_hits,
                                            evt_mcE=event_mcE,
                                            evt_smE=event_smE,
                                            evt_smE_filter=event_smE_filter)

                # Only for those events passing the smE filter:
                else:
                    # Smearing hit energies
                    hits_smE = smear_hit_energies(active_mcHits,
                                                  event_smE / event_mcE)

                    # Translating hit positions
                    hits_transPositions = translate_hit_positions(
                        active_mcHits, DRIFT_VELOCITY)

                    # Creating the smHits with the smeared energies and translated positions
                    active_smHits = []
                    for i in range(num_hits):
                        smHit = MCHit(hits_transPositions[i],
                                      active_mcHits[i].time, hits_smE[i],
                                      'ACTIVE')
                        active_smHits.append(smHit)

                    # Filtering hits outside the ACTIVE region (due to translation)
                    active_smHits = [hit for hit in active_smHits \
                                     if hit.Z < ACTIVE_dimensions.z_max]

                    # Voxelizing using the active_smHits ...
                    event_voxels = voxelize_hits(active_smHits,
                                                 voxel_size,
                                                 strict_voxel_size=True)
                    eff_voxel_size = event_voxels[0].size

                    # Storing voxels info
                    for voxel in event_voxels:
                        extend_voxels_reco_data(voxels_dict, event_number,
                                                voxel)

                    # Check fiduciality
                    voxels_minZ, voxels_maxZ, voxels_maxRad, veto_energy, fiducial_filter = \
                        check_event_fiduciality(event_voxels, fid_dimensions, min_veto_e)

                    # Storing data of events passing the smE filter
                    extend_events_reco_data(events_dict,
                                            event_number,
                                            evt_num_MCparts=num_parts,
                                            evt_num_MChits=num_hits,
                                            evt_mcE=event_mcE,
                                            evt_smE=event_smE,
                                            evt_smE_filter=event_smE_filter,
                                            evt_num_voxels=len(event_voxels),
                                            evt_voxel_sizeX=eff_voxel_size[0],
                                            evt_voxel_sizeY=eff_voxel_size[1],
                                            evt_voxel_sizeZ=eff_voxel_size[2],
                                            evt_voxels_minZ=voxels_minZ,
                                            evt_voxels_maxZ=voxels_maxZ,
                                            evt_voxels_maxRad=voxels_maxRad,
                                            evt_veto_energy=veto_energy,
                                            evt_fid_filter=fiducial_filter)

                    # Verbosing
                    logger.info(
                        '  NumVoxels: {:3}   minZ: {:.1f} mm   maxZ: {:.1f} mm   maxR: {:.1f} mm   veto_E: {:.1f} keV   fid_filter: {}'
                        .format(len(event_voxels), voxels_minZ, voxels_maxZ,
                                voxels_maxRad, veto_energy / units.keV,
                                fiducial_filter))
                    for voxel in event_voxels:
                        logger.debug(
                            '    Voxel pos: ({:5.1f}, {:5.1f}, {:5.1f}) mm   E: {:5.1f} keV'
                            .format(voxel.X / units.mm, voxel.Y / units.mm,
                                    voxel.Z / units.mm, voxel.E / units.keV))

    ### STORING DATA
    # Storing events and voxels dataframes
    print('\n* Storing data in the output file ...\n  {}\n'.format(file_out))
    store_events_reco_data(file_out, reco_group_name, events_dict)
    store_voxels_reco_data(file_out, reco_group_name, voxels_dict)

    # Storing event counters as attributes
    smE_filter_events = sum(events_dict['smE_filter'])
    fid_filter_events = sum(events_dict['fid_filter'])
    store_events_reco_counters(oFile, reco_group_name, simulated_events,
                               stored_events, smE_filter_events,
                               fid_filter_events)

    oFile.close()
    print('* Reconstruction done !!\n')

    # Printing reconstruction numbers
    print('* Event counters ...')
    print('''  Simulated events:  {0:9}
  Stored events:     {1:9}
  smE_filter events: {2:9}
  fid_filter events: {3:9}\n'''.format(simulated_events, stored_events,
                                       smE_filter_events, fid_filter_events))
Example #13
def analyze_bb_event_ic(
        detector: Detector, event_id: int, params: BBAnalysisParams,
        fiducial_checker: Callable, event_mcParts: pd.DataFrame,
        event_mcHits: pd.DataFrame) -> Tuple[Event, TrackList, VoxelList]:
    """
    It assess the global acceptance factor after fiducial, topology and ROI cuts
    based on the paolina functions implemented into IC.
    """
    # Data to be filled
    event_data = Event()
    tracks_data = TrackList()
    voxels_data = VoxelList()

    # Storing basic MC data
    event_data.event_id = event_id
    event_data.num_mcParts = len(event_mcParts)
    event_data.num_mcHits = len(event_mcHits)

    logger.info(f"Num mcParticles: {event_data.num_mcParts:3}   " + \
                f"Num mcHits: {event_data.num_mcHits:3}   ")

    # Processing MC data
    event_data.mc_energy, event_data.mc_filter = \
        check_mc_data(event_mcHits, params.buffer_Eth, params.e_min, params.e_max)
    if not event_data.mc_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the mc_filter ###
    # Reconstruct hits
    active_mcHits = event_mcHits[event_mcHits.label == 'ACTIVE']
    recons_hits = reconstruct_hits(detector, active_mcHits,
                                   event_data.mc_energy, params.fwhm,
                                   params.trans_diff, params.long_diff)

    # Event smeared energy
    event_data.sm_energy = recons_hits.energy.sum()
    event_data.energy_filter = (params.e_min <= event_data.sm_energy <=
                                params.e_max)
    logger.info(f"smE: {event_data.sm_energy/units.keV:.1f} keV   " + \
                f"ENERGY filter: {event_data.energy_filter}")
    if not event_data.energy_filter:
        return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the energy_filter ###
    # Creating the IChits from reconstructed hits
    ic_hits = recons_hits.apply(lambda hit: \
        MCHit((hit.x, hit.y, hit.z), hit.time, hit.energy, 'ACTIVE'), axis=1).tolist()

    # Voxelizing using the ic_hits ...
    ic_voxels = voxelize_hits(
        ic_hits,
        [params.voxel_size_x, params.voxel_size_y, params.voxel_size_z],
        params.strict_voxel_size)

    # Cleaning voxels with energy < voxel_Eth
    ic_voxels = clean_voxels(ic_voxels, params.voxel_Eth)

    event_data.num_voxels = len(ic_voxels)
    eff_voxel_size = ic_voxels[0].size
    event_data.voxel_size_x = eff_voxel_size[0]
    event_data.voxel_size_y = eff_voxel_size[1]
    event_data.voxel_size_z = eff_voxel_size[2]
    logger.info(
        f"Num Voxels: {event_data.num_voxels:3}  of size: {eff_voxel_size} mm")

    # Check fiduciality
    event_data.veto_energy, event_data.fiduc_filter = \
        check_event_fiduciality(fiducial_checker, ic_voxels, params.veto_Eth)
    logger.info(f"Veto_E: {event_data.veto_energy/units.keV:.1f} keV   " + \
                f"FIDUC filter: {event_data.fiduc_filter}")

    if not event_data.fiduc_filter:
        # Storing voxels without track-id info
        for voxel_id in range(len(ic_voxels)):
            voxels_data.add(
                Voxel.from_icVoxel(event_id, -1, voxel_id,
                                   ic_voxels[voxel_id]))
        logger.debug(voxels_data)
        return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the fiduc_filter ###
    # Make tracks
    ic_tracks = make_track_graphs(ic_voxels)

    # Storing tracks from ic_tracks
    for track_id in range(len(ic_tracks)):
        ic_track = ic_tracks[track_id]
        tracks_data.add(Track.from_icTrack(event_id, track_id, ic_track))

        # Storing the voxels of this track (renamed so the event-level
        # ic_voxels list is not shadowed)
        track_voxels = list(ic_track.nodes())
        for voxel_id in range(len(track_voxels)):
            voxels_data.add(
                Voxel.from_icVoxel(event_id, track_id, voxel_id,
                                   track_voxels[voxel_id]))

    logger.debug(voxels_data)

    event_data.num_tracks = tracks_data.len()

    event_data.track_filter = (
        (event_data.num_tracks > 0) &
        (event_data.num_tracks <= params.max_num_tracks))

    logger.info(f"Num tracks: {event_data.num_tracks:3}  ->" + \
                f"  TRACK filter: {event_data.track_filter}")

    if not event_data.track_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the track_filter ###
    the_track = tracks_data.tracks[0]

    # Getting & Storing Blobs info
    blob1_energy, blob2_energy, blob1_hits, blob2_hits, blob1_pos, blob2_pos = \
        blob_energies_hits_and_centres(ic_tracks[0], params.blob_radius)
    blob1_pos, blob2_pos = XYZ.from_array(blob1_pos), XYZ.from_array(blob2_pos)

    the_track.blob1_energy, the_track.blob1_num_hits = blob1_energy, len(
        blob1_hits)
    the_track.blob1_x, the_track.blob1_y, the_track.blob1_z = \
        blob1_pos.x, blob1_pos.y, blob1_pos.z
    the_track.blob2_energy, the_track.blob2_num_hits = blob2_energy, len(
        blob2_hits)
    the_track.blob2_x, the_track.blob2_y, the_track.blob2_z = \
        blob2_pos.x, blob2_pos.y, blob2_pos.z

    the_track.ovlp_energy = \
        float(sum(hit.E for hit in set(blob1_hits).intersection(set(blob2_hits))))

    # Getting & Storing True extrema info
    ext1, ext2 = get_true_extrema(event_mcParts, params.event_type)
    ext1, ext2 = order_true_extrema(ext1, ext2, blob1_pos, blob2_pos)

    the_track.t_ext1_x, the_track.t_ext1_y, the_track.t_ext1_z = ext1.x, ext1.y, ext1.z
    the_track.t_ext2_x, the_track.t_ext2_y, the_track.t_ext2_z = ext2.x, ext2.y, ext2.z

    # Storing Track info in event data
    event_data.track_length = the_track.length
    event_data.blob1_energy, event_data.blob2_energy = blob1_energy, blob2_energy

    logger.info(tracks_data)

    # Applying the blob filter
    event_data.blob_filter = ((event_data.blob2_energy > params.blob_Eth) &
                              (the_track.ovlp_energy == 0.))

    logger.info(f"Blob 1 energy: {event_data.blob1_energy/units.keV:4.1f} keV " + \
                f"  Blob 2 energy: {event_data.blob2_energy/units.keV:4.1f} keV"  + \
                f"  Overlap: {the_track.ovlp_energy/units.keV:4.1f} keV"  + \
                f"  ->  BLOB filter: {event_data.blob_filter}")

    if not event_data.blob_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the blob_filter ###
    # Applying the ROI filter
    event_data.roi_filter = ((event_data.sm_energy >= params.roi_Emin) &
                             (event_data.sm_energy <= params.roi_Emax))

    logger.info(f"Event energy: {event_data.sm_energy/units.keV:6.1f} keV" + \
                f"  ->  ROI filter: {event_data.roi_filter}")

    return event_data, tracks_data, voxels_data
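A hypothetical per-event driver for this analyzer; every name beyond analyze_bb_event_ic itself is an assumption:

# Hypothetical driver loop: analyze a batch of events and collect the
# three returned containers for later storage.
all_events, all_tracks, all_voxels = [], [], []
for event_id in event_ids:                      # assumed iterable of ids
    event_data, tracks_data, voxels_data = analyze_bb_event_ic(
        detector, event_id, params, fiducial_checker,
        mcParts.loc[event_id], mcHits.loc[event_id])   # assumed DataFrames
    all_events.append(event_data)
    all_tracks.append(tracks_data)
    all_voxels.append(voxels_data)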