Example #1
0
def test_logger():
    """Check that get_logger honors the INFO level: WARNING and INFO
    messages reach the log file, DEBUG messages do not, and each record
    is prefixed with the logger name.
    """
    log_file_name = 'logger_test.log'
    logger = get_logger('Test Logger', logging.INFO, log_file_name)

    logger.warning('This is a warning message.')
    logger.info('This is an info message.')
    # Below the configured INFO level, so it must NOT appear in the file.
    logger.debug('This is a debug message.')

    # Use a context manager so the file handle is always released
    # (the original opened the file and never closed it, leaking the
    # descriptor for the rest of the test run).
    with open(log_file_name, 'r') as log_file:
        assert log_file.read(
        ) == 'Test Logger - This is a warning message.\nTest Logger - This is an info message.\n'
Example #2
0
def fanal_ana(
    det_name,  # Detector name: 'new', 'next100', 'next500'
    event_type,  # Event type: 'bb0nu', 'Tl208', 'Bi214'
    fwhm,  # FWHM at Qbb
    voxel_size,  # Voxel size (x, y, z)
    track_Eth,  # Track energy threshold
    max_num_tracks,  # Maximum number of tracks
    blob_radius,  # Blob radius
    blob_Eth,  # Blob energy threshold
    roi_Emin,  # ROI minimum energy
    roi_Emax,  # ROI maximum energy
    files_in,  # Input files
    event_range,  # Range of events to analyze: all, ... ??
    file_out,  # Output file
    compression,  # Compression of output file: 'ZLIB1', 'ZLIB4',
    # 'ZLIB5', 'ZLIB9', 'BLOSC5', 'BLZ4HC5'
    verbosity_level):
    """Run the FANAL analysis phase over reconstructed events.

    Reads the events/voxels dataframes written by the reconstruction
    phase from every file in ``files_in``, rebuilds tracks from the
    voxels of each event that passed the fiducial filter, applies the
    tracks / blobs / ROI filters in sequence, and stores the resulting
    analysis dataframes plus event counters into ``file_out`` (HDF5).

    NOTE(review): ``event_range`` is accepted but never used in this
    function body.  Relies on project helpers (get_logger,
    get_reco_group_name, make_track_graphs, blob_energies, ...) whose
    exact contracts are not visible in this file.
    """

    ### LOGGER
    logger = get_logger('FanalAna', verbosity_level)

    ### DETECTOR NAME
    # Map the detector-name string to its DetName enum member.
    det_name = getattr(DetName, det_name)

    ### PRINTING GENERAL INFO
    print(
        '\n***********************************************************************************'
    )
    print('***** Detector: {}'.format(det_name.name))
    print('***** Analizing {} events'.format(event_type))
    print('***** Energy Resolution: {:.2f}% FWFM at Qbb'.format(fwhm /
                                                                units.perCent))
    print('***** Voxel Size: ({}, {}, {}) mm'.format(voxel_size[0] / units.mm,
                                                     voxel_size[1] / units.mm,
                                                     voxel_size[2] / units.mm))
    print(
        '***********************************************************************************\n'
    )

    print('* Track Eth: {:4.1f} keV   Max Num Tracks: {}\n'.format(
        track_Eth / units.keV, max_num_tracks))
    print('* Blob radius: {:.1f} mm   Blob Eth: {:4.1f} keV\n'.format(
        blob_radius, blob_Eth / units.keV))
    print('* ROI limits: [{:4.1f}, {:4.1f}] keV\n'.format(
        roi_Emin / units.keV, roi_Emax / units.keV))

    ### INPUT RECONSTRUCTION FILE AND GROUP
    reco_group_name = get_reco_group_name(fwhm / units.perCent, voxel_size)
    print('* {} {} input reco file names:'.format(len(files_in), event_type))
    for iFileName in files_in:
        print(' ', iFileName)
    print('  Reco group name: {}\n'.format(reco_group_name))

    ### OUTPUT FILE, ITS GROUPS & ATTRIBUTES
    # Output analysis file
    oFile = tb.open_file(file_out, 'w', filters=tbl_filters(compression))

    # Analysis group Name
    ana_group_name = get_ana_group_name(fwhm / units.perCent, voxel_size)
    # ana_group_name[9:] strips the leading '/FANALIC/' prefix so the
    # group is created under the '/FANALIC' node just created above.
    oFile.create_group('/', 'FANALIC')
    oFile.create_group('/FANALIC', ana_group_name[9:])

    print('* Output analysis file name:', file_out)
    print('  Ana group name: {}\n'.format(ana_group_name))

    # Attributes: persist the full analysis configuration alongside the data.
    oFile.set_node_attr(ana_group_name, 'input_reco_files', files_in)
    oFile.set_node_attr(ana_group_name, 'input_reco_group', reco_group_name)
    oFile.set_node_attr(ana_group_name, 'event_type', event_type)
    oFile.set_node_attr(ana_group_name, 'energy_resolution',
                        fwhm / units.perCent)
    oFile.set_node_attr(ana_group_name, 'track_Eth', track_Eth)
    oFile.set_node_attr(ana_group_name, 'max_num_tracks', max_num_tracks)
    oFile.set_node_attr(ana_group_name, 'blob_radius', blob_radius)
    oFile.set_node_attr(ana_group_name, 'blob_Eth', blob_Eth)
    oFile.set_node_attr(ana_group_name, 'roi_Emin', roi_Emin)
    oFile.set_node_attr(ana_group_name, 'roi_Emax', roi_Emax)

    ### DATA TO STORE
    # Event counters
    simulated_events = 0
    stored_events = 0
    smE_filter_events = 0
    fid_filter_events = 0
    tracks_filter_events = 0
    blobs_filter_events = 0
    roi_filter_events = 0

    analyzed_events = 0
    # Progress-print interval; grows by x10 each time a decade is completed
    # (see the verbosing block at the bottom of the event loop).
    toUpdate_events = 1

    # Dictionaries for events & voxels data
    events_dict = get_events_ana_dict()
    voxels_dict = get_voxels_ana_dict()

    events_reco_df = pd.DataFrame()
    voxels_reco_df = pd.DataFrame()

    ### ANALYSIS PROCEDURE
    print('* Analyzing events ...\n')

    # Looping through all the input files
    for iFileName in files_in:

        # Updating reconstruction counters
        with tb.open_file(iFileName, mode='r') as reco_file:
            simulated_events += reco_file.get_node_attr(
                reco_group_name, 'simulated_events')
            stored_events += reco_file.get_node_attr(reco_group_name,
                                                     'stored_events')
            smE_filter_events += reco_file.get_node_attr(
                reco_group_name, 'smE_filter_events')
            fid_filter_events += reco_file.get_node_attr(
                reco_group_name, 'fid_filter_events')

        # Getting the events & voxels data from the reconstruction phase
        file_events = pd.read_hdf(iFileName, reco_group_name + '/events')
        file_voxels = pd.read_hdf(iFileName, reco_group_name + '/voxels')

        # Updating reconstruction dataframes
        events_reco_df = pd.concat([events_reco_df, file_events], axis=0)
        voxels_reco_df = pd.concat([voxels_reco_df, file_voxels], axis=0)

        print('* Processing {} ...'.format(iFileName))

        ### Looping through all the events that passed the fiducial filter
        for event_number, event_df in file_events[
                file_events.fid_filter].iterrows():

            # Updating counter of analyzed events
            analyzed_events += 1
            logger.info('Analyzing event Id: {0} ...'.format(event_number))

            # Getting event data
            event_data = get_event_ana_data()
            event_data['event_id'] = event_number

            event_voxels = file_voxels.loc[event_number]
            num_event_voxels = len(event_voxels)
            num_event_voxels_negli = len(event_voxels[event_voxels.negli])
            voxel_dimensions = (event_df.voxel_sizeX, event_df.voxel_sizeY,
                                event_df.voxel_sizeZ)

            logger.info('  Total Voxels: {}   Negli. Voxels: {}   Voxels Size: ({:3.1f}, {:3.1f}, {:3.1f}) mm'\
                        .format(num_event_voxels, num_event_voxels_negli, voxel_dimensions[0],
                                voxel_dimensions[1], voxel_dimensions[2]))

            # If there is any negligible Voxel, distribute its energy between its neighbours,
            # if not, all voxels maintain their previous energies
            if num_event_voxels_negli:
                event_voxels_newE = get_new_energies(event_voxels)
            else:
                event_voxels_newE = event_voxels.E.tolist()

            # Translate fanalIC voxels info to IC voxels to make tracks
            #ic_voxels = [Voxel(event_voxels.iloc[i].X, event_voxels.iloc[i].Y, event_voxels.iloc[i].Z,
            #                   event_voxels_newE[i], voxel_dimensions) for i in range(num_event_voxels)]
            # NOTE(review): iterrows() yields the dataframe *index label* as
            # `i`, but event_voxels_newE is a plain list indexed positionally.
            # This assumes the per-event voxel index labels are 0..N-1 —
            # TODO confirm against the reconstruction output schema.
            ic_voxels = []
            for i, voxel in event_voxels.iterrows():
                ic_voxel = Voxel(voxel.X, voxel.Y, voxel.Z,
                                 event_voxels_newE[i], voxel_dimensions)
                ic_voxels.append(ic_voxel)

            # Make tracks
            event_tracks = make_track_graphs(ic_voxels)
            num_ini_tracks = len(event_tracks)
            logger.info('  Num initial tracks: {:2}'.format(num_ini_tracks))

            # Appending to every voxel, the track it belongs to
            event_voxels_tracks = get_voxel_track_relations(
                event_voxels, event_tracks)

            # Appending ana-info to this event voxels
            extend_voxels_ana_dict(voxels_dict, event_number,
                                   event_voxels.index.tolist(),
                                   event_voxels_newE, event_voxels_tracks)

            # Processing tracks: Getting energies, sorting and filtering ...
            event_sorted_tracks = process_tracks(event_tracks, track_Eth)
            event_data['num_tracks'] = len(event_sorted_tracks)

            # Getting 3 hottest tracks info
            # Each sorted track is a (energy, length, graph) tuple.
            if event_data['num_tracks'] >= 1:
                event_data['track0_E'] = event_sorted_tracks[0][0]
                event_data['track0_length'] = event_sorted_tracks[0][1]
                event_data['track0_voxels'] = len(
                    event_sorted_tracks[0][2].nodes())
            if event_data['num_tracks'] >= 2:
                event_data['track1_E'] = event_sorted_tracks[1][0]
                event_data['track1_length'] = event_sorted_tracks[1][1]
                event_data['track1_voxels'] = len(
                    event_sorted_tracks[1][2].nodes())
            if event_data['num_tracks'] >= 3:
                event_data['track2_E'] = event_sorted_tracks[2][0]
                event_data['track2_length'] = event_sorted_tracks[2][1]
                event_data['track2_voxels'] = len(
                    event_sorted_tracks[2][2].nodes())

            # Applying the tracks filter consisting on:
            # 0 < num tracks < max_num_tracks
            # the track length must be longer than 2 times the blob_radius
            # NOTE(review): when num_tracks == 0, 'track0_length' keeps the
            # default from get_event_ana_dict() — presumably NaN; the
            # leading (num_tracks > 0) term then makes the filter False.
            event_data['tracks_filter'] = (
                (event_data['num_tracks'] > 0) &
                (event_data['num_tracks'] <= max_num_tracks) &
                (event_data['track0_length'] >= 2. * blob_radius))

            # Verbosing
            logger.info('  Num final tracks: {:2}  -->  tracks_filter: {}' \
                        .format(event_data['num_tracks'], event_data['tracks_filter']))

            ### For those events passing the tracks filter:
            if event_data['tracks_filter']:

                # Getting the blob energies of the track with highest energy
                event_data['blob1_E'], event_data['blob2_E'] = \
                    blob_energies(event_sorted_tracks[0][2], blob_radius)

                # Applying the blobs filter
                event_data['blobs_filter'] = (event_data['blob2_E'] > blob_Eth)

                # Verbosing
                logger.info('  Blob 1 energy: {:4.1f} keV   Blob 2 energy: {:4.1f} keV  -->  Blobs filter: {}'\
                            .format(event_data['blob1_E']/units.keV, event_data['blob2_E']/units.keV,
                                    event_data['blobs_filter']))

                ### For those events passing the blobs filter:
                if event_data['blobs_filter']:

                    # Getting the total event smeared energy
                    event_smE = file_events.loc[event_number].smE

                    # Applying the ROI filter
                    event_data['roi_filter'] = ((event_smE >= roi_Emin) &
                                                (event_smE <= roi_Emax))

                    # Verbosing
                    logger.info('  Event energy: {:6.1f} keV  -->  ROI filter: {}'\
                                .format(event_smE / units.keV, event_data['roi_filter']))

            # Storing event_data
            extend_events_ana_dict(events_dict, event_data)

            # Verbosing
            if (not (analyzed_events % toUpdate_events)):
                print('* Num analyzed events: {}'.format(analyzed_events))
            if (analyzed_events == (10 * toUpdate_events)):
                toUpdate_events *= 10

    ### STORING ANALYSIS DATA
    print('* Total analyzed events: {}'.format(analyzed_events))

    # Storing events and voxels dataframes
    print('\n* Storing data in the output file ...\n  {}\n'.format(file_out))
    store_events_ana_dict(file_out, ana_group_name, events_reco_df,
                          events_dict)
    store_voxels_ana_dict(file_out, ana_group_name, voxels_reco_df,
                          voxels_dict)

    # Storing event counters as attributes
    tracks_filter_events = sum(events_dict['tracks_filter'])
    blobs_filter_events = sum(events_dict['blobs_filter'])
    roi_filter_events = sum(events_dict['roi_filter'])

    store_events_ana_counters(oFile, ana_group_name, simulated_events,
                              stored_events, smE_filter_events,
                              fid_filter_events, tracks_filter_events,
                              blobs_filter_events, roi_filter_events)

    ### Ending ...
    oFile.close()
    print('* Analysis done !!\n')

    print('''* Event counters:
  Simulated events:     {0:9}
  Stored events:        {1:9}
  smE_filter events:    {2:9}
  fid_filter events:    {3:9}
  tracks_filter events: {4:9}
  blobs_filter events:  {5:9}
  roi_filter events:    {6:9}'''.format(simulated_events, stored_events,
                                        smE_filter_events, fid_filter_events,
                                        tracks_filter_events,
                                        blobs_filter_events,
                                        roi_filter_events))
Example #3
0
def fanal_reco(det_name,    # Detector name: 'new', 'next100', 'next500'
               event_type,  # Event type: 'bb0nu', 'Tl208', 'Bi214'
               fwhm,        # FWHM at Qbb
               e_min,       # Minimum smeared energy for energy filtering
               e_max,       # Maximum smeared energy for energy filtering
               voxel_size,  # Voxel size (x, y, z)
               voxel_Eth,   # Voxel energy threshold
               veto_width,  # Veto width for fiducial filtering
               min_veto_e,  # Minimum energy in veto for fiducial filtering
               files_in,    # Input files
               event_range, # Range of events to analyze: all, ... ??
               file_out,    # Output file
               compression, # Compression of output file: 'ZLIB1', 'ZLIB4',
                            # 'ZLIB5', 'ZLIB9', 'BLOSC5', 'BLZ4HC5'
               verbosity_level):
    """Run the FANAL reconstruction phase over MC-simulated events.

    For every event in ``files_in`` (GEANT4/nexus-style HDF5 with
    /MC/configuration, /MC/extents and MC hits): smears the event
    energy to the requested resolution, applies the smeared-energy
    (smE) filter, voxelizes the surviving hits and applies the
    fiducial filter, then stores the events/voxels dataframes and
    event counters into ``file_out`` (HDF5).

    NOTE(review): ``event_range`` is accepted but never used in this
    function body.  Relies on project helpers (get_logger,
    smear_evt_energy, voxelize_hits, check_event_fiduciality, ...)
    whose exact contracts are not visible in this file.
    """


    ### LOGGER
    logger = get_logger('FanalReco', verbosity_level)


    ### DETECTOR NAME & its ACTIVE dimensions
    # Map the detector-name string to its DetName enum member.
    det_name = getattr(DetName, det_name)
    ACTIVE_dimensions = get_active_size(det_name)
    fid_dimensions    = get_fiducial_size(det_name, veto_width)


    ### RECONSTRUCTION DATA
    # Smearing energy settings
    # 2.355 ~ 2*sqrt(2*ln 2): standard FWHM-to-sigma conversion for a Gaussian.
    fwhm_Qbb  = fwhm * Qbb
    sigma_Qbb = fwhm_Qbb / 2.355
    assert e_max > e_min, 'SmE_filter settings not valid. e_max must be higher than e_min.'


    ### PRINTING GENERAL INFO
    print('\n***********************************************************************************')
    print('***** Detector: {}'.format(det_name.name))
    print('***** Reconstructing {} events'.format(event_type))
    print('***** Energy Resolution: {:.2f}% fwhm at Qbb'.format(fwhm / units.perCent))
    print('***** Voxel Size: ({}, {}, {}) mm'.format(voxel_size[0] / units.mm,
                                                     voxel_size[1] / units.mm,
                                                     voxel_size[2] / units.mm))
    print('***********************************************************************************\n')

    print('* Sigma at Qbb: {:.3f} keV.\n'.format(sigma_Qbb / units.keV))

    print('* Voxel_size: ({}, {}, {}) mm'.format(voxel_size[0] / units.mm,
                                                 voxel_size[1] / units.mm,
                                                 voxel_size[2] / units.mm))
    print('  Voxel Eth:  {:4.1f} keV\n'.format(voxel_Eth/units.keV))

    print('* Detector-Active dimensions [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}'
          .format(ACTIVE_dimensions.z_min, ACTIVE_dimensions.z_max,
                  ACTIVE_dimensions.rad))
    print('         ... fiducial limits [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}\n'
          .format(fid_dimensions.z_min, fid_dimensions.z_max, fid_dimensions.rad))

    print('* {0} {1} input files:'.format(len(files_in), event_type))
    for iFileName in files_in:
        print(' ', iFileName)


    ### OUTPUT FILE, ITS GROUPS & ATTRIBUTES
    # Output file
    oFile = tb.open_file(file_out, 'w', filters = tbl_filters(compression))

    # Reco group Name
    # reco_group_name[9:] strips the leading '/FANALIC/' prefix so the
    # group is created under the '/FANALIC' node just created above.
    reco_group_name = get_reco_group_name(fwhm/units.perCent, voxel_size)
    oFile.create_group('/', 'FANALIC')
    oFile.create_group('/FANALIC', reco_group_name[9:])

    print('\n* Output file name:', file_out)
    print('  Reco group name:  {}\n'.format(reco_group_name))

    # Attributes: persist the full reconstruction configuration alongside the data.
    oFile.set_node_attr(reco_group_name, 'input_sim_files',           files_in)
    oFile.set_node_attr(reco_group_name, 'event_type',                event_type)
    oFile.set_node_attr(reco_group_name, 'energy_resolution',         fwhm/units.perCent)
    oFile.set_node_attr(reco_group_name, 'voxel_sizeX',               voxel_size[0])
    oFile.set_node_attr(reco_group_name, 'voxel_sizeY',               voxel_size[1])
    oFile.set_node_attr(reco_group_name, 'voxel_sizeZ',               voxel_size[2])
    oFile.set_node_attr(reco_group_name, 'voxel_Eth',                 voxel_Eth)
    oFile.set_node_attr(reco_group_name, 'smE_filter_Emin',           e_min)
    oFile.set_node_attr(reco_group_name, 'smE_filter_Emax',           e_max)
    oFile.set_node_attr(reco_group_name, 'fiducial_filter_VetoWidth', veto_width)
    oFile.set_node_attr(reco_group_name, 'fiducial_filter_MinVetoE',  min_veto_e)


    ### DATA TO STORE
    # Event counters
    simulated_events = 0
    stored_events    = 0
    analyzed_events  = 0
    # Progress-print interval; grows by x10 each completed decade
    # (see the verbosing block at the bottom of the event loop).
    toUpdate_events  = 1

    # Dictionaries for events & voxels data
    events_dict = get_events_reco_dict()
    voxels_dict = get_voxels_reco_dict()


    ### RECONSTRUCTION PROCEDURE
    # Looping through all the input files
    for iFileName in files_in:
        # Updating simulated and stored event counters
        configuration_df  = pd.read_hdf(iFileName, '/MC/configuration', mode='r')
        simulated_events += int(configuration_df[configuration_df.param_key == 'num_events'].param_value)
        stored_events    += int(configuration_df[configuration_df.param_key == 'saved_events'].param_value)

        # Getting event numbers
        file_extents = pd.read_hdf(iFileName, '/MC/extents', mode='r')
        file_event_numbers = file_extents.evt_number

        print('* Processing {0}  ({1} events) ...'.format(iFileName, len(file_event_numbers)))

        # Getting mc hits
        file_mcHits = load_mc_hits(iFileName)

        # Looping through all the events in iFile
        for event_number in file_event_numbers:

            # Updating counter of analyzed events
            analyzed_events += 1
            logger.info('Reconstructing event Id: {0} ...'.format(event_number))

            # Getting event data
            event_data = get_event_reco_data()
            event_data['event_id'] = event_number

            # Only hits deposited in the ACTIVE volume contribute to the event.
            event_mcHits  = file_mcHits.loc[event_number, :]
            active_mcHits = event_mcHits[event_mcHits.label == 'ACTIVE'].copy()

            event_data['num_MCparts'] = get_num_mc_particles(file_extents, event_number)
            event_data['num_MChits']  = len(active_mcHits)

            # The event mc energy is the sum of the energy of all the hits except
            # for Bi214 events, in which the number of S1 in the event is considered
            if (event_type == 'Bi214'):
                event_data['mcE'] = get_mc_energy(active_mcHits)
            else:
                event_data['mcE'] = active_mcHits.E.sum()

            # Smearing the event energy
            event_data['smE'] = smear_evt_energy(event_data['mcE'], sigma_Qbb, Qbb)

            # Applying the smE filter
            event_data['smE_filter'] = (e_min <= event_data['smE'] <= e_max)

            # Verbosing
            logger.info('  Num mcHits: {0:3}   mcE: {1:.1f} keV   smE: {2:.1f} keV   smE_filter: {3}' \
                        .format(event_data['num_MChits'], event_data['mcE']/units.keV,
                                event_data['smE']/units.keV, event_data['smE_filter']))

            # For those events passing the smE filter:
            if event_data['smE_filter']:

                # Smearing hit energies
                # Scale every hit so the hit energies sum to the smeared event energy.
                smearing_factor = event_data['smE'] / event_data['mcE']
                active_mcHits['smE'] = active_mcHits['E'] * smearing_factor

                # Translating hit Z positions from delayed hits
                translate_hit_positions(det_name, active_mcHits, DRIFT_VELOCITY)

                # Creating the IChits with the smeared energies and translated Z positions
                # to be passed to paolina functions
                #IChits = []
                #for i, hit in active_mcHits[active_mcHits.shifted_z < ACTIVE_dimensions.z_max].iterrows():
                #    IChit = MCHit((hit.x, hit.y, hit.shifted_z), hit.time, hit.smE, 'ACTIVE')
                #    IChits.append(IChit)
                # Hits whose shifted Z falls outside the ACTIVE Z range are dropped.
                IChits = active_mcHits[(active_mcHits.shifted_z < ACTIVE_dimensions.z_max) &
                                       (active_mcHits.shifted_z > ACTIVE_dimensions.z_min)] \
                    .apply(lambda hit: MCHit((hit.x, hit.y, hit.shifted_z),
                                             hit.time, hit.smE, 'ACTIVE'), axis=1).tolist()

                # Voxelizing using the IChits ...
                event_voxels = voxelize_hits(IChits, voxel_size, strict_voxel_size=False)
                event_data['num_voxels'] = len(event_voxels)

                # NOTE(review): indexing event_voxels[0] assumes voxelize_hits
                # returns at least one voxel for any event that reaches this
                # point — TODO confirm (would raise IndexError otherwise).
                eff_voxel_size = event_voxels[0].size
                event_data['voxel_sizeX'] = eff_voxel_size[0]
                event_data['voxel_sizeY'] = eff_voxel_size[1]
                event_data['voxel_sizeZ'] = eff_voxel_size[2]

                # Storing voxels info
                for voxel_id in range(len(event_voxels)):
                    extend_voxels_reco_dict(voxels_dict, event_number, voxel_id,
                                            event_voxels[voxel_id], voxel_Eth)

                # Check fiduciality
                event_data['voxels_minZ'], event_data['voxels_maxZ'], \
                event_data['voxels_maxRad'], event_data['veto_energy'], \
                event_data['fid_filter'] = \
                check_event_fiduciality(det_name, veto_width, min_veto_e, event_voxels)

                # Verbosing
                logger.info('  NumVoxels: {:3}   minZ: {:.1f} mm   maxZ: {:.1f} mm   maxR: {:.1f} mm   veto_E: {:.1f} keV   fid_filter: {}' \
                            .format(event_data['num_voxels'], event_data['voxels_minZ'],
                                    event_data['voxels_maxZ'], event_data['voxels_maxRad'],
                                    event_data['veto_energy'] / units.keV,
                                    event_data['fid_filter']))

                for voxel in event_voxels:
                    logger.debug('    Voxel pos: ({:5.1f}, {:5.1f}, {:5.1f}) mm   E: {:5.1f} keV'\
                                 .format(voxel.X/units.mm, voxel.Y/units.mm,
                                         voxel.Z/units.mm, voxel.E/units.keV))

            # Storing event_data
            extend_events_reco_dict(events_dict, event_data)

            # Verbosing
            if (not(analyzed_events % toUpdate_events)):
                print('* Num analyzed events: {}'.format(analyzed_events))
            if (analyzed_events == (10 * toUpdate_events)): toUpdate_events *= 10


    ### STORING RECONSTRUCTION DATA
    # Storing events and voxels dataframes
    print('\n* Storing data in the output file: {}'.format(file_out))
    store_events_reco_dict(file_out, reco_group_name, events_dict)
    store_voxels_reco_dict(file_out, reco_group_name, voxels_dict)

    # Storing event counters as attributes
    smE_filter_events = sum(events_dict['smE_filter'])
    fid_filter_events = sum(events_dict['fid_filter'])
    store_events_reco_counters(oFile, reco_group_name, simulated_events,
                               stored_events, smE_filter_events, fid_filter_events)

    oFile.close()
    print('* Reconstruction done !!\n')

    # Printing reconstruction numbers
    print('* Event counters ...')
    print('''  Simulated events:  {0:9}
  Stored events:     {1:9}
  smE_filter events: {2:9}
  fid_filter events: {3:9}\n'''
        .format(simulated_events, stored_events, smE_filter_events, fid_filter_events))
Example #4
0
def fanal_ana(det_name,       # Detector name: 'new', 'next100', 'next500'
              event_type,     # Event type: 'bb0nu', 'Tl208', 'Bi214'
              fwhm,           # FWHM at Qbb
              spatial_def,    # Spatial definition: 'low', 'high'
              voxel_Eth,      # Voxel energy threshold
              track_Eth,      # Track energy threshold
              max_num_tracks, # Maximum number of tracks
              blob_radius,    # Blob radius
              blob_Eth,       # Blob energy threshold
              roi_Emin,       # ROI minimum energy
              roi_Emax,       # ROI maximum energy
              files_in,       # Input files
              event_range,    # Range of events to analyze: all, ... ??
              file_out,       # Output file
              compression,    # Compression of output file: 'ZLIB1', 'ZLIB4',
                              # 'ZLIB5', 'ZLIB9', 'BLOSC5', 'BLZ4HC5'
              verbosity_level):

    ### LOGGER
    logger = get_logger('FanalAna', verbosity_level)

    ### DETECTOR NAME
    det_name = getattr(DetName, det_name)

    ### SPATIAL DEFINITION
    spatial_def = getattr(SpatialDef, spatial_def)


    ### PRINTING GENERAL INFO
    print('\n***********************************************************************************')
    print('***** Detector: {}'.format(det_name.name))
    print('***** Analizing {} events'.format(event_type))
    print('***** Energy Resolution: {:.2f}% FWFM at Qbb'.format(fwhm / units.perCent))
    print('***** Spatial definition: {}'.format(spatial_def.name))
    print('***********************************************************************************\n')

    print('* Voxel Eth: {:4.1f} keV   Track Eth: {:4.1f} keV   Max Num Tracks: {}\n'
          .format(voxel_Eth/units.keV, track_Eth/units.keV, max_num_tracks))
    print('* Blob radius: {:.1f} mm   Blob Eth: {:4.1f} keV\n'
          .format(blob_radius, blob_Eth / units.keV))
    print('* ROI limits: [{:4.1f}, {:4.1f}] keV\n'
          .format(roi_Emin/units.keV, roi_Emax/units.keV))


    ### INPUT RECONSTRUCTION FILE AND GROUP
    reco_group_name = get_reco_group_name(fwhm/units.perCent, spatial_def)
    print('* Input reco file name:', files_in)
    print('  Reco group name: {}\n'.format(reco_group_name))


    ### OUTPUT FILE, ITS GROUPS & ATTRIBUTES
    # Output analysis file
    oFile = tb.open_file(file_out, 'w', filters=tbl_filters(compression))

    # Analysis group Name
    ana_group_name = get_ana_group_name(fwhm/units.perCent, spatial_def)
    oFile.create_group('/', 'FANALIC')
    oFile.create_group('/FANALIC', ana_group_name[9:])

    print('* Output analysis file name:', file_out)
    print('  Ana group name: {}\n'.format(ana_group_name))

    # Attributes
    oFile.set_node_attr(ana_group_name, 'input_reco_file', files_in[0])
    oFile.set_node_attr(ana_group_name, 'input_reco_group', reco_group_name)
    oFile.set_node_attr(ana_group_name, 'event_type', event_type)
    oFile.set_node_attr(ana_group_name, 'energy_resolution', fwhm / units.perCent)
    oFile.set_node_attr(ana_group_name, 'voxel_Eth', voxel_Eth)
    oFile.set_node_attr(ana_group_name, 'track_Eth', track_Eth)
    oFile.set_node_attr(ana_group_name, 'max_num_tracks', max_num_tracks)
    oFile.set_node_attr(ana_group_name, 'blob_radius', blob_radius)
    oFile.set_node_attr(ana_group_name, 'blob_Eth', blob_Eth)
    oFile.set_node_attr(ana_group_name, 'roi_Emin', roi_Emin)
    oFile.set_node_attr(ana_group_name, 'roi_Emax', roi_Emax)


    ### DATA TO STORE
    # Dictionaries for events & voxels data
    events_dict = get_events_ana_dict()
    voxels_dict = get_voxels_ana_dict()


    ### ANALYSIS PROCEDURE

    # Getting the events & voxels data from the reconstruction phase
    # This is the option a little bit slower that requires less memory ... (TO BE CHECKED!!)
    # event_numbers_toAnalize = pd.read_hdf(files_in[0], reco_group_name + '/events',
    #                                      where=['fid_filter = True']).index
    # And this is the fastest option requiring more memory
    events_df = pd.read_hdf(files_in[0], reco_group_name + '/events')
    voxels_df = pd.read_hdf(files_in[0], reco_group_name + '/voxels')

    # Identifying as negligible all the voxels with energy lower than threshold
    voxels_df['negli'] = voxels_df.E < voxel_Eth
    print('* Total Voxels in File: {0}     Negligible Voxels (below {1:3.1f} keV): {2}\n'
          .format(len(voxels_df), voxel_Eth / units.keV,
                  len(voxels_df[voxels_df.negli == True])))

    # Analyzing only the fiducial events ...
    print('* Analyzing events ...\n')

    # Counter of analyzed events for verbosing pourpouses
    num_analyzed_events = 0

    # Looping through all the events that passed the fiducial filter
    for event_id in events_df[events_df.fid_filter].index:

        # Updating counter of analyzed events
        num_analyzed_events += 1
        if not int(str(num_analyzed_events)[-int(math.log10(num_analyzed_events)):]):
            print('* Num analyzed events: {}'.format(num_analyzed_events))

        # Verbosing
        logger.info('Analyzing event Id: {0} ...'.format(event_id))

        # Getting the voxels of current event and their sizes
        event_voxels = voxels_df[voxels_df.event_id == event_id]
        num_event_voxels = len(event_voxels)
        num_event_voxels_negli = len(event_voxels[event_voxels.negli == True])
        event_data = events_df.loc[event_id]
        voxel_dimensions = (event_data.voxel_sizeX, event_data.voxel_sizeY,
                            event_data.voxel_sizeZ)

        logger.info('  Total Voxels: {}   Negli. Voxels: {}   Voxels Size: ({:3.1f}, {:3.1f}, {:3.1f}) mm'
                    .format(num_event_voxels, num_event_voxels_negli,
                            voxel_dimensions[0], voxel_dimensions[1],
                            voxel_dimensions[2]))

        # If there is any negligible Voxel,
        # distribute its energy between its neighbours,
        # if not, all voxels maintain their previous energies
        if num_event_voxels_negli:
            event_voxels_newE = get_new_energies(event_voxels)
        else:
            event_voxels_newE = event_voxels.E.tolist()

        # Translate fanalIC voxels info to IC voxels to make tracks
        ic_voxels = [Voxel(event_voxels.iloc[i].X, event_voxels.iloc[i].Y,
                           event_voxels.iloc[i].Z, event_voxels_newE[i],
                           voxel_dimensions) for i in range(num_event_voxels)]

        # Make tracks
        event_tracks = make_track_graphs(ic_voxels)
        num_event_tracks = len(event_tracks)
        logger.info('  Num initial tracks: {:2}'.format(num_event_tracks))

        # Appending to every voxel, the track it belongs to
        event_voxels_tracks = get_voxel_track_relations(event_voxels, event_tracks)

        # Appending info of this event voxels
        extend_voxels_ana_data(voxels_dict, event_voxels.index,
                               event_voxels_newE, event_voxels_tracks)

        # Processing tracks: Getting energies, sorting and filtering ...
        event_sorted_tracks = process_tracks(event_tracks, track_Eth)
        num_event_tracks    = len(event_sorted_tracks)

        # Storing 3 hottest track info
        if num_event_tracks >= 1:
            track0_E      = event_sorted_tracks[0][0]
            track0_voxels = len(event_sorted_tracks[0][1].nodes())
            track0_length = track_length(event_sorted_tracks[0][1])
        else:
            track0_E = track0_voxels = track0_length = np.nan
        if num_event_tracks >= 2:
            track1_E      = event_sorted_tracks[1][0]
            track1_voxels = len(event_sorted_tracks[1][1].nodes())
            track1_length = track_length(event_sorted_tracks[1][1])
        else:
            track1_E = track1_voxels = track1_length = np.nan
        if num_event_tracks >= 3:
            track2_E      = event_sorted_tracks[2][0]
            track2_voxels = len(event_sorted_tracks[2][1].nodes())
            track2_length = track_length(event_sorted_tracks[2][1])
        else:
            track2_E = track2_voxels = track2_length = np.nan

        # Applying the tracks filter
        tracks_filter = ((num_event_tracks > 0) &
                         (num_event_tracks <= max_num_tracks))

        # Verbosing
        logger.info('  Num final tracks: {:2}  -->  tracks_filter: {}'
                    .format(num_event_tracks, tracks_filter))

        # For those events NOT passing the tracks filter:
        # Storing data of NON tracks_filter vents
        if not tracks_filter:
            extend_events_ana_data(events_dict, event_id, num_event_tracks,
                                   track0_E, track0_voxels, track0_length,
                                   track1_E, track1_voxels, track1_length,
                                   track2_E, track2_voxels, track2_length,
                                   tracks_filter)

        # Only for those events passing the tracks filter:
        else:
            # Getting the blob energies of the track with highest energy
            blobs_E = blob_energies(event_sorted_tracks[0][1], blob_radius)
            blob1_E = blobs_E[1]
            blob2_E = blobs_E[0]

            # Applying the blobs filter
            blobs_filter = (blob2_E > blob_Eth)

            # Verbosing
            logger.info('  Blob 1 energy: {:4.1f} keV   Blob 2 energy: {:4.1f} keV  -->  Blobs filter: {}'
                        .format(blob1_E/units.keV, blob2_E/units.keV, blobs_filter))

            # For those events NOT passing the blobs filter:
            # Storing data of NON blobs_filter vents
            if not blobs_filter:
                extend_events_ana_data(events_dict, event_id, num_event_tracks,
                                       track0_E, track0_voxels, track0_length,
                                       track1_E, track1_voxels, track1_length,
                                       track2_E, track2_voxels,  track2_length,
                                       tracks_filter, blob1_E = blob1_E,
                                       blob2_E = blob2_E, blobs_filter = blobs_filter)

            # Only for those events passing the blobs filter:
            else:
                # Getting the total event smeared energy
                event_smE = events_df.loc[event_id].smE

                # Applying the ROI filter
                roi_filter = ((event_smE >= roi_Emin) & (event_smE <= roi_Emax))

                # Verbosing
                logger.info('  Event energy: {:6.1f} keV  -->  ROI filter: {}'
                            .format(event_smE / units.keV, roi_filter))

                # Storing all the events (as this is the last filter)
                extend_events_ana_data(events_dict, event_id, num_event_tracks,
                                       track0_E, track0_voxels, track0_length,
                                       track1_E, track1_voxels, track1_length,
                                       track2_E, track2_voxels, track2_length,
                                       tracks_filter, blob1_E = blob1_E,
                                       blob2_E = blob2_E, blobs_filter = blobs_filter,
                                       roi_filter = roi_filter)


    ### STORING DATA
    # Storing events and voxels dataframes
    print('\n* Storing data in the output file ...\n  {}\n'.format(file_out))
    store_events_ana_data(file_out, ana_group_name, events_df, events_dict)
    store_voxels_ana_data(file_out, ana_group_name, voxels_df, voxels_dict)

    # Storing event counters as attributes
    tracks_filter_events, blobs_filter_events, roi_filter_events = \
        store_events_ana_counters(oFile, ana_group_name, events_df)


    ### Ending ...
    oFile.close()
    print('* Analysis done !!\n')

    # Printing analysis numbers
    with tb.open_file(files_in[0], mode='r') as iFile:
        simulated_events  = iFile.get_node_attr(reco_group_name, 'simulated_events')
        stored_events     = iFile.get_node_attr(reco_group_name, 'stored_events')
        smE_filter_events = iFile.get_node_attr(reco_group_name, 'smE_filter_events')
        fid_filter_events = iFile.get_node_attr(reco_group_name, 'fid_filter_events')

    print('''* Event counters:
  Simulated events:     {0:9}
  Stored events:        {1:9}
  smE_filter events:    {2:9}
  fid_filter events:    {3:9}
  tracks_filter events: {4:9}
  blobs_filter events:  {5:9}
  roi_filter events:    {6:9}\n'''
    .format(simulated_events, stored_events, smE_filter_events,
            fid_filter_events, tracks_filter_events, blobs_filter_events,
            roi_filter_events))
Exemple #5
0
def fanal_reco(
    det_name,  # Detector name: 'new', 'next100', 'next500'
    event_type,  # Event type: 'bb0nu', 'Tl208', 'Bi214'
    fwhm,  # FWHM at Qbb
    e_min,  # Minimum smeared energy for energy filtering
    e_max,  # Maximum smeared energy for energy filtering
    spatial_def,  # Spatial definition: 'low', 'high'
    veto_width,  # Veto width for fiducial filtering
    min_veto_e,  # Minimum energy in veto for fiducial filtering
    files_in,  # Input files
    event_range,  # Range of events to analyze (currently not applied)
    file_out,  # Output file
    compression,  # Compression of output file: 'ZLIB1', 'ZLIB4',
    # 'ZLIB5', 'ZLIB9', 'BLOSC5', 'BLZ4HC5'
    verbosity_level):
    """Run the fanalIC reconstruction stage over a set of MC input files.

    For every event stored in ``files_in`` the total MC energy deposited
    in the ACTIVE volume is smeared (Gaussian, sigma derived from ``fwhm``
    at Qbb) and the smeared-energy filter [e_min, e_max] is applied.
    Events passing it get their hit energies smeared and hit positions
    translated, are voxelized with the size implied by ``spatial_def``,
    and are checked for fiduciality against ``veto_width`` /
    ``min_veto_e``.  Per-event and per-voxel data plus event counters are
    written to ``file_out``.

    Raises:
        ValueError: if e_max is not higher than e_min.
    """

    ### LOGGER
    logger = get_logger('FanalReco', verbosity_level)

    ### DETECTOR NAME & its ACTIVE dimensions
    det_name = getattr(DetName, det_name)
    ACTIVE_dimensions = get_active_size(det_name)

    ### RECONSTRUCTION DATA
    # Smearing energy settings: 2.355 is the usual approximation of the
    # Gaussian FWHM -> sigma conversion factor 2*sqrt(2*ln2) = 2.3548...
    fwhm_Qbb = fwhm * Qbb
    sigma_Qbb = fwhm_Qbb / 2.355

    # Explicit validation: an `assert` here would be stripped under -O.
    if e_max <= e_min:
        raise ValueError(
            'SmE_filter settings not valid. e_max must be higher than e_min.')

    # Spatial definition & derived voxel size
    spatial_def = getattr(SpatialDef, spatial_def)
    voxel_size = get_voxel_size(spatial_def)

    # Fiducial limits
    fid_dimensions = get_fiducial_size(ACTIVE_dimensions, veto_width)

    ### PRINTING GENERAL INFO
    print(
        '\n***********************************************************************************'
    )
    print('***** Detector: {}'.format(det_name.name))
    print('***** Reconstructing {} events'.format(event_type))
    print('***** Energy Resolution: {:.2f}% FWHM at Qbb'.format(fwhm /
                                                                units.perCent))
    print('***** Spatial definition: {}'.format(spatial_def.name))
    print(
        '***********************************************************************************\n'
    )

    print(
        '* Detector-Active dimensions [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}'
        .format(ACTIVE_dimensions.z_min, ACTIVE_dimensions.z_max,
                ACTIVE_dimensions.rad))
    print(
        '         ... fiducial limits [mm]:  Zmin: {:7.1f}   Zmax: {:7.1f}   Rmax: {:7.1f}\n'
        .format(fid_dimensions.z_min, fid_dimensions.z_max,
                fid_dimensions.rad))
    print('* Sigma at Qbb: {:.3f} keV.\n'.format(sigma_Qbb / units.keV))
    print('* Voxel_size: {} mm.\n'.format(voxel_size))

    print('* {0} {1} input files:'.format(len(files_in), event_type))
    for iFileName in files_in:
        print(' ', iFileName)

    ### OUTPUT FILE, ITS GROUPS & ATTRIBUTES
    # Managed as a context so the file is closed even if the
    # reconstruction fails half-way (the previous version leaked the
    # handle on any exception raised before the explicit close()).
    with tb.open_file(file_out, 'w',
                      filters=tbl_filters(compression)) as oFile:

        # Reco group Name
        reco_group_name = get_reco_group_name(fwhm / units.perCent,
                                              spatial_def)
        oFile.create_group('/', 'FANALIC')
        oFile.create_group('/FANALIC', reco_group_name[9:])

        print('\n* Output file name:', file_out)
        print('  Reco group name:  {}\n'.format(reco_group_name))

        # Attributes documenting the reconstruction settings
        oFile.set_node_attr(reco_group_name, 'input_sim_files', files_in)
        oFile.set_node_attr(reco_group_name, 'event_type', event_type)
        oFile.set_node_attr(reco_group_name, 'energy_resolution',
                            fwhm / units.perCent)
        oFile.set_node_attr(reco_group_name, 'smE_filter_Emin', e_min)
        oFile.set_node_attr(reco_group_name, 'smE_filter_Emax', e_max)
        oFile.set_node_attr(reco_group_name, 'fiducial_filter_VetoWidth',
                            veto_width)
        oFile.set_node_attr(reco_group_name, 'fiducial_filter_MinVetoE',
                            min_veto_e)

        ### DATA TO STORE
        # Event counters
        simulated_events = 0
        stored_events = 0
        analyzed_events = 0  # progress counter only; not stored

        # Dictionaries for events & voxels data
        events_dict = get_events_reco_dict()
        voxels_dict = get_voxels_reco_dict()

        ### RECONSTRUCTION PROCEDURE
        # Looping through all the input files
        for iFileName in files_in:
            # Updating simulated and stored event counters from the MC
            # configuration table of each input file.
            configuration_df = pd.read_hdf(iFileName,
                                           '/MC/configuration',
                                           mode='r')
            simulated_events += int(
                configuration_df[configuration_df.param_key ==
                                 'num_events'].param_value)
            stored_events += int(
                configuration_df[configuration_df.param_key ==
                                 'saved_events'].param_value)

            with tb.open_file(iFileName, mode='r') as iFile:
                file_event_numbers = iFile.root.MC.extents.cols.evt_number
                print('* Processing {0}  ({1} events) ...'.format(
                    iFileName, len(file_event_numbers)))

                # Loading into memory all the particles & hits in the file
                file_mcParts = load_mcparticles(iFileName)
                file_mcHits = load_mchits(iFileName)

                # Looping through all the events in the file
                for event_number in file_event_numbers:
                    analyzed_events += 1
                    _reconstruct_event(event_number,
                                       file_mcParts[event_number],
                                       file_mcHits[event_number],
                                       sigma_Qbb, e_min, e_max,
                                       ACTIVE_dimensions, voxel_size,
                                       fid_dimensions, min_veto_e,
                                       events_dict, voxels_dict, logger)

        ### STORING DATA
        # Storing events and voxels dataframes
        print('\n* Storing data in the output file ...\n  {}\n'.format(
            file_out))
        store_events_reco_data(file_out, reco_group_name, events_dict)
        store_voxels_reco_data(file_out, reco_group_name, voxels_dict)

        # Storing event counters as attributes
        smE_filter_events = sum(events_dict['smE_filter'])
        fid_filter_events = sum(events_dict['fid_filter'])
        store_events_reco_counters(oFile, reco_group_name, simulated_events,
                                   stored_events, smE_filter_events,
                                   fid_filter_events)

    print('* Reconstruction done !!\n')

    # Printing reconstruction numbers
    print('* Event counters ...')
    print('''  Simulated events:  {0:9}
  Stored events:     {1:9}
  smE_filter events: {2:9}
  fid_filter events: {3:9}\n'''.format(simulated_events, stored_events,
                                       smE_filter_events, fid_filter_events))


def _reconstruct_event(event_number, event_mcParts, event_mcHits,
                       sigma_Qbb, e_min, e_max, active_dimensions,
                       voxel_size, fid_dimensions, min_veto_e,
                       events_dict, voxels_dict, logger):
    """Reconstruct one MC event, appending its results to the data dicts.

    Event data always goes into ``events_dict``; voxel data is added to
    ``voxels_dict`` only for events passing the smeared-energy filter.
    """
    # Verbosing
    logger.info('Reconstructing event Id: {0} ...'.format(event_number))

    num_parts = len(event_mcParts)

    # Only hits deposited in the ACTIVE volume count towards the event.
    active_mcHits = [hit for hit in event_mcHits if hit.label == 'ACTIVE']
    num_hits = len(active_mcHits)

    # The event mc energy is the sum of the energy of all the hits
    event_mcE = sum(hit.E for hit in active_mcHits)

    # Smearing the event energy and applying the smE filter
    event_smE = smear_evt_energy(event_mcE, sigma_Qbb, Qbb)
    event_smE_filter = (e_min <= event_smE <= e_max)

    # Verbosing
    logger.info(
        '  Num mcHits: {0:3}   mcE: {1:.1f} keV   smE: {2:.1f} keV   smE_filter: {3}'
        .format(num_hits, event_mcE / units.keV,
                event_smE / units.keV, event_smE_filter))

    # Events NOT passing the smE filter: store the event data and stop here.
    if not event_smE_filter:
        extend_events_reco_data(events_dict,
                                event_number,
                                evt_num_MCparts=num_parts,
                                evt_num_MChits=num_hits,
                                evt_mcE=event_mcE,
                                evt_smE=event_smE,
                                evt_smE_filter=event_smE_filter)
        return

    # Smearing hit energies (scaled so they add up to the smeared
    # event energy)
    hits_smE = smear_hit_energies(active_mcHits, event_smE / event_mcE)

    # Translating hit positions along the drift
    hits_transPositions = translate_hit_positions(active_mcHits,
                                                  DRIFT_VELOCITY)

    # Creating the smHits with the smeared energies and translated
    # positions, then dropping hits pushed outside the ACTIVE region
    # (due to translation)
    active_smHits = [MCHit(hits_transPositions[i], active_mcHits[i].time,
                           hits_smE[i], 'ACTIVE')
                     for i in range(num_hits)]
    active_smHits = [hit for hit in active_smHits
                     if hit.Z < active_dimensions.z_max]

    # Voxelizing using the active_smHits ...
    event_voxels = voxelize_hits(active_smHits,
                                 voxel_size,
                                 strict_voxel_size=True)
    eff_voxel_size = event_voxels[0].size

    # Storing voxels info
    for voxel in event_voxels:
        extend_voxels_reco_data(voxels_dict, event_number, voxel)

    # Check fiduciality
    voxels_minZ, voxels_maxZ, voxels_maxRad, veto_energy, fiducial_filter = \
        check_event_fiduciality(event_voxels, fid_dimensions, min_veto_e)

    # Storing the event data
    extend_events_reco_data(events_dict,
                            event_number,
                            evt_num_MCparts=num_parts,
                            evt_num_MChits=num_hits,
                            evt_mcE=event_mcE,
                            evt_smE=event_smE,
                            evt_smE_filter=event_smE_filter,
                            evt_num_voxels=len(event_voxels),
                            evt_voxel_sizeX=eff_voxel_size[0],
                            evt_voxel_sizeY=eff_voxel_size[1],
                            evt_voxel_sizeZ=eff_voxel_size[2],
                            evt_voxels_minZ=voxels_minZ,
                            evt_voxels_maxZ=voxels_maxZ,
                            evt_voxels_maxRad=voxels_maxRad,
                            evt_veto_energy=veto_energy,
                            evt_fid_filter=fiducial_filter)

    # Verbosing
    logger.info(
        '  NumVoxels: {:3}   minZ: {:.1f} mm   maxZ: {:.1f} mm   maxR: {:.1f} mm   veto_E: {:.1f} keV   fid_filter: {}'
        .format(len(event_voxels), voxels_minZ, voxels_maxZ,
                voxels_maxRad, veto_energy / units.keV,
                fiducial_filter))
    for voxel in event_voxels:
        logger.debug(
            '    Voxel pos: ({:5.1f}, {:5.1f}, {:5.1f}) mm   E: {:5.1f} keV'
            .format(voxel.X / units.mm, voxel.Y / units.mm,
                    voxel.Z / units.mm, voxel.E / units.keV))