Example #1
def get_kr_pos_rec(tracking_Q   : pd.Series,
                   energy_s2    : pd.DataFrame,
                   tracking_map : pd.DataFrame
                  )            -> XYZ :
    """
    Reconstructs the XY position from the charge barycenter around the sensor with max charge.
    Reconstructs the Z  position from the S2 start time times the drift velocity.
    """
    hot_id  = tracking_Q.idxmax()
    hot_sns = tracking_map.loc[hot_id]
    hot_q   = tracking_Q.loc[hot_id]

    if np.isnan(hot_sns.id_left): left_pos, left_q = XYZ(0., 0., 0.), 0.
    else: left_pos, left_q = get_pos_q(hot_sns.id_left, tracking_map, tracking_Q)

    if np.isnan(hot_sns.id_right): right_pos, right_q = XYZ(0., 0., 0.), 0.
    else: right_pos, right_q = get_pos_q(hot_sns.id_right, tracking_map, tracking_Q)

    if np.isnan(hot_sns.id_up): up_pos, up_q = XYZ(0., 0., 0.), 0.
    else: up_pos, up_q = get_pos_q(hot_sns.id_up, tracking_map, tracking_Q)

    if np.isnan(hot_sns.id_down): down_pos, down_q = XYZ(0., 0., 0.), 0.
    else: down_pos, down_q = get_pos_q(hot_sns.id_down, tracking_map, tracking_Q)

    # Reconstructing position
    x_rec = get_barycenter(np.array([hot_sns.x, left_pos.x, right_pos.x]),
                           np.array([hot_q    , left_q    , right_q]))

    y_rec = get_barycenter(np.array([hot_sns.y, up_pos.y, down_pos.y]),
                           np.array([hot_q    , up_q    , down_q]))

    z_rec = energy_s2.time.min() * DRIFT_VELOCITY

    return XYZ(x_rec, y_rec, z_rec)
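
Example #1 relies on an XYZ container and a get_barycenter helper defined elsewhere in FANAL and not shown here. A minimal sketch of what compatible pieces could look like, assuming XYZ is a plain named tuple and the barycenter is an ordinary charge-weighted mean (names and signatures are illustrative, not the project's actual code):

from typing import NamedTuple
import numpy as np

class XYZ(NamedTuple):
    # Hypothetical stand-in for FANAL's XYZ container
    x: float
    y: float
    z: float

    @classmethod
    def from_array(cls, arr):
        return cls(*arr)

    def distance(self, other: "XYZ") -> float:
        return float(np.linalg.norm(np.subtract(self, other)))


def get_barycenter(positions: np.ndarray, charges: np.ndarray) -> float:
    # Charge-weighted mean of the given coordinates (illustrative sketch)
    total = charges.sum()
    return float(np.dot(positions, charges) / total) if total > 0. else 0.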
Example #2
def get_and_store_blobs(track: Track, graph: nx.Graph,
                        blob_rad: float) -> Tuple[XYZ, XYZ]:
    """
    Gets the extrema and blob info from the nx.Graph and stores them in the Track.
    Returns the blob positions ordered by energy (most energetic first).
    """
    distances = dict(
        nx.all_pairs_dijkstra_path_length(graph, weight='distance'))
    ext1, ext2, length = find_extrema_and_length_from_dict(distances)

    # Getting blobs info
    blob1_energy, blob1_num_voxels, blob2_energy, blob2_num_voxels, ovlp_energy = \
        blobs_info(distances, (ext1, ext2), blob_rad)

    # Storing ordered information into the Track
    track.length = length
    track.ovlp_energy = ovlp_energy

    if blob1_energy >= blob2_energy:
        track.blob1_x, track.blob1_y, track.blob1_z = ext1[0], ext1[1], ext1[2]
        track.blob2_x, track.blob2_y, track.blob2_z = ext2[0], ext2[1], ext2[2]
        track.blob1_energy = blob1_energy
        track.blob2_energy = blob2_energy
        track.blob1_num_hits = blob1_num_voxels
        track.blob2_num_hits = blob2_num_voxels
    else:
        track.blob1_x, track.blob1_y, track.blob1_z = ext2[0], ext2[1], ext2[2]
        track.blob2_x, track.blob2_y, track.blob2_z = ext1[0], ext1[1], ext1[2]
        track.blob1_energy = blob2_energy
        track.blob2_energy = blob1_energy
        track.blob1_num_hits = blob2_num_voxels
        track.blob2_num_hits = blob1_num_voxels

    return XYZ(track.blob1_x, track.blob1_y, track.blob1_z), \
           XYZ(track.blob2_x, track.blob2_y, track.blob2_z)
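
The helper find_extrema_and_length_from_dict is not shown in these examples. One plausible implementation, assuming distances is the nested {source: {target: length}} dict built above from nx.all_pairs_dijkstra_path_length, simply picks the pair of voxels separated by the largest shortest-path distance (an illustrative sketch, not the FANAL code):

def find_extrema_and_length_from_dict(distances: dict):
    # Return the two nodes furthest apart along the graph and their distance
    ext1, ext2, length = None, None, -1.0
    for src, targets in distances.items():
        for dst, dist in targets.items():
            if dist > length:
                ext1, ext2, length = src, dst, dist
    return ext1, ext2, length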
Example #3
def order_true_extrema(ext1_pos  : XYZ,
                       ext2_pos  : XYZ,
                       blob1_pos : XYZ,
                       blob2_pos : XYZ
                      ) -> Tuple[XYZ, XYZ] :
    """
    Returns the true extrema ordered to match the blob order.
    The ordering that minimizes the total distance between the true extrema
    and the blob positions is the one selected.
    """
    ini_dist  = ext1_pos.distance(blob1_pos) + ext2_pos.distance(blob2_pos)
    swap_dist = ext2_pos.distance(blob1_pos) + ext1_pos.distance(blob2_pos)

    if (ini_dist <= swap_dist): return (ext1_pos, ext2_pos)
    else                      : return (ext2_pos, ext1_pos)
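
A quick usage sketch with toy coordinates (using the hypothetical XYZ container sketched after Example #1): the extrema come back swapped because pairing ext2 with blob1 gives the smaller total distance.

ext1, ext2   = XYZ(1., 0., 0.), XYZ(9., 0., 0.)
blob1, blob2 = XYZ(10., 0., 0.), XYZ(0., 0., 0.)

# ini_dist = 9 + 9 = 18, swap_dist = 1 + 1 = 2, so the swapped order is returned
assert order_true_extrema(ext1, ext2, blob1, blob2) == (ext2, ext1)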
Example #4
def voxelize_hits2(mcHits: pd.DataFrame,
                   voxel_size: XYZ,
                   baryc: bool = True) -> Tuple[pd.DataFrame, XYZ]:
    """
    Takes an mcHits DataFrame from nexus with fields (x, y, z, energy),
    voxelizes the data into cubic voxels of size 'voxel_size', and returns a
    DataFrame with the voxels and the effective voxel size.
    """
    def voxelize_hits_bc(df: pd.DataFrame) -> pd.Series:
        """
        Computes the barycenters in x,y,z
        """
        def barycenter(df, var, etot):
            # Energy-weighted mean of coordinate 'var'
            return np.dot(df[var], df.energy) / etot

        d = {}
        etot = df['energy'].sum()
        d['x'] = barycenter(df, 'x', etot)
        d['y'] = barycenter(df, 'y', etot)
        d['z'] = barycenter(df, 'z', etot)
        d['energy'] = etot
        return pd.Series(d)

    def voxelize_hits_mean(df: pd.DataFrame) -> pd.Series:
        """
        Compute the averages in x, y, z
        """
        d = {}
        d['x'] = df['x'].mean()
        d['y'] = df['y'].mean()
        d['z'] = df['z'].mean()
        d['energy'] = df['energy'].sum()
        return pd.Series(d)

    df = mcHits.copy()
    (xbins, ybins, zbins), eff_sizes = \
        bin_data_with_equal_bin_size([df.x, df.y, df.z], voxel_size)
    num_voxels = len(xbins) * len(ybins) * len(zbins)
    if (num_voxels >= 1e+6):
        print(f"*** Caution: Number of voxels: {num_voxels} is too high.")

    df['x_bins'] = pd.cut(df['x'], bins=xbins, labels=range(len(xbins) - 1))
    df['y_bins'] = pd.cut(df['y'], bins=ybins, labels=range(len(ybins) - 1))
    df['z_bins'] = pd.cut(df['z'], bins=zbins, labels=range(len(zbins) - 1))

    if baryc:
        vhits = df.groupby(['x_bins','y_bins','z_bins']) \
                  .apply(voxelize_hits_bc).dropna().reset_index(drop=True)
    else:
        vhits = df.groupby(['x_bins','y_bins','z_bins']) \
                  .apply(voxelize_hits_mean).dropna().reset_index(drop=True)

    return vhits, XYZ.from_array(eff_sizes)
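
A hedged usage sketch for voxelize_hits2 with a toy hits DataFrame, assuming bin_data_with_equal_bin_size is importable from FANAL and using the hypothetical XYZ container sketched after Example #1:

import pandas as pd

hits = pd.DataFrame({
    'x':      [1.0, 1.2, 8.5, 8.7],
    'y':      [0.5, 0.4, 3.1, 3.0],
    'z':      [2.0, 2.1, 9.9, 9.8],
    'energy': [0.3, 0.2, 0.4, 0.1],
})

# Voxelize into ~5 mm bins; with baryc=True each voxel sits at the
# energy-weighted barycenter of the hits it contains.
voxels, eff_size = voxelize_hits2(hits, XYZ(5., 5., 5.), baryc=True)
print(voxels[['x', 'y', 'z', 'energy']])
print(eff_size)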
Example #5
def get_true_extrema(mcParticles : pd.DataFrame,
                     event_type  : str
                    ) -> Tuple[XYZ, XYZ] :
    """
    Returns the true extrema obtained from the MC particles.
    """

    # If the event_type is a bb decay, the true extrema correspond to the final
    # positions of the 2 primary particles (those with IDs 1 and 2)
    if 'bb' in event_type:
        ini_part1 = mcParticles.loc[1]
        ini_part2 = mcParticles.loc[2]
        return (XYZ(ini_part1.final_x, ini_part1.final_y, ini_part1.final_z),
                XYZ(ini_part2.final_x, ini_part2.final_y, ini_part2.final_z))

    # If the event_type is a single e-, the true extrema correspond to the initial
    # and final positions of the initial particle
    if 'e-' in event_type:
        ini_part = mcParticles.loc[1]
        return (XYZ(ini_part.initial_x, ini_part.initial_y, ini_part.initial_z),
                XYZ(ini_part.final_x  , ini_part.final_y  , ini_part.final_z))

    # If the event type is of any other kind (basically any real background),
    # the true extrema are set to the initial and final positions of the
    # longest particle
    longest_part = mcParticles.iloc[mcParticles.length.argmax()]
    return (XYZ(longest_part.initial_x, longest_part.initial_y, longest_part.initial_z),
            XYZ(longest_part.final_x  , longest_part.final_y  , longest_part.final_z))
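
A toy usage sketch, assuming mcParticles is indexed by particle ID and carries the initial_*/final_* position columns and length used in the function body (values are made up for illustration):

parts = pd.DataFrame(
    {'initial_x': [0., 0.], 'initial_y': [0., 0.], 'initial_z': [0., 0.],
     'final_x':   [5., -3.], 'final_y': [1., 2.], 'final_z':   [0., 4.],
     'length':    [30., 25.]},
    index=[1, 2])

# For a 'bb' event the true extrema are the final positions of particles 1 and 2
ext1, ext2 = get_true_extrema(parts, 'bb0nu')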
Example #6
 def pos_rec(self):
     return XYZ(self.x_rec, self.y_rec, self.z_rec)
Example #7
 def pos_true(self):
     return XYZ(self.x_true, self.y_true, self.z_true)
Example #8
File: voxels.py Project: next-exp/FANAL
 def position(self):
     return XYZ(self.x, self.y, self.z)
Example #9
def get_kr_pos_true(mcHits : pd.DataFrame) -> XYZ :
    """
    The true position is the mean position of all the hits
    """
    return XYZ(mcHits.x.mean(), mcHits.y.mean(), mcHits.z.mean())
Example #10
def analyze_bb_event_ic(
        detector: Detector, event_id: int, params: BBAnalysisParams,
        fiducial_checker: Callable, event_mcParts: pd.DataFrame,
        event_mcHits: pd.DataFrame) -> Tuple[Event, TrackList, VoxelList]:
    """
    Assesses the global acceptance factor after fiducial, topology and ROI cuts,
    based on the paolina functions implemented in IC.
    """
    # Data to be filled
    event_data = Event()
    tracks_data = TrackList()
    voxels_data = VoxelList()

    # Storing basic MC data
    event_data.event_id = event_id
    event_data.num_mcParts = len(event_mcParts)
    event_data.num_mcHits = len(event_mcHits)

    logger.info(f"Num mcParticles: {event_data.num_mcParts:3}   " + \
                f"Num mcHits: {event_data.num_mcHits:3}   ")

    # Processing MC data
    event_data.mc_energy, event_data.mc_filter = \
        check_mc_data(event_mcHits, params.buffer_Eth, params.e_min, params.e_max)
    if not event_data.mc_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the mc_filter ###
    # Reconstruct hits
    active_mcHits = event_mcHits[event_mcHits.label == 'ACTIVE']
    recons_hits = reconstruct_hits(detector, active_mcHits,
                                   event_data.mc_energy, params.fwhm,
                                   params.trans_diff, params.long_diff)

    # Event smeared energy
    event_data.sm_energy = recons_hits.energy.sum()
    event_data.energy_filter = (params.e_min <= event_data.sm_energy <=
                                params.e_max)
    logger.info(f"smE: {event_data.sm_energy/units.keV:.1f} keV   " + \
                f"ENERGY filter: {event_data.energy_filter}")
    if not event_data.energy_filter:
        return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the energy_filter ###
    # Creating the IChits from reconstructed hits
    ic_hits = recons_hits.apply(lambda hit: \
        MCHit((hit.x, hit.y, hit.z), hit.time, hit.energy, 'ACTIVE'), axis=1).tolist()

    # Voxelizing using the ic_hits ...
    ic_voxels = voxelize_hits(
        ic_hits,
        [params.voxel_size_x, params.voxel_size_y, params.voxel_size_z],
        params.strict_voxel_size)

    # Cleaning voxels with energy < voxel_Eth
    ic_voxels = clean_voxels(ic_voxels, params.voxel_Eth)

    event_data.num_voxels = len(ic_voxels)
    eff_voxel_size = ic_voxels[0].size
    event_data.voxel_size_x = eff_voxel_size[0]
    event_data.voxel_size_y = eff_voxel_size[1]
    event_data.voxel_size_z = eff_voxel_size[2]
    logger.info(
        f"Num Voxels: {event_data.num_voxels:3}  of size: {eff_voxel_size} mm")

    # Check fiduciality
    event_data.veto_energy, event_data.fiduc_filter = \
        check_event_fiduciality(fiducial_checker, ic_voxels, params.veto_Eth)
    logger.info(f"Veto_E: {event_data.veto_energy/units.keV:.1f} keV   " + \
                f"FIDUC filter: {event_data.fiduc_filter}")

    if not event_data.fiduc_filter:
        # Storing voxels without track-id info
        for voxel_id in range(len(ic_voxels)):
            voxels_data.add(
                Voxel.from_icVoxel(event_id, -1, voxel_id,
                                   ic_voxels[voxel_id]))
        logger.debug(voxels_data)
        return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the fiduc_filter ###
    # Make tracks
    ic_tracks = make_track_graphs(ic_voxels)

    # Storing tracks from ic_tracks
    for track_id in range(len(ic_tracks)):
        ic_track = ic_tracks[track_id]
        tracks_data.add(Track.from_icTrack(event_id, track_id, ic_track))

        # Storing voxels from ic_voxels
        ic_voxels = list(ic_track.nodes())
        for voxel_id in range(len(ic_voxels)):
            voxels_data.add(
                Voxel.from_icVoxel(event_id, track_id, voxel_id,
                                   ic_voxels[voxel_id]))

    logger.debug(voxels_data)

    event_data.num_tracks = tracks_data.len()

    event_data.track_filter = (
        (event_data.num_tracks > 0) &
        (event_data.num_tracks <= params.max_num_tracks))

    logger.info(f"Num tracks: {event_data.num_tracks:3}  ->" + \
                f"  TRACK filter: {event_data.track_filter}")

    if not event_data.track_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the track_filter ###
    the_track = tracks_data.tracks[0]

    # Getting & Storing Blobs info
    blob1_energy, blob2_energy, blob1_hits, blob2_hits, blob1_pos, blob2_pos = \
        blob_energies_hits_and_centres(ic_tracks[0], params.blob_radius)
    blob1_pos, blob2_pos = XYZ.from_array(blob1_pos), XYZ.from_array(blob2_pos)

    the_track.blob1_energy, the_track.blob1_num_hits = blob1_energy, len(
        blob1_hits)
    the_track.blob1_x, the_track.blob1_y, the_track.blob1_z = \
        blob1_pos.x, blob1_pos.y, blob1_pos.z
    the_track.blob2_energy, the_track.blob2_num_hits = blob2_energy, len(
        blob2_hits)
    the_track.blob2_x, the_track.blob2_y, the_track.blob2_z = \
        blob2_pos.x, blob2_pos.y, blob2_pos.z

    the_track.ovlp_energy = \
        float(sum(hit.E for hit in set(blob1_hits).intersection(set(blob2_hits))))

    # Getting & Storing True extrema info
    ext1, ext2 = get_true_extrema(event_mcParts, params.event_type)
    ext1, ext2 = order_true_extrema(ext1, ext2, blob1_pos, blob2_pos)

    the_track.t_ext1_x, the_track.t_ext1_y, the_track.t_ext1_z = ext1.x, ext1.y, ext1.z
    the_track.t_ext2_x, the_track.t_ext2_y, the_track.t_ext2_z = ext2.x, ext2.y, ext2.z

    # Storing Track info in event data
    event_data.track_length = the_track.length
    event_data.blob1_energy, event_data.blob2_energy = blob1_energy, blob2_energy

    logger.info(tracks_data)

    # Applying the blob filter
    event_data.blob_filter = ((event_data.blob2_energy > params.blob_Eth) &
                              (the_track.ovlp_energy == 0.))

    logger.info(f"Blob 1 energy: {event_data.blob1_energy/units.keV:4.1f} keV " + \
                f"  Blob 2 energy: {event_data.blob2_energy/units.keV:4.1f} keV"  + \
                f"  Overlap: {the_track.ovlp_energy/units.keV:4.1f} keV"  + \
                f"  ->  BLOB filter: {event_data.blob_filter}")

    if not event_data.blob_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the blob_filter ###
    # Applying the ROI filter
    event_data.roi_filter = ((event_data.sm_energy >= params.roi_Emin) &
                             (event_data.sm_energy <= params.roi_Emax))

    logger.info(f"Event energy: {event_data.sm_energy/units.keV:6.1f} keV" + \
                f"  ->  ROI filter: {event_data.roi_filter}")

    return event_data, tracks_data, voxels_data
Example #11
def analyze_bb_event_2(
        detector: Detector, event_id: int, params: BBAnalysisParams,
        fiducial_checker: Callable, event_mcParts: pd.DataFrame,
        event_mcHits: pd.DataFrame) -> Tuple[Event, TrackList, VoxelList]:
    """
    Assesses the global acceptance factor after fiducial, topology and ROI cuts,
    based on the paolina2 functions implemented in FANAL.
    The main differences with respect to paolina_ic are:
    * Voxel positions come from the contained hits, not the voxel centre.
    * Blob positions are the voxels at the track extrema.
    * No need for the IC event data model.
    """
    # Data to be filled
    event_data = Event()
    tracks_data = TrackList()
    voxels_data = VoxelList()

    # Storing basic MC data
    event_data.event_id = event_id
    event_data.num_mcParts = len(event_mcParts)
    event_data.num_mcHits = len(event_mcHits)

    logger.info(f"Num mcParticles: {event_data.num_mcParts:3}   " + \
                f"Num mcHits: {event_data.num_mcHits:3}   ")

    # Processing MC data
    event_data.mc_energy, event_data.mc_filter = \
        check_mc_data(event_mcHits, params.buffer_Eth, params.e_min, params.e_max)
    if not event_data.mc_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the mc_filter ###
    # Reconstruct hits
    active_mcHits = event_mcHits[event_mcHits.label == 'ACTIVE']
    recons_hits = reconstruct_hits(detector, active_mcHits,
                                   event_data.mc_energy, params.fwhm,
                                   params.trans_diff, params.long_diff)

    # Event smeared energy
    event_data.sm_energy = recons_hits.energy.sum()
    event_data.energy_filter = (params.e_min <= event_data.sm_energy <=
                                params.e_max)
    logger.info(f"smE: {event_data.sm_energy/units.keV:.1f} keV   " + \
                f"ENERGY filter: {event_data.energy_filter}")
    if not event_data.energy_filter:
        return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the energy_filter ###
    # Voxelize the reconstructed hits (barycenter vs. mean set by params.barycenter)
    voxel_size = XYZ(params.voxel_size_x, params.voxel_size_y,
                     params.voxel_size_z)
    tmp_voxels, eff_voxel_size = voxelize_hits2(recons_hits,
                                                voxel_size,
                                                baryc=params.barycenter)

    # Cleaning voxels with energy < voxel_Eth
    # TODO voxels = clean_voxels_df(voxels, params.voxel_Eth)

    event_data.num_voxels = len(tmp_voxels)
    event_data.voxel_size_x = eff_voxel_size.x
    event_data.voxel_size_y = eff_voxel_size.y
    event_data.voxel_size_z = eff_voxel_size.z
    logger.info(
        f"Num Voxels: {event_data.num_voxels:3}  of size: {eff_voxel_size} mm")

    # Check fiduciality
    event_data.veto_energy, event_data.fiduc_filter = \
        check_event_fiduciality_df(fiducial_checker, tmp_voxels, params.veto_Eth)
    logger.info(f"Veto_E: {event_data.veto_energy/units.keV:.1f} keV   " + \
                f"FIDUC filter: {event_data.fiduc_filter}")

    if not event_data.fiduc_filter:
        # Storing voxels without track-id info
        for voxel_id, voxel in tmp_voxels.iterrows():
            voxels_data.add(
                Voxel(event_id, -1, voxel_id, voxel.x, voxel.y, voxel.z,
                      voxel.energy))
        logger.debug(voxels_data)
        return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the fiduc_filter ###
    # Make tracks
    graphs = make_track_graphs2(tmp_voxels, contiguity=params.contiguity)

    # Storing tracks from graphs
    for graph_id in range(len(graphs)):
        graph = graphs[graph_id]
        tracks_data.add(Track.from_graph(event_id, graph_id, graph))

        # Storing voxels from the graph nodes
        nodes = list(graph.nodes())
        for node_id in range(len(nodes)):
            voxels_data.add(
                Voxel.from_node(event_id, graph_id, node_id, nodes[node_id]))

    logger.debug(voxels_data)

    event_data.num_tracks = tracks_data.len()

    event_data.track_filter = (
        (event_data.num_tracks > 0) &
        (event_data.num_tracks <= params.max_num_tracks))

    logger.info(f"Num tracks: {event_data.num_tracks:3}  ->" + \
                f"  TRACK filter: {event_data.track_filter}")

    if not event_data.track_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the track_filter ###
    the_track = tracks_data.tracks[0]

    # Getting & Storing Blobs info
    blob1_pos, blob2_pos = get_and_store_blobs(the_track, graphs[0],
                                               params.blob_radius)

    # Getting & Storing True extrema info
    t_ext1, t_ext2 = get_true_extrema(event_mcParts, params.event_type)
    t_ext1, t_ext2 = order_true_extrema(t_ext1, t_ext2, blob1_pos, blob2_pos)

    the_track.t_ext1_x, the_track.t_ext1_y, the_track.t_ext1_z = t_ext1.x, t_ext1.y, t_ext1.z
    the_track.t_ext2_x, the_track.t_ext2_y, the_track.t_ext2_z = t_ext2.x, t_ext2.y, t_ext2.z

    # Storing Track info in event data
    event_data.track_length = the_track.length
    event_data.blob1_energy = the_track.blob1_energy
    event_data.blob2_energy = the_track.blob2_energy

    logger.info(tracks_data)

    # Applying the blob filter
    event_data.blob_filter = ((event_data.blob2_energy > params.blob_Eth) &
                              (the_track.ovlp_energy == 0.))

    logger.info(f"Blob 1 energy: {event_data.blob1_energy/units.keV:4.1f} keV " + \
                f"  Blob 2 energy: {event_data.blob2_energy/units.keV:4.1f} keV"  + \
                f"  Overlap: {the_track.ovlp_energy/units.keV:4.1f} keV"  + \
                f"  ->  BLOB filter: {event_data.blob_filter}")

    if not event_data.blob_filter: return event_data, tracks_data, voxels_data

    ### Continue analysis of events passing the blob_filter ###
    # Applying the ROI filter
    event_data.roi_filter = ((event_data.sm_energy >= params.roi_Emin) &
                             (event_data.sm_energy <= params.roi_Emax))

    logger.info(f"Event energy: {event_data.sm_energy/units.keV:6.1f} keV" + \
                f"  ->  ROI filter: {event_data.roi_filter}")

    return event_data, tracks_data, voxels_data
Example #12
def get_sensor_pos(sns_id: int, tracking_map: pd.DataFrame) -> XYZ:
    """
    Returns the XY position of a tracking sensor (z is set to 0)
    """
    sensor = tracking_map.loc[sns_id]
    return XYZ(sensor.x, sensor.y, 0.)