Exemplo n.º 1
0
def group_points(ppn_pts, batch, label):
    """
    Merge PPN points that land in nearly the same location.

    Points are clustered per batch entry with DBSCAN; each cluster is
    replaced by the mean of its member positions and labels.

    Parameters
    ----------
    ppn_pts: np.array
    batch: np.array
    label: np.array

    Returns
    -------
    np.array
        (merged points, merged batch ids, merged labels)
    """
    merged_pts, merged_batch, merged_labels = [], [], []
    for batch_id in np.unique(batch):
        selection = batch == batch_id
        pts_b = ppn_pts[selection]
        labels_b = label[selection]
        # cluster nearby points of the same type
        clusters = dbscan_types(pts_b,
                                labels_b,
                                epsilon=1.99,
                                minpts=1,
                                typemin=0,
                                typemax=5)
        for cluster in clusters:
            merged_pts.append(pts_b[cluster].mean(axis=0))
            merged_batch.append(batch_id)
            merged_labels.append(labels_b[cluster].mean())

    return np.array(merged_pts), np.array(merged_batch), np.array(merged_labels)
Exemplo n.º 2
0
def parse_dbscan(data):
    """
    A function to create dbscan tensor
    Args:
        length 1 array of larcv::EventSparseTensor3D
    Return:
        voxels - numpy array(int32) with shape (N,3) - coordinates
        data   - numpy array(float32) with shape (N,1) - dbscan cluster. -1 if not assigned
    """
    np_voxels, np_types = parse_sparse3d_scn(data)
    # cluster the voxels by type before overwriting the type tensor
    clusts = dbscan_types(np_voxels, np_types)
    # default every voxel to "unassigned", then stamp in cluster ids
    np_types.fill(-1)
    for cluster_id, cluster in enumerate(clusts):
        np_types[cluster] = cluster_id
    return np_voxels, np_types
Exemplo n.º 3
0
def uresnet_ppn_type_point_selector(data,
                                    out,
                                    score_threshold=0.5,
                                    type_threshold=100,
                                    entry=0,
                                    **kwargs):
    """
    Postprocessing of PPN points.

    Parameters
    ----------
    data - 5-types sparse tensor; column 3 is the batch id
    out - uresnet_ppn_type output (dict with 'points', 'mask_ppn2',
        'segmentation' and optionally 'ghost')
    score_threshold - minimum PPN score (softmax of columns 3:5) to keep a point
    type_threshold - maximum distance from a PPN point to a voxel of the same
        predicted class for the point to survive
    entry - which entry of each output list to process

    Returns
    -------
    [x,y,z,bid,label] of ppn-predicted points
    """
    event_data = data  #.cpu().detach().numpy()
    points = out['points'][entry]  #.cpu().detach().numpy()
    # NOTE: distinct name from the per-batch selection below — the original
    # code reassigned `mask` inside the loop, clobbering this array and
    # breaking every batch after the first.
    ppn_mask = out['mask_ppn2'][entry]  #.cpu().detach().numpy()
    # predicted type labels
    uresnet_predictions = np.argmax(out['segmentation'][entry], -1)
    scores = scipy.special.softmax(points[:, 3:5], axis=1)

    if 'ghost' in out:
        # drop ghost-predicted voxels before any further selection
        mask_ghost = np.argmax(out['ghost'][entry], axis=1) == 0
        event_data = event_data[mask_ghost]
        points = points[mask_ghost]
        ppn_mask = ppn_mask[mask_ghost]
        uresnet_predictions = uresnet_predictions[mask_ghost]
        scores = scores[mask_ghost]

    all_points = []
    all_batch = []
    all_labels = []
    batch_ids = event_data[:, 3]
    num_classes = 5
    for b in np.unique(batch_ids):
        final_points = []
        final_scores = []
        final_labels = []
        batch_index = batch_ids == b
        # keep points that pass the PPN attention mask and the score cut
        keep = ((~(ppn_mask[batch_index] == 0)).any(
            axis=1)) & (scores[batch_index][:, 1] > score_threshold)
        ppn_type_predictions = np.argmax(scipy.special.softmax(
            points[batch_index][keep][:, 5:], axis=1),
                                         axis=1)
        for c in range(num_classes):
            uresnet_points = uresnet_predictions[batch_index][keep] == c
            ppn_points = ppn_type_predictions == c
            # count set entries — these are boolean masks, so shape[0] is the
            # (constant) candidate count, not the number of class-c points
            if np.count_nonzero(ppn_points) > 0 and \
               np.count_nonzero(uresnet_points) > 0:
                # distance from each PPN point (voxel + offset + 0.5) to every
                # voxel predicted as the same class
                d = scipy.spatial.distance.cdist(
                    points[batch_index][keep][ppn_points][:, :3] +
                    event_data[batch_index][keep][ppn_points][:, :3] + 0.5,
                    event_data[batch_index][keep][uresnet_points][:, :3])
                near = (d < type_threshold).any(axis=1)
                final_points.append(
                    points[batch_index][keep][ppn_points][near][:, :3] +
                    0.5 +
                    event_data[batch_index][keep][ppn_points][near][:, :3])
                final_scores.append(
                    scores[batch_index][keep][ppn_points][near])
                final_labels.append(ppn_type_predictions[ppn_points][near])
        if not final_points:
            # nothing survived the cuts for this batch; np.concatenate on an
            # empty list would raise
            continue
        final_points = np.concatenate(final_points, axis=0)
        final_scores = np.concatenate(final_scores, axis=0)
        final_labels = np.concatenate(final_labels, axis=0)
        clusts = dbscan_types(final_points,
                              final_labels,
                              epsilon=1.99,
                              minpts=1,
                              typemin=0,
                              typemax=5)
        for c in clusts:
            # append mean of points
            all_points.append(np.mean(final_points[c], axis=0))
            all_batch.append(b)
            all_labels.append(np.mean(final_labels[c]))

    return np.column_stack((all_points, all_batch, all_labels))