def get_current_soln(self, edge_weights):
        p_min = 0.001
        p_max = 1.
        segmentations = []
        node_labels = []
        n_lbl_offs = [0]
        for i in range(1, len(self.e_offs)):
            probs = edge_weights[self.e_offs[i-1]:self.e_offs[i]]
            # logit transform: map edge probabilities in [0, 1] to signed multicut costs
            costs = (p_max - p_min) * probs + p_min
            costs = (torch.log((1. - costs) / costs)).detach().cpu().numpy()
            _node_labels = elf.segmentation.multicut.multicut_decomposition(
                self.rags[i-1], costs, internal_solver='greedy-additive', n_threads=4)
            mc_seg = project_node_labels_to_pixels(self.rags[i-1], _node_labels).squeeze()
            # np.long was removed from NumPy; np.int64 is the portable spelling
            mc_seg = torch.from_numpy(mc_seg.astype(np.int64)).to(self.device)
            segmentations.append(mc_seg)
            node_labels.append(torch.from_numpy(_node_labels.astype(np.int64)).to(self.device) + n_lbl_offs[i-1])
            # accumulate an exclusive offset so labels from different graphs cannot collide
            n_lbl_offs.append(n_lbl_offs[-1] + _node_labels.max() + 1)

        node_labels = torch.cat(node_labels)
        # object x node mask: 1 where the node belongs to the object
        object_node_mask = (node_labels[None] == torch.unique(node_labels)[:, None]).long()
        # 1-based node ids per object (0 means "not in this object")
        object_nodes = object_node_mask * torch.arange(1, len(node_labels) + 1, device=self.device)[None]
        # per (object, edge): number of edge endpoints that lie inside the object
        nodes_per_edge = (object_nodes[:, :, None, None] == (self.edge_ids[None, None] + 1)).long().sum(1).sum(1)
        # edges whose endpoints both fall into one object (internal edges)
        object_edge_ind_critic = torch.nonzero(nodes_per_edge == 2)[:, 1]
        # edges that touch an object with at least one endpoint
        object_edges_actor = (nodes_per_edge >= 1).long()

        # G = nx.path_graph(4)
        # G.add_edges_from(self.edge_ids[:, object_edge_ind_critic].T.tolist())
        # nx.connected_components(G)
        # print("############", len(list(nx.connected_components(G))) + (object_node_mask.sum(1)==1).sum() - object_node_mask.shape[0])
        return torch.stack(segmentations, dim=0), object_edge_ind_critic, object_node_mask, object_edges_actor
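The cost construction above is the standard logit transform: probabilities near 0 become strongly positive (attractive) costs and probabilities near 1 strongly negative (repulsive) ones. A quick NumPy sketch with illustrative values:

import numpy as np

p_min, p_max = 0.001, 1.0
probs = np.array([0.05, 0.5, 0.95])      # example edge probabilities
costs = (p_max - p_min) * probs + p_min  # keep values away from exactly 0 and 1
costs = np.log((1.0 - costs) / costs)    # logit: > 0 favours merging, < 0 cutting
print(costs)                             # approx [ 2.93, -0.002, -2.95]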
Example #2
def segment_mc(pred, seg, delta):
    rag = feats.compute_rag(seg)
    edge_probs = embed.edge_probabilities_from_embeddings(
        pred, seg, rag, delta)
    edge_sizes = feats.compute_boundary_mean_and_length(rag, pred[0])[:, 1]
    costs = mc.transform_probabilities_to_costs(edge_probs,
                                                edge_sizes=edge_sizes)
    node_labels = mc.multicut_kernighan_lin(rag, costs)
    mc_seg = feats.project_node_labels_to_pixels(rag, node_labels)
    return mc_seg
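A minimal call sketch for segment_mc; the feats/mc/embed aliases are assumed to be the elf.segmentation modules (the import paths are an assumption, they are not shown in the snippet):

import numpy as np
import elf.segmentation.features as feats
import elf.segmentation.multicut as mc
import elf.segmentation.embeddings as embed  # assumed home of edge_probabilities_from_embeddings

pred = np.random.rand(16, 64, 64).astype('float32')  # toy 16-channel embedding
seg = np.zeros((64, 64), dtype='uint32')
seg[:, 32:] = 1                                      # two trivial superpixels
mc_seg = segment_mc(pred, seg, delta=2.0)            # delta chosen for illustration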
Example #3
def multicut_from_probas(segmentation, edges, edge_weights):
    rag = compute_rag(segmentation)
    edge_dict = dict(zip(list(map(tuple, edges)), edge_weights))
    costs = np.empty(len(edge_weights))
    # rag edges may list (u, v) in either order, so fall back to the swapped key
    for i, neighbors in enumerate(rag.uvIds()):
        if tuple(neighbors) in edge_dict:
            costs[i] = edge_dict[tuple(neighbors)]
        else:
            costs[i] = edge_dict[(neighbors[1], neighbors[0])]
    costs = transform_probabilities_to_costs(costs)
    node_labels = multicut_kernighan_lin(rag, costs)

    return project_node_labels_to_pixels(rag, node_labels).squeeze()
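A usage sketch for multicut_from_probas on toy inputs; the elf.segmentation imports below are an assumption about where the undecorated names come from:

import numpy as np
from elf.segmentation.features import compute_rag, project_node_labels_to_pixels
from elf.segmentation.multicut import (multicut_kernighan_lin,
                                       transform_probabilities_to_costs)

segmentation = np.zeros((8, 8), dtype='uint32')
segmentation[:, 4:] = 1            # two touching segments -> a single rag edge
edges = [(0, 1)]                   # same (u, v) convention as rag.uvIds()
edge_weights = [0.9]               # high probability => likely a true boundary
result = multicut_from_probas(segmentation, edges, edge_weights)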
Example #4
def supervoxel_merging(mem, sv, beta=0.5, verbose=False):

    rag = feats.compute_rag(sv)
    costs = feats.compute_boundary_features(rag, mem)[:, 0]

    edge_sizes = feats.compute_boundary_mean_and_length(rag, mem)[:, 1]
    costs = mc.transform_probabilities_to_costs(costs,
                                                edge_sizes=edge_sizes,
                                                beta=beta)

    node_labels = mc.multicut_kernighan_lin(rag, costs)
    segmentation = feats.project_node_labels_to_pixels(rag, node_labels)

    return segmentation
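As elsewhere, feats and mc are presumably elf.segmentation.features and elf.segmentation.multicut; a toy call:

import numpy as np
import elf.segmentation.features as feats
import elf.segmentation.multicut as mc

mem = np.random.rand(32, 32).astype('float32')  # membrane probability map
sv = np.arange(16, dtype='uint32').repeat(64).reshape(32, 32)  # stripe supervoxels
merged = supervoxel_merging(mem, sv, beta=0.5)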
Example #5
def get_multicut_sln(rag, edge_weights):
    p_min = 1e-5
    p_max = 1.
    probs = edge_weights.copy()
    costs = (p_max - p_min) * probs + p_min
    costs = (np.log((1. - costs) / costs.clip(1e-10, 1)))
    node_labels = multicut_decomposition(rag,
                                         costs,
                                         internal_solver='kernighan-lin',
                                         n_threads=4)  # greedy-additive
    node_labels = vigra.analysis.relabelConsecutive(node_labels,
                                                    keep_zeros=False,
                                                    start_label=0)[0]
    return project_node_labels_to_pixels(rag, node_labels).squeeze()
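Note that get_multicut_sln clips only the denominator, so an edge weight of exactly 1 still yields a cost of -inf. A sketch of a two-sided guard (an alternative, not what the snippet itself does):

import numpy as np

p_min, p_max = 1e-5, 1.0
probs = np.array([0.0, 0.5, 1.0])
costs = (p_max - p_min) * probs + p_min
# original: np.log((1. - costs) / costs.clip(1e-10, 1))  -> -inf at probs == 1
safe = np.log((1. - costs).clip(1e-10, None) / costs.clip(1e-10, None))
print(safe)  # finite at both extremes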
Example #6
def segment_volume_lmc_from_seg(boundary_pmaps,
                                nuclei_seg,
                                threshold=0.4,
                                sigma=2.0,
                                sp_min_size=100):
    watershed = distance_transform_watershed(boundary_pmaps,
                                             threshold,
                                             sigma,
                                             min_size=sp_min_size)[0]

    # compute the region adjacency graph
    rag = compute_rag(watershed)

    # compute the edge costs
    features = compute_boundary_mean_and_length(rag, boundary_pmaps)
    costs, sizes = features[:, 0], features[:, 1]

    # transform the edge costs from [0, 1] to [-inf, inf], which is
    # necessary for the multicut. This is done by interpreting the values
    # as probabilities for an edge being 'true' and then taking the negative log-likelihood.
    # in addition, we weight the costs by the size of the corresponding edge

    # a boundary bias larger than 0.5 lowers the probability threshold at
    # which an edge is cut, i.e. it biases the solution towards splitting
    boundary_bias = .6

    costs = transform_probabilities_to_costs(costs,
                                             edge_sizes=sizes,
                                             beta=boundary_bias)
    max_cost = np.abs(np.max(costs))
    lifted_uvs, lifted_costs = lifted_problem_from_segmentation(
        rag,
        watershed,
        nuclei_seg,
        overlap_threshold=0.2,
        graph_depth=4,
        same_segment_cost=5 * max_cost,
        different_segment_cost=-5 * max_cost)

    # solve the full lifted problem using the kernighan lin approximation introduced in
    # http://openaccess.thecvf.com/content_iccv_2015/html/Keuper_Efficient_Decomposition_of_ICCV_2015_paper.html
    node_labels = lmc.lifted_multicut_kernighan_lin(rag, costs, lifted_uvs,
                                                    lifted_costs)
    lifted_segmentation = project_node_labels_to_pixels(rag, node_labels)
    return lifted_segmentation
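A call sketch for segment_volume_lmc_from_seg on toy volumes; the undecorated helpers (distance_transform_watershed, lifted_problem_from_segmentation, lmc, ...) presumably come from elf.segmentation, and the exact import paths are an assumption:

import numpy as np

boundary_pmaps = np.random.rand(8, 64, 64).astype('float32')  # toy boundary prediction
nuclei_seg = np.zeros((8, 64, 64), dtype='uint64')            # toy nuclei labels
nuclei_seg[:, :32, :32] = 1
nuclei_seg[:, 32:, 32:] = 2

lifted_seg = segment_volume_lmc_from_seg(boundary_pmaps, nuclei_seg)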
Example #7
    def get_current_soln(self, edge_weights):
        p_min = 0.001
        p_max = 1.
        segmentations = []
        for i in range(1, len(self.e_offs)):
            probs = edge_weights[self.e_offs[i - 1]:self.e_offs[i]]
            # min-max normalize to [0, 1]; assumes the slice is not constant
            probs -= probs.min()
            probs /= probs.max()
            costs = (p_max - p_min) * probs + p_min
            costs = (torch.log((1. - costs) / costs)).detach().cpu().numpy()
            node_labels = elf.segmentation.multicut.multicut_decomposition(
                self.rags[i - 1],
                costs,
                internal_solver='greedy-additive',
                n_threads=4)
            mc_seg = project_node_labels_to_pixels(self.rags[i - 1],
                                                   node_labels).squeeze()

            mc_seg = torch.from_numpy(mc_seg.astype(np.int64)).to(self.device)
            # mask = mc_seg[None] == torch.unique(mc_seg)[:, None, None]
            # mc_seg = (mask * (torch.arange(len(torch.unique(mc_seg)), device=mc_seg.device)[:, None, None] + 1)).sum(0) - 1

            segmentations.append(mc_seg)
        return torch.stack(segmentations, dim=0)
Example #8
affs = np.transpose(affs.cpu().numpy(), (1, 0, 2, 3))
gt_affs = np.transpose(gt_affs.cpu().numpy(), (1, 0, 2, 3))
seg = seg.cpu().numpy()
gt_seg = gt_seg.cpu().numpy()
boundary_input = np.mean(affs, axis=0)
gt_boundary_input = np.mean(gt_affs, axis=0)

rag = feats.compute_rag(seg)
# rag.uvIds() lists the graph edges as node-id pairs, e.g. [[1, 2], ...]

costs = feats.compute_affinity_features(rag, affs, offsets)[:, 0]
gt_costs = calculate_gt_edge_costs(rag.uvIds(), seg.squeeze(),
                                   gt_seg.squeeze())

edge_sizes = feats.compute_boundary_mean_and_length(rag, boundary_input)[:, 1]
gt_edge_sizes = feats.compute_boundary_mean_and_length(rag,
                                                       gt_boundary_input)[:, 1]
costs = mc.transform_probabilities_to_costs(costs, edge_sizes=edge_sizes)
gt_costs = mc.transform_probabilities_to_costs(gt_costs,
                                               edge_sizes=gt_edge_sizes)

node_labels = mc.multicut_kernighan_lin(rag, costs)
gt_node_labels = mc.multicut_kernighan_lin(rag, gt_costs)

segmentation = feats.project_node_labels_to_pixels(rag, node_labels)
gt_segmentation = feats.project_node_labels_to_pixels(rag, gt_node_labels)
plt.imshow(
    np.concatenate(
        (gt_segmentation.squeeze(), segmentation.squeeze(), seg.squeeze()),
        axis=1))
plt.show()
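This snippet is a bare script; the preamble below shows the imports and inputs it presumes (offsets must match the affinity channels, and calculate_gt_edge_costs is project-specific, its definition is not shown here):

import numpy as np
import matplotlib.pyplot as plt
import elf.segmentation.features as feats
import elf.segmentation.multicut as mc

# e.g. nearest-neighbour 2D affinity offsets; must match how affs was predicted
offsets = [[-1, 0], [0, -1]]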
Example #9
def refine_seg(raw,
               seeds,
               restrict_to_seeds=True,
               restrict_to_bb=False,
               return_intermediates=False):
    pred = get_prediction(raw, cache=False)

    n_threads = 1
    # make watershed
    ws, _ = stacked_watershed(pred,
                              threshold=.5,
                              sigma_seeds=1.,
                              n_threads=n_threads)
    rag = compute_rag(ws, n_threads=n_threads)
    edge_feats = compute_boundary_mean_and_length(rag,
                                                  pred,
                                                  n_threads=n_threads)
    edge_feats, edge_sizes = edge_feats[:, 0], edge_feats[:, 1]
    z_edges = compute_z_edge_mask(rag, ws)
    edge_costs = compute_edge_costs(edge_feats,
                                    beta=.4,
                                    weighting_scheme='xyz',
                                    edge_sizes=edge_sizes,
                                    z_edge_mask=z_edges)

    # make seeds and map them to edges
    bb = tuple(
        slice(sh // 2 - ha // 2, sh // 2 + ha // 2)
        for sh, ha in zip(pred.shape, seeds.shape))

    seeds[seeds < 0] = 0
    seeds = vigra.analysis.labelVolumeWithBackground(seeds.astype('uint32'))
    seed_ids = np.unique(seeds)
    # erode the seeds so they detach from object boundaries
    seed_mask = binary_erosion(seeds, iterations=2)

    seeds_new = seeds.copy()
    seeds_new[~seed_mask] = 0
    # restore any seed that the erosion removed entirely
    seed_ids_new = np.unique(seeds_new)
    for seed_id in seed_ids:
        if seed_id in seed_ids_new:
            continue
        seeds_new[seeds == seed_id] = seed_id

    seeds_full = np.zeros(pred.shape, dtype=seeds.dtype)
    # paste the eroded seeds (with fully-eroded ids restored above) into the full volume
    seeds_full[bb] = seeds_new
    seeds = seeds_full

    seed_labels = compute_maximum_label_overlap(ws, seeds, ignore_zeros=True)

    edge_ids = rag.uvIds()
    labels_u = seed_labels[edge_ids[:, 0]]
    labels_v = seed_labels[edge_ids[:, 1]]

    seed_mask = np.logical_and(labels_u != 0, labels_v != 0)
    same_seed = np.logical_and(seed_mask, labels_u == labels_v)
    diff_seed = np.logical_and(seed_mask, labels_u != labels_v)

    # hard constraints: force-merge edges within the same seed and force-split
    # edges between different seeds, using costs beyond the current extremes
    max_att = edge_costs.max() + .1
    max_rep = edge_costs.min() - .1
    edge_costs[same_seed] = max_att
    edge_costs[diff_seed] = max_rep

    # run multicut
    node_labels = multicut_kernighan_lin(rag, edge_costs)
    if restrict_to_seeds:
        seed_nodes = np.unique(node_labels[seed_labels > 0])
        node_labels[~np.isin(node_labels, seed_nodes)] = 0
        vigra.analysis.relabelConsecutive(node_labels, out=node_labels)

    seg = project_node_labels_to_pixels(rag, node_labels, n_threads=n_threads)

    if restrict_to_bb:
        bb_mask = np.zeros(seg.shape, dtype='bool')
        bb_mask[bb] = 1
        seg[~bb_mask] = 0

    if return_intermediates:
        return pred, ws, seeds, seg
    else:
        return seg
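Finally, a call sketch for refine_seg; get_prediction is a project-specific inference helper, binary_erosion presumably comes from scipy.ndimage, and the graph utilities from elf.segmentation, so realistic input shapes are the only assumption made here:

import numpy as np

raw = np.random.rand(32, 256, 256).astype('float32')  # 3D raw volume
seeds = -np.ones((16, 128, 128), dtype='int32')        # -1 = unlabeled
seeds[8, 40:60, 40:60] = 1                             # two seed scribbles
seeds[8, 80:100, 80:100] = 2

pred, ws, seeds_out, seg = refine_seg(raw, seeds, return_intermediates=True)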