Code Example #1
def get_size_map(label_image):
    # Assumes: import numpy as np; ntools provides mapFeaturesToLabelArray.
    # Count the pixels of every label id, then broadcast each per-label
    # size back onto all pixels carrying that label.
    node_sizes = np.bincount(label_image.flatten())
    # Alternative RAG-based implementation (see Code Example #2):
    # rag = nrag.gridRag(label_image)
    # _, node_features = nrag.accumulateMeanAndLength(rag, label_image.astype('float32'),
    #                                                 blockShape=[1, 100, 100],
    #                                                 numberOfThreads=8,
    #                                                 saveMemory=True)
    # nodeSizes = node_features[:, [1]]
    return ntools.mapFeaturesToLabelArray(label_image, node_sizes[:, None], nb_threads=6).squeeze()
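
A minimal usage sketch for this helper (toy input; assumes import numpy as np and that ntools is the tools module providing mapFeaturesToLabelArray). The returned array has the same shape as label_image, with every pixel replaced by the size of the segment it belongs to:

import numpy as np

# Toy 2D label image with three segments (ids 0, 1, 2):
label_image = np.array([[0, 0, 1],
                        [2, 1, 1],
                        [2, 2, 2]], dtype=np.int64)

size_map = get_size_map(label_image)
# Expected result (per-segment sizes broadcast back to the pixels):
# [[2 2 3]
#  [4 3 3]
#  [4 4 4]]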
Code Example #2
File: watershed.py    Project: weihuang527/GASP
def get_size_map(label_image):
    # RAG-based variant: accumulate (mean, length) features per node of the
    # region adjacency graph, then map each node's size back onto its pixels.
    rag = nrag.gridRag(label_image)
    _, node_features = nrag.accumulateMeanAndLength(
        rag,
        label_image.astype('float32'),
        blockShape=[1, 100, 100],
        numberOfThreads=8,
        saveMemory=True)
    nodeSizes = node_features[:, [1]]  # column 1 holds the node sizes
    return ntools.mapFeaturesToLabelArray(
        label_image, nodeSizes, number_of_threads=6).squeeze()
Code Example #3
def map_features_to_label_array(label_array, features, ignore_label=-1,
                                fill_value=0., number_of_threads=-1):
    """Map per-label features back onto a label array.

    :param label_array: array of integer label ids
    :param features: array of shape (n_labels, n_features), where row i holds
        the feature vector of label i
    :param ignore_label: the label in label_array that should be ignored in the mapping
    :param fill_value: the fill value used in the mapped array to replace the ignore_label
    :param number_of_threads: number of threads used for the mapping (-1: all available)
    :return: array of shape label_array.shape + (n_features,)
    """
    # TODO: deprecate and directly use version in nifty
    return ntools.mapFeaturesToLabelArray(label_array, features, ignore_label, fill_value, number_of_threads)
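
For reference, a pure-NumPy sketch of what this mapping presumably computes (an assumption based on how the function is used in the other examples: features has shape (n_labels, n_features), with row i belonging to label i):

import numpy as np

def map_features_to_label_array_numpy(label_array, features,
                                      ignore_label=-1, fill_value=0.):
    # Single-threaded reference: broadcast per-label features onto pixels;
    # pixels equal to ignore_label receive fill_value instead.
    mapped = np.full(label_array.shape + (features.shape[1],),
                     fill_value, dtype=features.dtype)
    valid = label_array != ignore_label
    mapped[valid] = features[label_array[valid]]
    return mapped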
Code Example #4
    def run_GASP_from_superpixels(self,
                                  affinities,
                                  superpixel_segmentation,
                                  mask_used_edges=None):
        featurer = AccumulatorLongRangeAffs(
            self.offsets,
            offsets_weights=self.offsets_weights,
            used_offsets=self.used_offsets,
            verbose=self.verbose,
            n_threads=self.n_threads,
            invert_affinities=False,
            statistic='mean',
            offset_probabilities=self.offsets_probabilities,
            mask_used_edges=mask_used_edges,
            return_dict=True)

        # Compute graph and edge weights by accumulating over the affinities:
        featurer_outputs = featurer(affinities, superpixel_segmentation)
        graph = featurer_outputs['graph']
        edge_indicators = featurer_outputs['edge_indicators']
        edge_sizes = featurer_outputs['edge_sizes']
        is_local_edge = featurer_outputs['is_local_edge']

        # Optionally, use logarithmic weights and apply bias parameter
        if self.use_logarithmic_weights:
            log_costs = probs_to_costs(1 - edge_indicators,
                                       beta=self.beta_bias)
            signed_weights = log_costs
        else:
            signed_weights = edge_indicators - self.beta_bias

        # Run GASP:
        node_labels, runtime = \
            run_GASP(graph, signed_weights,
                     edge_sizes=edge_sizes,
                     is_mergeable_edge=is_local_edge,
                     verbose=self.verbose,
                     **self.run_GASP_kwargs)

        # Map node labels back to the original superpixel segmentation:
        final_segm = ntools.mapFeaturesToLabelArray(
            superpixel_segmentation,
            np.expand_dims(node_labels, axis=-1),
            nb_threads=self.n_threads,
            fill_value=-1.,
            ignore_label=-1,
        )[..., 0].astype(np.int64)

        # Increase labels by one, so that the ignore label becomes 0:
        final_segm += 1

        return final_segm, runtime
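
A hypothetical end-to-end call for this method, assuming it lives on the GaspFromAffinities class of the GASP package (the import path, constructor arguments, and the toy random inputs below are assumptions, not taken from this example):

import numpy as np
from GASP.segmentation import GaspFromAffinities  # assumed enclosing class

# One affinity channel per offset, plus a toy over-segmentation:
offsets = [[-1, 0, 0], [0, -1, 0], [0, 0, -1],
           [0, -4, 0], [0, 0, -4]]
affinities = np.random.rand(len(offsets), 10, 100, 100).astype('float32')
superpixels = np.random.randint(0, 500, size=(10, 100, 100)).astype('uint64')

gasp = GaspFromAffinities(offsets,
                          run_GASP_kwargs={'linkage_criteria': 'mean'})
segmentation, runtime = gasp.run_GASP_from_superpixels(affinities, superpixels)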
Code Example #5
    def run_GASP_from_superpixels(self,
                                  affinities,
                                  superpixel_segmentation,
                                  mask_used_edges=None,
                                  affinities_weights=None):
        # TODO: compute affinities_weights automatically from segmentation if needed
        # When implementing mask_used_edges, remember to crop it depending on the used offsets
        assert mask_used_edges is None, "Edge mask cannot be used when starting from a segmentation."
        featurer = AccumulatorLongRangeAffs(
            self.offsets,
            offsets_weights=self.offsets_weights,
            used_offsets=self.used_offsets,
            verbose=self.verbose,
            n_threads=self.n_threads,
            invert_affinities=False,
            statistic='mean',
            offset_probabilities=self.offsets_probabilities,
            return_dict=True)

        # Compute graph and edge weights by accumulating over the affinities:
        featurer_outputs = featurer(affinities,
                                    superpixel_segmentation,
                                    affinities_weights=affinities_weights)
        graph = featurer_outputs['graph']
        edge_indicators = featurer_outputs['edge_indicators']
        edge_sizes = featurer_outputs['edge_sizes']
        is_local_edge = featurer_outputs['is_local_edge']

        # Compute logarithmic costs (also used for the multicut energy below);
        # optionally use them as the signed weights, applying the bias parameter:
        log_costs = probs_to_costs(1 - edge_indicators, beta=self.beta_bias)
        if self.use_logarithmic_weights:
            signed_weights = log_costs
        else:
            signed_weights = edge_indicators - self.beta_bias

        # Run GASP:
        node_labels, runtime = \
            run_GASP(graph, signed_weights,
                     edge_sizes=edge_sizes,
                     is_mergeable_edge=is_local_edge,
                     verbose=self.verbose,
                     **self.run_GASP_kwargs)

        # Map node labels back to the original superpixel segmentation:
        final_segm = ntools.mapFeaturesToLabelArray(
            superpixel_segmentation,
            np.expand_dims(node_labels, axis=-1),
            nb_threads=self.n_threads,
            fill_value=-1.,
            ignore_label=-1,
        )[..., 0].astype(np.int64)

        # Increase labels by one, so that the ignore label becomes 0:
        final_segm += 1

        if self.return_extra_outputs:
            MC_energy = self.get_multicut_energy(graph, node_labels, log_costs)
            out_dict = {"multicut_energy": MC_energy, "runtime": runtime}
            return final_segm, out_dict
        else:
            return final_segm, runtime
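
For reference, probs_to_costs presumably applies the standard multicut probability-to-cost transform w = log((1 - p) / p) + log((1 - beta) / beta), where p is the boundary probability (hence the 1 - edge_indicators above). A minimal sketch with the usual clipping safeguard:

import numpy as np

def probs_to_costs_sketch(probs, beta=0.5, eps=1e-6):
    # probs are boundary probabilities in [0, 1]; beta < 0.5 biases toward
    # merging, beta > 0.5 toward splitting. Clipping avoids log(0).
    p = np.clip(probs, eps, 1.0 - eps)
    return np.log((1.0 - p) / p) + np.log((1.0 - beta) / beta)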
Code Example #6
    def run_GASP_from_superpixels(self,
                                  affinities,
                                  superpixel_segmentation,
                                  foreground_mask=None,
                                  mask_used_edges=None,
                                  affinities_weights=None):
        # TODO: compute affinities_weights automatically from segmentation if needed
        # When implementing mask_used_edges, remember to crop it depending on the used offsets
        assert mask_used_edges is None, "Edge mask cannot be used when starting from a segmentation."
        assert self.set_only_direct_neigh_as_mergeable, "Not yet implemented when starting from superpixels"
        featurer = gasp_utils.AccumulatorLongRangeAffs(
            self.offsets,
            offsets_weights=self.offsets_weights,
            used_offsets=self.used_offsets,
            verbose=self.verbose,
            n_threads=self.n_threads,
            invert_affinities=False,
            statistic='mean',
            offset_probabilities=self.offsets_probabilities,
            return_dict=True)

        # Compute graph and edge weights by accumulating over the affinities:
        featurer_outputs = featurer(affinities,
                                    superpixel_segmentation,
                                    affinities_weights=affinities_weights)
        graph = featurer_outputs['graph']
        edge_indicators = featurer_outputs['edge_indicators']
        edge_sizes = featurer_outputs['edge_sizes']
        is_local_edge = featurer_outputs['is_local_edge']

        # Compute logarithmic costs; optionally use them as the signed
        # weights, applying the bias parameter:
        log_costs = compute_edge_costs(1 - edge_indicators,
                                       beta=self.beta_bias)
        if self.use_logarithmic_weights:
            signed_weights = log_costs
        else:
            signed_weights = edge_indicators - self.beta_bias

        # Run GASP:
        export_agglomeration_data = self.run_GASP_kwargs.get(
            "export_agglomeration_data", False)
        outputs = \
            run_GASP(graph, signed_weights,
                     edge_sizes=edge_sizes,
                     is_mergeable_edge=is_local_edge,
                     verbose=self.verbose,
                     **self.run_GASP_kwargs)

        if export_agglomeration_data:
            node_labels, runtime, exported_data = outputs
        else:
            exported_data = {}
            node_labels, runtime = outputs

        # Map node labels back to the original superpixel segmentation:
        final_segm = ntools.mapFeaturesToLabelArray(
            superpixel_segmentation,
            np.expand_dims(node_labels, axis=-1),
            nb_threads=self.n_threads,
            fill_value=-1.,
            ignore_label=-1,
        )[..., 0].astype(np.int64)

        # If there was a background label, reset it to zero:
        min_label = final_segm.min()
        if min_label < 0:
            assert min_label == -1

            # Move the background label to 0, and map the 0 segment (if any) to MAX_LABEL+1.
            # This way, most of the final labels stay consistent with the graph and the given over-segmentation
            background_mask = final_segm == min_label
            zero_mask = final_segm == 0
            if np.any(zero_mask):
                max_label = final_segm.max()
                warnings.warn(
                    "Zero segment remapped to {} in final segmentation".format(
                        max_label + 1))
                final_segm[zero_mask] = max_label + 1
            final_segm[background_mask] = 0

        if self.return_extra_outputs:
            MC_energy = self.get_multicut_energy(graph, node_labels,
                                                 signed_weights, edge_sizes)
            out_dict = {
                "multicut_energy": MC_energy,
                "runtime": runtime,
                "graph": graph,
                "is_local_edge": is_local_edge,
                "edge_sizes": edge_sizes
            }
            if export_agglomeration_data:
                out_dict.update(exported_data)
            return final_segm, out_dict
        else:
            if export_agglomeration_data:
                warnings.warn(
                    "In order to export agglomeration data, also set `return_extra_outputs` to True"
                )
            return final_segm, runtime
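
get_multicut_energy is not shown in these examples; a plausible sketch of such an energy (an assumption, including whether the edge sizes enter multiplicatively) sums the signed weights of all cut edges, i.e. edges whose endpoints receive different node labels. It assumes graph exposes uvIds(), as nifty graphs do:

import numpy as np

def multicut_energy_sketch(graph, node_labels, signed_weights, edge_sizes=None):
    # (n_edges, 2) array of endpoint node ids, aligned with the edge weights:
    uv = graph.uvIds()
    cut = node_labels[uv[:, 0]] != node_labels[uv[:, 1]]
    weights = signed_weights if edge_sizes is None else signed_weights * edge_sizes
    return weights[cut].sum()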
Code Example #7
    def run_GASP_from_pixels(self,
                             affinities,
                             mask_used_edges=None,
                             foreground_mask=None,
                             affinities_weights=None):
        assert affinities_weights is None, "Not yet implemented from pixels"
        assert affinities.shape[0] == len(self.offsets)
        offsets = self.offsets
        if self.used_offsets is not None:
            affinities = affinities[self.used_offsets]
            offsets = offsets[self.used_offsets]
            if mask_used_edges is not None:
                mask_used_edges = mask_used_edges[self.used_offsets]

        image_shape = affinities.shape[1:]

        # affinities = affinities - np.mean(affinities, axis=(1,2,3))[:,np.newaxis,np.newaxis, np.newaxis]
        # Check whether the efficient implementation of the MWS should be used:
        run_kwargs = self.run_GASP_kwargs
        export_agglomeration_data = run_kwargs.get("export_agglomeration_data",
                                                   False)
        # TODO: add implementation of single-linkage from pixels using affogato.segmentation.connected_components
        if run_kwargs.get("use_efficient_implementations", True) and\
           run_kwargs.get("linkage_criteria") in ['mutex_watershed', 'abs_max']:
            assert compute_mws_segmentation_from_affinities is not None,\
                "Efficient MWS implementation not available. Update the affogato repository."
            assert not export_agglomeration_data, "Exporting extra agglomeration data is not possible when using " \
                                                  "the efficient implementation of MWS."
            if self.set_only_direct_neigh_as_mergeable:
                warnings.warn(
                    "With the efficient implementation of MWS, it is not possible to set only direct neighbors "
                    "as mergeable.")
            tick = time.time()
            segmentation, valid_edge_mask = compute_mws_segmentation_from_affinities(
                affinities,
                offsets,
                beta_parameter=self.beta_bias,
                foreground_mask=foreground_mask,
                edge_mask=mask_used_edges,
                return_valid_edge_mask=True)
            runtime = time.time() - tick
            if self.return_extra_outputs:
                MC_energy = self.get_multicut_energy_segmentation(
                    segmentation, affinities, offsets, valid_edge_mask)
                out_dict = {'runtime': runtime, 'multicut_energy': MC_energy}
                return segmentation, out_dict
            else:
                return segmentation, runtime

        # Build graph:
        if self.verbose:
            print("Building graph...")
        graph, projected_node_ids_to_pixels, edge_weights, is_local_edge, edge_sizes = \
            gasp_utils.build_pixel_long_range_grid_graph_from_offsets(
                image_shape,
                offsets,
                affinities,
                offsets_probabilities=self.offsets_probabilities,
                mask_used_edges=mask_used_edges,
                offset_weights=self.offsets_weights,
                foreground_mask=foreground_mask,
                set_only_direct_neigh_as_mergeable=self.set_only_direct_neigh_as_mergeable
            )

        # Compute log costs:
        log_costs = compute_edge_costs(1 - edge_weights, beta=self.beta_bias)
        if self.use_logarithmic_weights:
            signed_weights = log_costs
        else:
            # signed_weights = edge_weights + 0.3
            signed_weights = edge_weights - self.beta_bias

        # Run GASP:
        if self.verbose:
            print("Start agglo...")

        outputs = run_GASP(graph,
                           signed_weights,
                           edge_sizes=edge_sizes,
                           is_mergeable_edge=is_local_edge,
                           verbose=self.verbose,
                           **self.run_GASP_kwargs)

        if export_agglomeration_data:
            nodeSeg, runtime, exported_data = outputs
        else:
            exported_data = {}
            nodeSeg, runtime = outputs

        segmentation = ntools.mapFeaturesToLabelArray(
            projected_node_ids_to_pixels,
            np.expand_dims(nodeSeg, axis=-1),
            nb_threads=self.n_threads,
            fill_value=-1.,
            ignore_label=-1,
        )[..., 0].astype(np.int64)

        if self.return_extra_outputs:
            frustration = self.get_frustration(graph, nodeSeg, signed_weights)
            MC_energy = self.get_multicut_energy(graph, nodeSeg,
                                                 signed_weights, edge_sizes)
            out_dict = {
                "multicut_energy": MC_energy,
                "runtime": runtime,
                "graph": graph,
                "is_local_edge": is_local_edge,
                "edge_sizes": edge_sizes,
                "edge_weights": signed_weights,
                "frustration": frustration
            }
            if export_agglomeration_data:
                out_dict.update(exported_data)
            return segmentation, out_dict
        else:
            if export_agglomeration_data:
                warnings.warn(
                    "In order to export agglomeration data, also set `return_extra_outputs` to True"
                )
            return segmentation, runtime
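
And a hypothetical call for this pixel variant, again assuming the enclosing GaspFromAffinities class (an assumption); with linkage_criteria set to 'mutex_watershed', the efficient MWS branch above is taken whenever affogato provides compute_mws_segmentation_from_affinities:

import numpy as np
from GASP.segmentation import GaspFromAffinities  # assumed enclosing class

offsets = [[-1, 0, 0], [0, -1, 0], [0, 0, -1],
           [0, -9, 0], [0, 0, -9]]
affinities = np.random.rand(len(offsets), 10, 100, 100).astype('float32')

gasp = GaspFromAffinities(
    offsets,
    run_GASP_kwargs={'linkage_criteria': 'mutex_watershed'})
segmentation, runtime = gasp.run_GASP_from_pixels(affinities)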