import numpy as np
import higra as hg


def labelisation_seeded_watershed(graph, edge_weights, vertex_seeds):
    """
    Seeded watershed cut on an edge weighted graph.

    Seeds are defined as vertex weights: any flat zone of value strictly greater
    than 0 is considered as a seed.

    Note that if two different seeds are placed in a minimum of the edge weighted
    graph, and if the altitude of this minimum is equal to the smallest representable
    value for the given `dtype` of the edge weights, then the algorithm won't be able
    to produce two different regions for these two seeds.

    :param graph: input graph
    :param edge_weights: weights on the edges of the graph
    :param vertex_seeds: seeds on the vertices of the graph
    :return: a labelisation of the graph vertices
    """
    # an edge gets weight 0 if both extremities have the same seed value, 1 otherwise
    edges_in_or_between_seeds = hg.weight_graph(graph, vertex_seeds, hg.WeightFunction.L0)
    # an edge gets a strictly positive weight if both extremities belong to seeds
    edges_outside_seeds = hg.weight_graph(graph, vertex_seeds, hg.WeightFunction.min)
    # edges located strictly inside a single seed
    edges_in_seed = np.logical_and(edges_outside_seeds > 0, 1 - edges_in_or_between_seeds)

    # set edges inside seeds at the minimum representable level
    edge_weights = edge_weights.copy()
    edge_weights[edges_in_seed > 0] = hg.dtype_info(edge_weights.dtype).min

    tree, altitudes = hg.watershed_hierarchy_by_attribute(
        graph,
        edge_weights,
        lambda tree, _: hg.accumulate_sequential(tree, vertex_seeds, hg.Accumulators.max))

    return hg.labelisation_hierarchy_supervertices(tree, altitudes)
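# Minimal usage sketch for labelisation_seeded_watershed (not from the original
# source): a 1x5 graph with a strong edge in the middle and two single-pixel
# seeds; the graph, weights, and seeds below are illustrative assumptions.
import numpy as np
import higra as hg

graph = hg.get_4_adjacency_graph((1, 5))
edge_weights = np.asarray((1, 1, 9, 1))     # one weight per edge: (0,1), (1,2), (2,3), (3,4)
vertex_seeds = np.asarray((1, 0, 0, 0, 2))  # seed labels 1 and 2; 0 means "no seed"

labels = labelisation_seeded_watershed(graph, edge_weights, vertex_seeds)
print(labels)  # expected: one label on {0, 1, 2}, another on {3, 4}, split at the strong edge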
def get_contour_masked(self, output, masked):
    import numpy as np
    import higra as hg
    from scipy import signal

    # binarize the mask
    hh = np.copy(masked)
    hh[hh > 0] = 255

    # Sobel gradient magnitude of the mask, normalized to [0, 1]
    Ix = signal.correlate2d(hh[:, :, 0], [[1, 2, 1], [0, 0, 0], [-1, -2, -1]],
                            mode='same', boundary='symm')
    Iy = signal.correlate2d(hh[:, :, 0], [[1, 0, -1], [2, 0, -2], [1, 0, -1]],
                            mode='same', boundary='symm')
    G = np.hypot(Ix, Iy)
    G = G / G.max()

    size = hh.shape[:2]
    # combine the network predictions with the mask gradient
    gradient_coarse = np.array([output[1], G]).max(axis=0)
    gradient_fine = np.array([output[0], G]).max(axis=0)
    gradient_orientation = output[2]

    graph = hg.get_4_adjacency_graph(size)
    edge_weights_fine = hg.weight_graph(graph, gradient_fine, hg.WeightFunction.mean)
    edge_weights_coarse = hg.weight_graph(graph, gradient_coarse, hg.WeightFunction.mean)
    edge_weights_hig = hg.weight_graph(graph, G, hg.WeightFunction.mean)  # note: currently unused

    # special handling for angles to wrap around the trigonometric cycle
    edge_orientations_source = hg.weight_graph(graph, gradient_orientation, hg.WeightFunction.source)
    edge_orientations_target = hg.weight_graph(graph, gradient_orientation, hg.WeightFunction.target)
    edge_orientations = hg.mean_angle_mod_pi(edge_orientations_source, edge_orientations_target)

    combined_hierarchy1, altitudes_combined1 = hg.multiscale_mean_pb_hierarchy(
        graph,
        edge_weights_fine,
        others_edge_weights=(edge_weights_coarse,),
        edge_orientations=edge_orientations)

    return hg.graph_4_adjacency_2_khalimsky(
        graph, hg.saliency(combined_hierarchy1, altitudes_combined1))
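# Hedged usage sketch for get_contour_masked: the synthetic 'output' triple
# (fine boundaries, coarse boundaries, orientations in [0, pi)) and the mask are
# assumptions about the expected inputs; since 'self' is never used in the body,
# the method is called unbound here.
import numpy as np

h, w = 64, 64
rng = np.random.default_rng(0)
output = [rng.random((h, w)), rng.random((h, w)), rng.random((h, w)) * np.pi]
masked = (rng.random((h, w, 3)) > 0.5).astype(np.uint8)

contours = get_contour_masked(None, output, masked)
print(contours.shape)  # contour saliency map on the Khalimsky grid (roughly twice the input resolution)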
import numpy as np
import higra as hg


def InstSegm(extent, boundary, t_ext=0.4, t_bound=0.2):
    """
    INPUTS:
    extent : extent prediction
    boundary : boundary prediction
    t_ext : threshold for extent
    t_bound : threshold for boundary
    OUTPUT:
    instances
    """
    # threshold the extent mask
    ext_binary = np.uint8(extent >= t_ext)

    # artificially create strong boundaries for pixels with non-field labels
    input_hws = np.copy(boundary)
    input_hws[ext_binary == 0] = 1

    # create the 8-adjacency graph and the watershed hierarchy by dynamics
    size = input_hws.shape[:2]
    graph = hg.get_8_adjacency_graph(size)
    edge_weights = hg.weight_graph(graph, input_hws, hg.WeightFunction.mean)
    tree, altitudes = hg.watershed_hierarchy_by_dynamics(graph, edge_weights)

    # get individual fields by cutting the hierarchy at the given altitude threshold
    # (np.float was removed from NumPy; use the builtin float instead)
    instances = hg.labelisation_horizontal_cut_from_threshold(
        tree, altitudes, threshold=t_bound).astype(float)
    instances[ext_binary == 0] = np.nan

    return instances
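# Minimal usage sketch for InstSegm with synthetic inputs; the random maps below
# only stand in for real extent/boundary predictions of a segmentation model.
import numpy as np

rng = np.random.default_rng(0)
extent = rng.random((128, 128))    # per-pixel probability of belonging to a field
boundary = rng.random((128, 128))  # per-pixel probability of being a field boundary

instances = InstSegm(extent, boundary, t_ext=0.4, t_bound=0.2)

# non-field pixels are NaN, the remaining pixels carry per-field instance labels
print(np.unique(instances[~np.isnan(instances)]).size, "instances")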
def test_hierarchy_to_optimal_MumfordShah_energy_cut_hierarchy(self):
    # Test strategy:
    # 1) start from a random hierarchy
    # 2) construct the corresponding optimal Mumford-Shah energy cut hierarchy
    # 3) verify that the horizontal cuts of the new hierarchy correspond to the
    #    optimal energy cuts of the first hierarchy, obtained from the explicit
    #    Mumford-Shah energy and the function labelisation_optimal_cut_from_energy
    shape = (10, 10)
    g = hg.get_4_adjacency_graph(shape)
    np.random.seed(2)
    vertex_weights = np.random.rand(*shape)
    edge_weights = hg.weight_graph(g, vertex_weights, hg.WeightFunction.L1)
    tree1, altitudes1 = hg.watershed_hierarchy_by_area(g, edge_weights)

    tree, altitudes = hg.hierarchy_to_optimal_MumfordShah_energy_cut_hierarchy(
        tree1, vertex_weights, approximation_piecewise_linear_function=999999)

    for a in altitudes:
        if a != 0:
            res = False
            cut1 = hg.labelisation_horizontal_cut_from_threshold(tree, altitudes, a)
            # due to numerical issues, and especially as we test critical scale levels lambda,
            # we test several very close scale levels
            for margin in [-1e-8, 0, 1e-8]:
                mfs_energy = hg.attribute_piecewise_constant_Mumford_Shah_energy(
                    tree1, vertex_weights, a + margin)
                cut2 = hg.labelisation_optimal_cut_from_energy(tree1, mfs_energy)
                res = res or hg.is_in_bijection(cut1, cut2)
            self.assertTrue(res)
def test_weighting_graph(self):
    g = hg.get_4_adjacency_graph((2, 2))
    data = np.asarray((0, 1, 2, 3))

    ref = (0.5, 1, 2, 2.5)
    r = hg.weight_graph(g, data, hg.WeightFunction.mean)
    self.assertTrue(np.allclose(ref, r))

    ref = (0, 0, 1, 2)
    r = hg.weight_graph(g, data, hg.WeightFunction.min)
    self.assertTrue(np.allclose(ref, r))

    ref = (1, 2, 3, 3)
    r = hg.weight_graph(g, data, hg.WeightFunction.max)
    self.assertTrue(np.allclose(ref, r))

    ref = (1, 2, 2, 1)
    r = hg.weight_graph(g, data, hg.WeightFunction.L1)
    self.assertTrue(np.allclose(ref, r))

    ref = (math.sqrt(1), 2, 2, math.sqrt(1))
    r = hg.weight_graph(g, data, hg.WeightFunction.L2)
    self.assertTrue(np.allclose(ref, r))

    ref = (1, 2, 2, 1)
    r = hg.weight_graph(g, data, hg.WeightFunction.L_infinity)
    self.assertTrue(np.allclose(ref, r))

    ref = (1, 4, 4, 1)
    r = hg.weight_graph(g, data, hg.WeightFunction.L2_squared)
    self.assertTrue(np.allclose(ref, r))
def test_weighting_graph_vectorial(self):
    g = hg.get_4_adjacency_graph((2, 2))
    data = np.asarray(((0, 1), (2, 3), (4, 5), (6, 7)))

    ref = (4, 8, 8, 4)
    r = hg.weight_graph(g, data, hg.WeightFunction.L1)
    self.assertTrue(np.allclose(ref, r))

    ref = (math.sqrt(8), math.sqrt(32), math.sqrt(32), math.sqrt(8))
    r = hg.weight_graph(g, data, hg.WeightFunction.L2)
    self.assertTrue(np.allclose(ref, r))

    ref = (2, 4, 4, 2)
    r = hg.weight_graph(g, data, hg.WeightFunction.L_infinity)
    self.assertTrue(np.allclose(ref, r))

    ref = (8, 32, 32, 8)
    r = hg.weight_graph(g, data, hg.WeightFunction.L2_squared)
    self.assertTrue(np.allclose(ref, r))
def do_hierarchies(self):
    path_trees = pjoin(self.root_path, 'precomp_desc', 'pb_trees')
    path_leaf_graphs = pjoin(self.root_path, 'precomp_desc', 'pb_leaf_graphs')
    path_maps = pjoin(self.root_path, 'precomp_desc', 'pb_maps')

    # print('doing probability boundaries hierarchies...')
    if os.path.exists(path_trees):
        # print('found directory {}. Delete to re-run'.format(path_trees))
        return
    else:
        os.makedirs(path_trees)

    if os.path.exists(path_leaf_graphs):
        # print('found directory {}. Delete to re-run'.format(path_leaf_graphs))
        return
    else:
        os.makedirs(path_leaf_graphs)

    if os.path.exists(path_maps):
        # print('found directory {}. Delete to re-run'.format(path_maps))
        return
    else:
        os.makedirs(path_maps)

    print('will save trees to {}'.format(path_trees))
    print('will save leaf graphs to {}'.format(path_leaf_graphs))
    print('will save vertex/edge maps to {}'.format(path_maps))

    pbar = tqdm(total=len(self.dl))
    for s in self.dl:
        pb = io.imread(pjoin(self.root_path, 'precomp_desc', 'pb', s['frame_name']))

        # mean-pb hierarchy on the 4-adjacency graph of the boundary probability map
        graph = hg.get_4_adjacency_graph(pb.shape)
        edge_weights = hg.weight_graph(graph, pb, hg.WeightFunction.mean)
        rag, vertex_map, edge_map, tree, altitudes = hg.cpp._mean_pb_hierarchy(
            graph, pb.shape, edge_weights)

        hg.save_tree(
            pjoin(path_trees, os.path.splitext(s['frame_name'])[0] + '.p'),
            tree, {'altitudes': altitudes})
        hg.save_graph_pink(
            pjoin(path_leaf_graphs, os.path.splitext(s['frame_name'])[0] + '.p'),
            rag)
        np.savez(
            pjoin(path_maps, os.path.splitext(s['frame_name'])[0]),
            **{'vertex_map': vertex_map, 'edge_map': edge_map})

        pbar.update(1)
    pbar.close()
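# Hedged sketch for loading the saved artifacts back; 'frame_0000' and the path
# variables are illustrative, and this assumes higra's reader counterparts
# (hg.read_tree, hg.read_graph_pink) of the save functions used above.
import numpy as np
import higra as hg
from os.path import join as pjoin

frame = 'frame_0000'  # hypothetical frame name

tree, attributes = hg.read_tree(pjoin(path_trees, frame + '.p'))
altitudes = attributes['altitudes']

rag, vertex_weights, edge_weights = hg.read_graph_pink(pjoin(path_leaf_graphs, frame + '.p'))

maps = np.load(pjoin(path_maps, frame + '.npz'))
vertex_map, edge_map = maps['vertex_map'], maps['edge_map']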
def constrained_connectivity_hierarchy_alpha_omega(graph, vertex_weights):
    r"""
    Alpha-omega constrained connectivity hierarchy based on the given vertex weighted graph.

    For :math:`(i,j)` an edge of the graph, we define :math:`w(i,j)=|w(i) - w(j)|`,
    the weight of this edge.
    Let :math:`X` be a set of vertices; the range of :math:`X` is the maximal absolute
    difference between the weights of any two vertices in :math:`X`:
    :math:`R(X) = \max\{|w(i) - w(j)|, (i,j)\in X^2\}`

    Let :math:`\alpha` be a positive real number; a set of vertices :math:`X` is
    :math:`\alpha`-connected if, for any two vertices :math:`i` and :math:`j` in :math:`X`,
    there exists a path from :math:`i` to :math:`j` in :math:`X` composed of edges of weights
    lower than or equal to :math:`\alpha`.

    Let :math:`\alpha` and :math:`\omega` be two positive real numbers; the
    :math:`\alpha-\omega`-connected components of the graph are the maximal
    :math:`\alpha'`-connected sets of vertices with a range lower than or equal to
    :math:`\omega`, with :math:`\alpha'\leq\alpha`.

    Finally, the alpha-omega constrained connectivity hierarchy is defined as the hierarchy
    composed of all the :math:`k-k`-connected components for all positive :math:`k`.

    The definition used follows the one given in:

        P. Soille,
        "Constrained connectivity for hierarchical image partitioning and simplification,"
        in IEEE Transactions on Pattern Analysis and Machine Intelligence,
        vol. 30, no. 7, pp. 1132-1145, July 2008. doi: 10.1109/TPAMI.2007.70817

    The algorithm runs in time :math:`\mathcal{O}(n\log(n))` and proceeds by filtering a
    quasi-flat zone hierarchy (see :func:`~higra.quasi_flat_zone_hierarchy`).

    :param graph: input graph
    :param vertex_weights: vertex weights of the input graph
    :return: a tree (Concept :class:`~higra.CptHierarchy`) and its node altitudes
    """
    vertex_weights = hg.linearize_vertex_weights(vertex_weights, graph)
    if vertex_weights.ndim != 1:
        raise ValueError("constrained_connectivity_hierarchy_alpha_omega only works for scalar vertex weights.")

    # quasi-flat zone hierarchy on the L1 distance weighted graph
    edge_weights = hg.weight_graph(graph, vertex_weights, hg.WeightFunction.L1)
    tree, altitudes = hg.quasi_flat_zone_hierarchy(graph, edge_weights)
    altitude_parents = altitudes[tree.parents()]

    # vertex value range inside each region
    min_value = hg.accumulate_sequential(tree, vertex_weights, hg.Accumulators.min)
    max_value = hg.accumulate_sequential(tree, vertex_weights, hg.Accumulators.max)
    value_range = max_value - min_value

    # the root node can't be deleted
    altitude_parents[tree.root()] = max(altitudes[tree.root()], value_range[tree.root()])

    # nodes with a range greater than or equal to the altitude of their parent have to be deleted
    violated_constraints = value_range >= altitude_parents

    # the altitude of nodes with a range greater than their altitude but lower than the one
    # of their parent must be raised to that range
    reparable_node_indices = np.nonzero(np.logical_and(value_range > altitudes, value_range < altitude_parents))
    altitudes[reparable_node_indices] = value_range[reparable_node_indices]

    # final result construction
    tree, node_map = hg.simplify_tree(tree, violated_constraints)
    altitudes = altitudes[node_map]
    hg.CptHierarchy.link(tree, graph)

    return tree, altitudes
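# Minimal usage sketch for constrained_connectivity_hierarchy_alpha_omega: a 1x6
# vertex weighted graph with two nearly-flat zones separated by a sharp transition;
# the weights and the cut threshold below are illustrative assumptions.
import numpy as np
import higra as hg

graph = hg.get_4_adjacency_graph((1, 6))
vertex_weights = np.asarray((0, 1, 1, 8, 9, 9))

tree, altitudes = constrained_connectivity_hierarchy_alpha_omega(graph, vertex_weights)

# horizontal cuts of the result give the (k, k)-connected components for increasing k
labels = hg.labelisation_horizontal_cut_from_threshold(tree, altitudes, 2)
print(labels)  # expected: {0, 1, 2} in one region, {3, 4, 5} in the other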