def test_NNGraph():
    """Smoke-test NNGraph construction (knn, flann-knn, radius) for every distance type.

    Construction is the assertion: any unsupported combination raises.
    """
    Xin = np.arange(90).reshape(30, 3)
    dist_types = ['euclidean', 'manhattan', 'max_dist', 'minkowski']
    for dist_type in dist_types:
        # Build for side effects only; the original bound these to G1/G2/G3
        # but never used them, so the dead locals are dropped.
        graphs.NNGraph(Xin, NNtype='knn', dist_type=dist_type)
        graphs.NNGraph(Xin, use_flann=True, NNtype='knn', dist_type=dist_type)
        graphs.NNGraph(Xin, NNtype='radius', dist_type=dist_type)
def test_nngraph(self):
    """Exercise NNGraph construction across all supported distance types."""
    Xin = np.arange(90).reshape(30, 3)
    for dist_type in ('euclidean', 'manhattan', 'max_dist', 'minkowski'):
        if dist_type != 'minkowski':
            # Only p-norms with 1<=p<=infinity permitted.
            graphs.NNGraph(Xin, NNtype='radius', dist_type=dist_type)
            graphs.NNGraph(Xin, NNtype='knn', dist_type=dist_type)
        if dist_type != 'max_dist':
            # Distance type unsupported in the C bindings,
            # use the C++ bindings instead.
            graphs.NNGraph(Xin, use_flann=True, NNtype='knn',
                           dist_type=dist_type)
def test_nngraph(self, n_vertices=30):
    """Exercise NNGraph construction on a seeded Gaussian point cloud."""
    rs = np.random.RandomState(42)
    Xin = rs.normal(size=(n_vertices, 3))
    for dist_type in ('euclidean', 'manhattan', 'max_dist', 'minkowski'):
        if dist_type != 'minkowski':
            # Only p-norms with 1<=p<=infinity permitted.
            graphs.NNGraph(Xin, NNtype='radius', dist_type=dist_type)
            graphs.NNGraph(Xin, NNtype='knn', dist_type=dist_type)
        if dist_type != 'max_dist':
            # Distance type unsupported in the C bindings,
            # use the C++ bindings instead.
            graphs.NNGraph(Xin, use_flann=True, NNtype='knn',
                           dist_type=dist_type)
def _make_filter(self, tau=10):
    """Build a heat-kernel filter over a k-NN graph of this instance's points.

    :param tau: diffusion time; higher tau means a spikier signal over
        fewer points.
    :return: a ``filters.Heat`` filter bound to the constructed graph.
    """
    # zip() returns a one-shot lazy iterator on Python 3; materialize the
    # (x, y) coordinate pairs so NNGraph receives concrete point data.
    graph = graphs.NNGraph(list(zip(self.x, self.y)), k=self.num_clusters)
    graph.estimate_lmax()
    return filters.Heat(graph, tau=tau)
def denoise_cluster(self, points, num_cluster, tau=10):
    """
    Determine if the cluster after denoising is the same as the original

    Builds a heat filter over a k-NN graph of ``points``, diffuses each
    cluster's indicator vector, then keeps only the points whose dominant
    (argmax) cluster after diffusion matches their original label.
    Mutates ``self.points`` and ``self.cluster_list`` in place.

    :param points: point data used to build the NN graph; length must match
        ``self.cluster_list`` and ``self.points``
    :param num_cluster: number of clusters / neighbors for the NN graph
    :param tau: heat diffusion time
    :return: [boolean], false means cluster_id varies, true means cluster_id is preserved
    """
    length = len(points)
    # NOTE(review): num_cluster is reused as the k of the k-NN graph here —
    # confirm that is intentional rather than a separate neighbor count.
    graph = graphs.NNGraph(points, k=num_cluster)
    graph.estimate_lmax()
    fn = filters.Heat(graph, tau=tau)
    signal = np.empty(num_cluster * length).reshape(
        num_cluster, length)  # create num_cluster*len(points) matrix
    vectors = np.zeros(length * num_cluster).reshape(
        length, num_cluster)  # create len(points)*num_cluster matrix
    # fill the vectors sparse matrix: one-hot row per point, keyed by its
    # original cluster id (assumes cluster ids index columns 0..num_cluster-1)
    for i, vec in enumerate(vectors):
        vec[self.cluster_list[i]] = 1
    vectors = vectors.T
    # fill the denoising matrix, find the dominant cluster of each points
    for cluster_num, vec in enumerate(vectors):
        signal[cluster_num] = fn.analyze(vec)
    # see if the dominant cluster after denoising is the same as the original cluster
    dominant_cluster = np.argmax(signal, axis=0)
    vor_points, vor_clusters = [], []
    # NOTE(review): iterates self.points while sizes were derived from the
    # `points` argument — assumes the two are the same length; confirm.
    for index, coor in enumerate(self.points):
        if dominant_cluster[index] == int(self.cluster_list[index]):
            vor_points.append(coor)
            vor_clusters.append(self.cluster_list[index])
    self.points = vor_points
    self.cluster_list = vor_clusters
from pygsp import graphs
import xarray as xa
import numpy as np
from hyperclass.data.spatial.tile import Tile, Block
import os, math, sys

# NOTE(review): DataManager is used below but never imported — it is not
# provided by any of the imports above; confirm its module and import it.

# Script: build a k-NN graph from the point data of one tile block.
block_shape = (500, 500)
block_indices = (0, 0)
image_name = "ang20170720t004130_corr_v2p9"
N_neighbors = 8

dm = DataManager(image_name, block_shape=block_shape)
tile: Tile = dm.getTile()
block = tile.getBlock(*block_indices)
data: np.ndarray = block.getPointData().values

# Keyword arguments instead of opaque positional booleans; the original call
# relied on pygsp's parameter order (NNtype, use_flann, center, rescale, k).
graph = graphs.NNGraph(data, NNtype='knn', use_flann=True,
                       center=True, rescale=True, k=N_neighbors)
print(".")
def test_NNGraph():
    """Construct an NNGraph from a small point cloud and verify its attributes."""
    point_cloud = np.arange(90).reshape(30, 3)
    nn_graph = graphs.NNGraph(point_cloud)
    needed_attributes_testing(nn_graph)
def test_NNGraph():
    """Smoke test: building an NNGraph from a 30x3 point set must not raise."""
    coords = np.arange(90).reshape(30, 3)
    G = graphs.NNGraph(coords)