def _buildTree(ra, dec, leafsize=100, scale=None):
    """
    Build KD tree on simData RA/Dec and set radius (via setRad) for matching.

    Parameters
    ----------
    ra, dec : float (or arrays)
        RA and Dec values (in radians).
    leafsize : int (100)
        The number of RA/Dec pointings in each leaf node.
    scale : float (None)
        If set, the values are scaled up, rounded, and converted to integers.
        Useful for forcing a set precision and preventing machine-precision differences.
    """
    if np.any(np.abs(ra) > np.pi * 2.0) or np.any(np.abs(dec) > np.pi * 2.0):
        raise ValueError('Expecting RA and Dec values to be in radians.')
    x, y, z = _xyz_from_ra_dec(ra, dec)
    if scale is not None:
        x = np.round(x * scale).astype(int)
        y = np.round(y * scale).astype(int)
        z = np.round(z * scale).astype(int)
    data = list(zip(x, y, z))
    if np.size(data) > 0:
        try:
            tree = kdTree(data, leafsize=leafsize, balanced_tree=False, compact_nodes=False)
        except TypeError:
            # older scipy versions do not accept balanced_tree/compact_nodes
            tree = kdTree(data, leafsize=leafsize)
    else:
        raise ValueError('ra and dec should have length greater than 0.')
    return tree

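# Hedged usage sketch for _buildTree above (not from the original source).
# It assumes numpy, scipy's cKDTree imported as kdTree, and a helper
# _xyz_from_ra_dec that projects RA/Dec (radians) onto the unit sphere;
# the tiny stand-in below is included for illustration only.
import numpy as np
from scipy.spatial import cKDTree as kdTree

def _xyz_from_ra_dec(ra, dec):
    # Unit-sphere cartesian coordinates from RA/Dec in radians.
    return np.cos(dec) * np.cos(ra), np.cos(dec) * np.sin(ra), np.sin(dec)

ra = np.radians([10.0, 20.0, 30.0])
dec = np.radians([-5.0, 0.0, 5.0])
tree = _buildTree(ra, dec, leafsize=100)

# Nearest pointing to a query direction, also given as unit-sphere x, y, z.
qx, qy, qz = _xyz_from_ra_dec(np.radians(12.0), np.radians(-4.0))
dist, idx = tree.query([qx, qy, qz], k=1)
print(idx)  # index of the closest RA/Dec pointing
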
def _update_kdtree(self):
    self.empty = False
    # Use the local particle coordinates only (no shadow-particle fetch in this variant).
    all_particle_coords = self.data
    if len(all_particle_coords) < 3:
        self.empty = True
        # Too few points for a meaningful tree: build a placeholder on an empty data set.
        self.kdtree = kdTree(np.empty((2, 0)))
    else:
        self.kdtree = kdTree(all_particle_coords)
    return

def _update_kdtree(self):
    self.empty = False
    all_particle_coords = self.particleCoordinates
    if len(all_particle_coords) < 3:
        self.empty = True
        self.kdtree = kdTree(np.empty((2, 0)))
    else:
        self.kdtree = kdTree(all_particle_coords)
    return

def nn_evaluation(_fromCoords, _toCoords, n=1, weighted=False):
    """
    Provide nearest-neighbour information for uw swarms.

    Given "_toCoords" (which could be the .data handle, i.e. the coordinates,
    of a mesh or of a different swarm), return the indices of the n nearest
    neighbours in "_fromCoords" (usually swarm.particleCoordinates.data).
    Inverse-distance weights are also returned if weighted=True.

    The function works in parallel if the example below is followed.

    Usage
    -----
    # get the n indices, weights and distances
    ix, weights, d = nn_evaluation(swarm.particleCoordinates.data,
                                   toSwarm.particleCoordinates.data,
                                   n=n, weighted=False)

    # apply to the 'toSwarm' variable in a parallel-safe way
    if len(weights):  # parallel safety
        toSwarmVar.data[:, 0] = np.average(fromSwarmVar.evaluate(fromSwarm)[:, 0][ix],
                                           weights=weights,
                                           axis=len(weights.shape) - 1)
    """
    if len(_toCoords) > 0:  # this is required for safety in parallel
        tree = kdTree(_fromCoords)
        d, ix = tree.query(_toCoords, n)
        if n == 1:
            weights = np.ones(_toCoords.shape[0])
        elif not weighted:
            weights = np.ones((_toCoords.shape[0], n)) * (1. / n)
        else:
            weights = (1. / d) / (1. / d).sum(axis=1)[:, None]
        return ix, weights, d
    else:
        return np.empty(0, dtype="int"), np.empty(0, dtype="int"), np.empty(0, dtype="int")

def _buildTree(ra, dec, leafsize=100):
    """
    Build KD tree on simData RA/Dec and set radius (via setRad) for matching.

    Parameters
    ----------
    ra, dec : RA and Dec values (in radians).
    leafsize : the number of RA/Dec pointings in each leaf node.
    """
    if np.any(np.abs(ra) > np.pi * 2.0) or np.any(np.abs(dec) > np.pi * 2.0):
        raise ValueError('Expecting RA and Dec values to be in radians.')
    x, y, z = _xyz_from_ra_dec(ra, dec)
    data = list(zip(x, y, z))
    if np.size(data) > 0:
        try:
            tree = kdTree(data, leafsize=leafsize, balanced_tree=False, compact_nodes=False)
        except TypeError:
            # older scipy versions do not accept balanced_tree/compact_nodes
            tree = kdTree(data, leafsize=leafsize)
    else:
        raise ValueError('ra and dec should have length greater than 0.')
    return tree

def pointToRegionsDistances(p0, points, regions, kd=3):
    # Note: the kd argument is not used below.
    region_distances = []
    for rid in range(len(regions)):
        region = regions[rid]
        # Prepend the search point to the region's points.
        data = np.insert([points[i] for i in region], 0, p0, axis=0)
        kdtree = kdTree(data)
        distances, i = kdtree.query(p0, k=2)
        # distances[0] is the distance to the point itself, so take the next one.
        region_distances.append(distances[1])
    return region_distances

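# Hedged usage sketch for pointToRegionsDistances above (not from the original
# source). It assumes "points" is an array of coordinates and each entry of
# "regions" is a list of indices into "points"; the data below is made up.
import numpy as np
from scipy.spatial import cKDTree as kdTree

points = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0],
                   [5.0, 5.0], [6.0, 5.0]])
regions = [[0, 1, 2], [3, 4]]      # two regions given as index lists
p0 = np.array([0.1, 0.1])          # the search point

print(pointToRegionsDistances(p0, points, regions))
# -> distance from p0 to the nearest point of each region
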
def removeOutliersIndices(points, indices, mode='sor', max=2, k=8):
    if mode == 'median':
        # Keep points whose z value lies within `max` of the median z.
        z_data = points[:, 2]
        mask = abs(z_data - np.median(z_data)) < max
        # Return the surviving indices, consistent with the 'sor' branch.
        return indices[mask]
    elif mode == 'sor':
        # Statistical outlier removal: z-score of the mean distance to the k nearest neighbours.
        kdtree = kdTree(points)
        try:
            distances, i = kdtree.query(kdtree.data, k=k, workers=-1)
        except TypeError:
            # older scipy versions use n_jobs instead of workers
            distances, i = kdtree.query(kdtree.data, k=k, n_jobs=-1)
        z_distances = stats.zscore(np.mean(distances, axis=1))
        sor_filter = abs(z_distances) < max
        print("SOR removing:", np.count_nonzero(~sor_filter))
        return indices[sor_filter]
    else:
        print("Undefined outlier removal mode")

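# Hedged usage sketch for removeOutliersIndices above (not from the original
# source). It assumes `stats` is scipy.stats and that `indices` maps the rows
# of `points` back into some larger point cloud; here they are just 0..N-1.
import numpy as np
from scipy import stats
from scipy.spatial import cKDTree as kdTree

rng = np.random.default_rng(0)
points = rng.random((200, 3))
points[0] = [50.0, 50.0, 50.0]            # plant an obvious outlier
indices = np.arange(len(points))

kept = removeOutliersIndices(points, indices, mode='sor', max=2, k=8)
print(len(points) - len(kept), "points flagged as outliers")
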
def nn_evaluation(fromSwarm, toSwarm, n=1, weighted=False):
    """
    Provide nearest-neighbour information for uw swarms.

    Given "toSwarm", return the indices of the n nearest neighbours in
    "fromSwarm"; inverse-distance weights are also returned if weighted=True.
    The function works in parallel.

    The arrays come out a bit differently when used in nearest-neighbour form
    (n == 1) or IDW form (n > 1). The examples below show how to fill a swarm
    variable in each case.

    Usage n == 1:
    -------------
    ix, weights = nn_evaluation(swarm, data, n=1, weighted=False)
    toSwarmVar.data[:][:, 0] = np.average(fromSwarmVar[ix][:, 0], weights=weights)

    Usage n > 1:
    ------------
    ix, weights = nn_evaluation(swarm, data, n=2, weighted=False)
    toSwarmVar.data[:][:, 0] = np.average(fromSwarmVar[ix][:, :, 0], weights=weights, axis=1)
    """
    if len(toSwarm) > 0:  # this is required for safety in parallel
        # Avoid rebuilding the tree when this function is called multiple times.
        try:
            tree = fromSwarm.tree
        except AttributeError:
            fromSwarm.tree = kdTree(fromSwarm.particleCoordinates.data)
            tree = fromSwarm.tree
        d, ix = tree.query(toSwarm, n)
        if n == 1:
            weights = np.ones(toSwarm.shape[0])
        elif not weighted:
            weights = np.ones((toSwarm.shape[0], n)) * (1. / n)
        else:
            weights = (1. / d) / (1. / d).sum(axis=1)[:, None]
        return ix, weights
    else:
        return [], []

def _update_kdtree(self):
    self.empty = False
    self.swarm.shadow_particles_fetch()
    dims = self.swarm.particleCoordinates.data.shape[1]
    # Combine local and shadow (off-processor) particle coordinates.
    pc = np.append(self.swarm.particleCoordinates.data,
                   self.swarm.particleCoordinates.data_shadow)
    all_particle_coords = pc.reshape(-1, dims)
    if len(all_particle_coords) < 4:
        self.empty = True
        self.kdtree = lambda x: float('inf')
    else:
        self.kdtree = kdTree(all_particle_coords)
    return

def _update_kdtree(self):
    self.empty = False
    self.swarm.shadow_particles_fetch()
    dims = self.swarm.particleCoordinates.data.shape[1]
    # Combine local and shadow (off-processor) particle coordinates.
    pc = np.append(self.swarm.particleCoordinates.data,
                   self.swarm.particleCoordinates.data_shadow)
    all_particle_coords = pc.reshape(-1, dims)
    if len(all_particle_coords) < 3:
        self.empty = True
        self.kdtree = lambda x: float('inf')
    else:
        self.kdtree = kdTree(all_particle_coords)
    return

def _update_kdtree_v2(self):
    self.empty = False
    self.swarm.shadow_particles_fetch()

    if self.swarm.particleLocalCount == 0:
        self.empty = True
        self.kdtree = lambda x: float('inf')
        return

    # Combine local and shadow (off-processor) particle coordinates.
    if self.swarm.particleCoordinates.data_shadow.shape[0] == 0:
        all_particle_coords = self.swarm.particleCoordinates.data
    else:
        all_particle_coords = np.concatenate((self.swarm.particleCoordinates.data,
                                              self.swarm.particleCoordinates.data_shadow))

    if len(all_particle_coords) < 3:
        self.empty = True
        self.kdtree = lambda x: float('inf')
    else:
        self.kdtree = kdTree(all_particle_coords)
    return

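# Hedged sketch (not from the original source): a minimal stand-alone harness
# illustrating what the _update_kdtree_v2 method above expects from a swarm.
# MockCoords, MockSwarm and MockInterface are hypothetical stand-ins for the
# underworld objects; they only provide the attributes the method touches.
import numpy as np
from scipy.spatial import cKDTree as kdTree

class MockCoords:
    def __init__(self, data, data_shadow):
        self.data = data                # local particle coordinates
        self.data_shadow = data_shadow  # shadow (off-processor) coordinates

class MockSwarm:
    def __init__(self, data, data_shadow):
        self.particleCoordinates = MockCoords(data, data_shadow)
        self.particleLocalCount = len(data)
    def shadow_particles_fetch(self):
        pass  # no-op stand-in for the parallel shadow-particle exchange

class MockInterface:
    def __init__(self, swarm):
        self.swarm = swarm
    _update_kdtree = _update_kdtree_v2  # reuse the function defined above as a method

iface = MockInterface(MockSwarm(np.random.random((10, 2)), np.empty((0, 2))))
iface._update_kdtree()
print(iface.empty)  # False: a real cKDTree was built on the 10 local particles
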
def _update_surface_normals(self):
    """
    Rebuilds the normals for the string of points.
    """
    all_particle_coords = self.data  # can be important for parallel

    # If there are too few points to compute normals, zero the director values.
    if self.empty:
        self.director.data[...] = 0.0
    else:
        # Before looping through particles, set up the local coordinates.
        particle_coords = self.swarm.particleCoordinates.data

        if isinstance(self.insidePt, interface2D):
            # Local inside points.
            inside_particle_coords = self.insidePt.swarm.particleCoordinates.data
            if inside_particle_coords.shape[0] > 1:
                localkdtree = kdTree(inside_particle_coords)
                # Get the full set of inside-point nearest neighbours.
                r, nindex = localkdtree.query(particle_coords, k=1)
                insidepoints = inside_particle_coords[nindex]

        # These will hold the normal vector components.
        Nx = np.empty(self.swarm.particleLocalCount)
        Ny = np.empty(self.swarm.particleLocalCount)

        for i, xy in enumerate(particle_coords):
            r, neighbours = self.kdtree.query(particle_coords[i], k=3)

            # Neighbour points are neighbours[1] and neighbours[2].
            XY1 = all_particle_coords[neighbours[1]]
            XY2 = all_particle_coords[neighbours[2]]

            dXY = XY2 - XY1
            Nx[i] = dXY[1]
            Ny[i] = -dXY[0]

            # If the inside point is another interface2D object,
            # use the nearest inside point to define the orientation.
            if isinstance(self.insidePt, interface2D):
                if inside_particle_coords.shape[0] > 1:
                    ip = insidepoints[i]
                    sign = np.sign((ip[0] - xy[0]) * Nx[i] + (ip[1] - xy[1]) * Ny[i])
                    Nx[i] *= sign
                    Ny[i] *= sign
            # Otherwise, if the inside point is a coordinate, use it directly.
            elif hasattr(self.insidePt, '__len__'):
                sign = np.sign((self.insidePt[0] - xy[0]) * Nx[i] +
                               (self.insidePt[1] - xy[1]) * Ny[i])
                Nx[i] *= sign
                Ny[i] *= sign

        # Normalise the normal vectors.
        for i in range(0, self.swarm.particleLocalCount):
            scale = 1.0 / np.sqrt(Nx[i]**2 + Ny[i]**2)
            Nx[i] *= scale
            Ny[i] *= scale

        self.director.data[:, 0] = Nx[:]
        self.director.data[:, 1] = Ny[:]

    return

import numpy as np
from scipy.spatial import cKDTree as kdTree

np.random.seed(42)

# Make a square of random points
npts = int(1e7)
x = np.random.random(npts) - 0.5
y = np.random.random(npts) - 0.5

data = list(zip(x, y))
tree = kdTree(data)

count = np.size(tree.query_ball_point([0, 0], 0.2))
print('count = %i' % count)