class test_count_neighbors:

    def setUp(self):
        n = 50
        m = 2
        np.random.seed(1234)
        self.T1 = KDTree(np.random.randn(n, m), leafsize=2)
        self.T2 = KDTree(np.random.randn(n, m), leafsize=2)

    def test_one_radius(self):
        r = 0.2
        assert_equal(self.T1.count_neighbors(self.T2, r),
                     np.sum([len(l) for l in self.T1.query_ball_tree(self.T2, r)]))

    def test_large_radius(self):
        r = 1000
        assert_equal(self.T1.count_neighbors(self.T2, r),
                     np.sum([len(l) for l in self.T1.query_ball_tree(self.T2, r)]))

    def test_multiple_radius(self):
        rs = np.exp(np.linspace(np.log(0.01), np.log(10), 3))
        results = self.T1.count_neighbors(self.T2, rs)
        assert_(np.all(np.diff(results) >= 0))
        for r, result in zip(rs, results):
            assert_equal(self.T1.count_neighbors(self.T2, r), result)
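# A minimal, self-contained sketch of the property the test class above asserts:
# for a single radius, count_neighbors should equal the total number of indices
# returned by query_ball_tree. The sizes and radius are illustrative.
import numpy as np
from scipy.spatial import KDTree

np.random.seed(1234)
T1 = KDTree(np.random.randn(50, 2), leafsize=2)
T2 = KDTree(np.random.randn(50, 2), leafsize=2)

r = 0.2
n_pairs = T1.count_neighbors(T2, r)                        # number of (i, j) pairs with d <= r
n_listed = sum(len(l) for l in T1.query_ball_tree(T2, r))  # same pairs, listed explicitly
assert n_pairs == n_listed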
def match_jets(particle_level_jets: np.ndarray, detector_level_jets: np.ndarray,
               matching_distance: float) -> List[Tuple[int, int]]:
    """ Match particle and detector level jets geometrically.

    Matching is performed via KDTrees. The particle level jet is required to match the
    detector level jet and vice-versa.

    Args:
        particle_level_jets: Particle level jets.
        detector_level_jets: Detector level jets.
        matching_distance: Maximum matching distance between jets. Default guidance is
            to use 0.6 * R.
    Returns:
        List of pairs of (particle level index, detector level index).
    """
    # Extract the jet locations from the PseudoJets.
    part_level_positions = np.array([(j.eta, j.phi) for j in particle_level_jets])
    det_level_positions = np.array([(j.eta, j.phi) for j in detector_level_jets])

    # Construct the KDTrees. They default to using the L^2 norm (ie our expected distance measure).
    part_level_tree = KDTree(part_level_positions)
    det_level_tree = KDTree(det_level_positions)
    # Perform the actual matching.
    part_level_matches = part_level_tree.query_ball_tree(det_level_tree, r=matching_distance)
    det_level_matches = det_level_tree.query_ball_tree(part_level_tree, r=matching_distance)

    # Only keep the closest match where the particle level jet points to the detector level
    # jet and vice-versa.
    indices = []
    for i, part_match in enumerate(part_level_matches):
        min_distance = 1000
        min_distance_index = -1
        for det_match in det_level_matches:
            for m in det_match:
                if m in part_match:
                    # Calculate the distance
                    dist = np.sqrt((part_level_positions[i][0] - det_level_positions[m][0]) ** 2
                                   + (part_level_positions[i][1] - det_level_positions[m][1]) ** 2)
                    #logger.debug(f"part_level_index: {i}, Potential match: {m}, distance: {dist}")
                    if dist < min_distance:
                        #logger.debug(f"Found match! Previous min_distance: {min_distance}")
                        min_distance = dist
                        min_distance_index = m

        if min_distance_index != -1:
            indices.append((i, min_distance_index))

    #logger.debug(f"part_level_matches: {part_level_matches}, det_level_matches: {det_level_matches}")
    #logger.debug(f"indices: {indices}")
    return indices
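# Hedged usage sketch for match_jets above. Any object exposing .eta and .phi can
# stand in for the jets; SimpleNamespace is used here purely for illustration.
from types import SimpleNamespace
import numpy as np

part_jets = np.array([SimpleNamespace(eta=0.10, phi=1.00),
                      SimpleNamespace(eta=2.00, phi=-1.50)])
det_jets = np.array([SimpleNamespace(eta=0.15, phi=1.05),
                     SimpleNamespace(eta=2.40, phi=-1.50)])

# With jet radius R = 0.4 the suggested matching distance is 0.6 * R = 0.24, so
# only the first particle/detector pair is close enough to match.
print(match_jets(part_jets, det_jets, matching_distance=0.24))  # [(0, 0)]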
def clean(ra, dec):
    # Use two radius searches: a wide one (30") to find paired detections and a
    # tight one (1.5") to find stationary ones, then keep only detections that are
    # paired and not stationary.
    tree = KDTree(list(zip(ra, dec)))
    pair = tree.query_ball_tree(tree, 30 / 3600.)
    stationary = tree.query_ball_tree(tree, 1.5 / 3600.)
    mask = zeros(len(ra), dtype=bool)
    for n, i in enumerate(pair):
        if len(i) > 2 and len(stationary[n]) == 1:
            mask[n] = 1
        else:
            mask[n] = 0
    return mask
def compute(self, dataset_pool):
    parcels = self.get_dataset()
    arr = self.get_dataset().compute_variables(
        ['parcel.aggregate(job.sector_id==%s)' % self.sector_id],
        dataset_pool=dataset_pool)
    coords = column_stack((parcels.get_attribute("x_coord_sp"),
                           parcels.get_attribute("y_coord_sp")))
    kd_tree = KDTree(coords, 100)
    results = kd_tree.query_ball_tree(kd_tree, self.radius)
    return array(map(lambda l: arr[l].sum(), results))
def _featurize_cells(df, neighborhood_feature_fn, radius, is_anchor_col,
                     x_col, y_col, z_col=None, include_anchors=False):
    anchor_cells = df[df[is_anchor_col]]
    if include_anchors:
        neighborhood_cells = df
    else:
        neighborhood_cells = df[~df[is_anchor_col]]

    # Throw an error if there are no cells in neighborhoods
    # For example all cells are anchors and include_anchors == False
    if len(neighborhood_cells.index) == 0:
        raise ValueError("There are no neighbours to compute features from "
                         "(try include_anchors = True)")

    feature_fn = functools.partial(neighborhood_feature_fn, neighborhood_cells)

    coord_cols = [x_col, y_col]
    if z_col is not None:
        coord_cols.append(z_col)

    anchor_kdTree = KDTree(anchor_cells[coord_cols].values)
    neighborhood_kdTree = KDTree(neighborhood_cells[coord_cols].values)

    neighborhoods = anchor_kdTree.query_ball_tree(neighborhood_kdTree, radius,
                                                  p=2.0, eps=0.0)
    neighborhood_features = pd.concat(map(feature_fn, neighborhoods), axis=1).T
    neighborhood_features.index = anchor_cells.index

    blank_row_mask = neighborhood_features.isnull().all(axis=1)
    return neighborhood_features[~blank_row_mask]
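# Hedged usage sketch for _featurize_cells above, assuming the imports the snippet
# relies on (functools, pandas as pd, scipy's KDTree) are in scope. The feature
# function receives the neighbourhood cells plus the positional indices of one
# anchor's neighbours; the column names and the mean-intensity feature are
# illustrative assumptions.
import pandas as pd

def mean_marker(neighborhood_cells, indices):
    return neighborhood_cells.iloc[indices][["marker"]].mean()

cells = pd.DataFrame({
    "x": [0.0, 0.1, 0.2, 5.0],
    "y": [0.0, 0.1, 0.2, 5.0],
    "marker": [1.0, 2.0, 3.0, 4.0],
    "is_anchor": [True, False, False, False],
})

features = _featurize_cells(cells, mean_marker, radius=1.0,
                            is_anchor_col="is_anchor", x_col="x", y_col="y")
print(features)  # one row per anchor cell: the mean marker of its two neighbours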
def compute(self, dataset_pool):
    parcels = self.get_dataset()
    arr = self.get_dataset().compute_variables(
        ['parcel.aggregate(household.children>0)'],
        dataset_pool=dataset_pool)
    coords = column_stack((parcels.get_attribute("x_coord_sp"),
                           parcels.get_attribute("y_coord_sp")))
    kd_tree = KDTree(coords, 100)
    results = kd_tree.query_ball_tree(kd_tree, self.radius)
    return array(map(lambda l: arr[l].sum(), results))
def find_seeds(self, mixing_ratio, patch_length): """Find the seeds for the AWoL-MRF patches. We assume that each seed needs a minimum number of high-confidence voxels in its 26-voxel neighbourhood, which is determined by the mixing ratio parameter.""" self.seeds = [] seed_coord = [] lflat = self.labels.ravel() for lcv in self.lcv: for value in self.label_values[1:]: #for each structural label if sum(lflat[self.neighbors_big[lcv]] == value) > mixing_ratio: self.seeds.append(lcv) seed_coord.append(unravel_index(lcv, self.labels.shape)) del self.neighbors_big if len(self.seeds) > 500: #control for max number of seeds self.seeds = self.seeds[:500] seed_coord = seed_coord[:500] #find the patch for each seed using KDTree tree_seeds = KDTree(seed_coord) self.patches = tree_seeds.query_ball_tree(self.tree, patch_length) for i, elt in enumerate(self.patches): self.patches[i].remove(self.seeds[i]) del self.tree
def match(gt_maxima, pred_maxima, voxel_size, distance_threshold=39): assert(np.all(np.shape(gt_maxima) == np.shape(pred_maxima))) gt_maxima_locations = np.array(np.nonzero(gt_maxima)).T pred_maxima_locations = np.array(np.nonzero(pred_maxima)).T vertices_gt = {i: gt_maxima_locations[i] for i in range(len(gt_maxima_locations))} vertices_pred = {j + len(gt_maxima_locations): pred_maxima_locations[j] for j in range(len(pred_maxima_locations))} vertices = vertices_gt.copy() vertices.update(vertices_pred) G = nx.Graph() for v, pos in vertices.items(): G.add_node(v, position=pos) gt_tree = KDTree(gt_maxima_locations * np.array(voxel_size)) pred_tree = KDTree(pred_maxima_locations * np.array(voxel_size)) results = gt_tree.query_ball_tree(pred_tree, r=distance_threshold) edges = [] edge_distances = [] for gt_id in range(len(results)): for pred_id in results[gt_id]: edges.append((gt_id, pred_id + len(gt_maxima_locations))) edge_distances.append(np.linalg.norm(gt_maxima_locations[gt_id] - pred_maxima_locations[pred_id])) eps = 10e-6 for edge, distance in zip(edges, edge_distances): G.add_edge(edge[0], edge[1], weight=1./(distance + eps)) pairs = nx.max_weight_matching(G, maxcardinality=True) v_gt = set([v for v in vertices_gt.keys()]) v_pred = set([w for w in vertices_pred.keys()]) true_positives = len(pairs) for edge in pairs: try: v_gt.remove(edge[0]) v_pred.remove(edge[1]) except KeyError: v_gt.remove(edge[1]) v_pred.remove(edge[0]) false_negatives = len(v_gt) false_positives = len(v_pred) canvas_fn = np.zeros(np.shape(gt_maxima)) canvas_fp = np.zeros(np.shape(pred_maxima)) for fn in v_gt: canvas_fn[vertices_gt[fn][0], vertices_gt[fn][1], vertices_gt[fn][2]] = 1 for fp in v_pred: canvas_fp[vertices_pred[fp][0], vertices_pred[fp][1], vertices_pred[fp][2]] = 1 return true_positives, false_negatives, false_positives, canvas_fn, canvas_fp
def _kill_duplicates(arr, minimum_distance=10):
    """
    Attempts to eliminate garbage coordinates

    arr is a 2D array of (npeaks)x4 coordinates.
        0:1 is the x and y coordinates.
        2 is the peak height
        3 is the peak width
    """
    from scipy.spatial import KDTree
    tree = KDTree(arr)
    match_list = tree.query_ball_tree(tree, minimum_distance)
    match_list = [list_item for list_item in match_list if len(list_item) > 1]
    chuck_list = []
    for match in match_list:
        # compile the heights from the table
        #heights=arr[match][:,2]
        #best=np.argmax(heights)
        match.remove(match[0])
        chuck_list += match
    keepers = list(range(arr.shape[0]))
    [keepers.remove(chuck) for chuck in chuck_list if chuck in keepers]
    return arr[keepers]
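# Hedged usage sketch for _kill_duplicates above (assuming numpy is imported as np).
# Note that the tree is built on all four columns, so peak height and width also
# enter the distance; the first two rows below differ only slightly in position
# and height, so the second one is dropped as a duplicate.
import numpy as np

peaks = np.array([
    [10.0, 10.0, 100.0, 2.0],   # x, y, height, width
    [12.0, 11.0, 100.0, 2.0],
    [50.0, 50.0,  80.0, 2.0],
])
print(_kill_duplicates(peaks, minimum_distance=10))  # keeps rows 0 and 2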
def findAffineTransform(test_srcs, ref_srcs, max_pix_tol = 2., min_matches_fraction = 0.8, invariantMap=None): if len(test_srcs) < 3: raise Exception("Test sources has less than the minimum value of points (3).") if invariantMap is None: invMap = InvariantTriangleMapping() if len(ref_srcs) < 3: raise Exception("Test sources has less than the minimum value of points (3).") #generateInvariants should return a list of the invariant tuples for each asterism and # a corresponding list of the indices that make up the asterism ref_invariants, ref_asterisms = invMap.generateInvariants(ref_srcs, nearest_neighbors = 7) ref_invariant_tree = KDTree(ref_invariants) test_invariants, test_asterisms = invMap.generateInvariants(test_srcs, nearest_neighbors = 5) test_invariant_tree = KDTree(test_invariants) #0.03 is just an empirical number that returns about the same number of matches than inputs matches_list = test_invariant_tree.query_ball_tree(ref_invariant_tree, 0.03) matches = [] #t1 is an asterism in test, t2 in ref for t1, t2_list in zip(test_asterisms, matches_list): for t2 in np.array(ref_asterisms)[t2_list]: matches.append(zip(t2, t1)) matches = np.array(matches) invModel = invMap.matchTransform(ref_srcs, test_srcs) nInvariants = len(matches) max_iter = nInvariants min_matches = min(10, int(nInvariants * min_matches_fraction)) bestM = ransac.ransac(matches, invModel, 1, max_iter, max_pix_tol, min_matches) return bestM
def compute(self, dataset_pool): with logger.block(name="compute variable persons_within_DDD_of_parcel with DDD=%s" % self.radius, verbose=False): results = None with logger.block(name="trying to read cache file %s" % self.cache_file_name, verbose=False): try: results = self._load_results() except IOError: logger.log_warning("Cache file could not be loaded") with logger.block(name="initialize datasets", verbose=False): parcels = self.get_dataset() arr = self.get_dataset().sum_dataset_over_ids(dataset_pool.get_dataset('household'), attribute_name="persons") if not results: with logger.block(name="initialize coords", verbose=False): coords = column_stack( (parcels.get_attribute("x_coord_sp"), parcels.get_attribute("y_coord_sp")) ) with logger.block(name="build KDTree", verbose=False): kd_tree = KDTree(coords, 100) with logger.block(name="compute"): results = kd_tree.query_ball_tree(kd_tree, self.radius) with logger.block(name="cache"): if not SimulationState().cache_directory_exists(): logger.log_warning("Cache does not exist and is created.") SimulationState().create_cache_directory() self._cache_results(results) with logger.block(name="sum results", verbose=False): return_values = array(map(lambda l: arr[l].sum(), results)) return return_values
def Near_PRM(V, r_n):
    # Create a KD Tree first
    KDT = KDTree(data=V)
    # Create the KD Tree to search against
    search_against = KDTree(data=V)
    # run query_ball_tree
    results = KDT.query_ball_tree(other=search_against, r=r_n)

    # construct edge set
    #edges = []
    #for i in range(len(V)):
    #    for j in results[i]:
    #        if i == j:
    #            continue
    #        # here, i and j are indices in V
    #        distance = np.linalg.norm(V[i] - V[j], ord=None)
    #        edges.append((i, j, distance))
    edges = defaultdict(list)
    for i in range(len(V)):
        for j in results[i]:
            if i == j:
                continue
            else:
                edges[i].append((j, np.linalg.norm(V[i] - V[j])))
    return edges
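# Side note on the pattern above, as a small self-contained sketch: building a second
# KDTree on the same vertices is not required; querying the tree against itself (or
# using query_ball_point) returns the same neighbour lists.
import numpy as np
from scipy.spatial import KDTree

rng = np.random.default_rng(0)
V = rng.random((50, 2))
r_n = 0.2

tree = KDTree(V)
via_tree = [sorted(idx) for idx in tree.query_ball_tree(tree, r=r_n)]
via_point = [sorted(idx) for idx in tree.query_ball_point(V, r=r_n)]
assert via_tree == via_point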
def find_seeds(self): """Find the seeds for the AWoL-MRF patches. We assume that each seed needs a minimum number of high-confidence voxels in its 26-voxel neighbourhood, which is determined by the mixing ratio parameter.""" self.seeds = [] confidence_level = [] lflat = self.labels.ravel() for lcv in self.lcv: n_hcv = self.mixing_ratio for value in self.label_values[1:]: #for each structural label if sum(lflat[self.neighbors_big[lcv]] == value) > n_hcv: n_hcv = sum(lflat[self.neighbors_big[lcv]] == value) if n_hcv > self.mixing_ratio: #minimum confidence level for seeds confidence_level.append(n_hcv) self.seeds.append(lcv) del self.neighbors_big if len(self.seeds) > 0: self.seeds = [seed for (c, seed) in sorted(zip(confidence_level, self.seeds))][::-1] seed_coord = [np.unravel_index(seed, self.labels.shape) for seed in self.seeds] #find the patch for each seed using KDTree tree_seeds = KDTree(seed_coord) self.patches = tree_seeds.query_ball_tree(self.tree, self.patch_length) for i, elt in enumerate(self.patches): self.patches[i].sort()
def compute(self, dataset_pool):
    parcels = self.get_dataset()
    arr = self.get_dataset().sum_dataset_over_ids(dataset_pool.get_dataset('household'),
                                                  constant=1)
    coords = column_stack((parcels.get_attribute("x_coord_sp"),
                           parcels.get_attribute("y_coord_sp")))
    kd_tree = KDTree(coords, 100)
    results = kd_tree.query_ball_tree(kd_tree, self.radius)
    return array(map(lambda l: arr[l].sum(), results))
def inequivalent_iteration(k, sm, thr):
    """
    Find symmetry-inequivalent points starting from a set of points, a symmetry
    matrix and a precision threshold.
    Params:
     - k: np.ndarray, starting point coordinates
     - sm: np.ndarray, symmetry matrix (3x3)
     - thr: float (def=1e-5), precision threshold for two points being equivalent
    """
    from scipy.spatial import KDTree
    n_pt = k.shape[0]

    res_i = np.arange(n_pt, dtype=int)
    res_p = np.empty((0, 3))

    tree = KDTree(k)
    itree = KDTree(k.dot(sm))

    res = tree.query_ball_tree(itree, thr)

    avoid = []
    n1 = 0
    for n, l in enumerate(res):
        if n in avoid:
            continue
        res_p = np.vstack((res_p, tree.data[n]))
        res_i[n] = n1
        for i in l:
            res_i[i] = n1
            avoid.append(i)
        n1 += 1

    return res_i, res_p
def test_find_daily_clusters(plot=True): df_dict = dict() df_dict['x'] = [ 1, 2, 2, 3, 10, 10, 10, 10, 10, 10, .5, .8, 2.5, 2.7, 10, 11, 12, 5, 5.5, 5, 5.5 ] df_dict['y'] = [ 1, .5, 1.5, 1.2, 1, 2, 3, 5, 6, 7, 2, 1.2, .5, .6, 4, 4, 4, 3, 3, 3.3, 3.3 ] df_dict['dayofyear'] = [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3 ] df = pd.DataFrame(df_dict) print "DataFrame:" print df if plot: plt.scatter(df.x, df.y, c=df.dayofyear) get_current_fig_manager().window.raise_() plt.show() plt.close() kd = KDTree(np.column_stack((df.x, df.y))) neighbors_list = kd.query_ball_tree(kd, 1.05) neighbors_dict = dict() for i, arr in enumerate(neighbors_list): neighbors_dict[df.iloc[i].name] = set() for j in arr: neighbors_dict[df.iloc[i].name].add(df.iloc[j].name) # Get initial clusters on day 1 day1fires = df[df.dayofyear == 1] clust2nodes, nodes2clust, merge_dict = find_daily_clusters( day1fires, neighbors_dict) print "clust2nodes, day 1:" + str(clust2nodes) print "node2clusts, day 1:" + str(nodes2clust) print "merge_dict, day 1:" + str(merge_dict) day2fires = df[df.dayofyear == 2] clust2nodes, node2clusts, merge_dict = find_daily_clusters( day2fires, neighbors_dict, clust2nodes=clust2nodes, nodes2clust=nodes2clust, merge_dict=merge_dict) print "clust2nodes, day 2:" + str(clust2nodes) print "node2clusts, day 2:" + str(nodes2clust) print "merge_dict, day 2:" + str(merge_dict) day3fires = df[df.dayofyear == 3] clust2nodes, node2clusts, merge_dict = find_daily_clusters( day3fires, neighbors_dict, clust2nodes=clust2nodes, nodes2clust=nodes2clust, merge_dict=merge_dict) print "clust2nodes, day 3:" + str(clust2nodes) print "node2clusts, day 3:" + str(nodes2clust) print "merge_dict, day 3:" + str(merge_dict)
def generate_cubic_grid(structure, centers, distance, dim): """Generate a cubic grids centered in `centers` of size `distance` and dimensionality `dim`. :param structure: aiida.orm.StructureData node used to get the cell of the material. :param centers: aiida.orm.ArrayData containing an array named `centers`. Each element of `centers` is used to generate a cubic grid around it. :param distance: aiida.orm.Float indicating the lateral size of the cubic grid. :param dim: aiida.orm.Int determining the dimensionality of the grid. e.g.: dim=1 -> 5x1x1 dim = 2 -> 5x5x1 dim = 3 -> 5x5x5 :return: aiida.orm.KpointsData containing the generated grids. """ if not isinstance(structure, orm.StructureData): raise InputValidationError( 'Invalide type {} for parameter `structure`'.format( type(structure))) if not isinstance(centers, orm.ArrayData): raise InputValidationError( 'Invalide type {} for parameter `centers`'.format(type(centers))) if not isinstance(distance, orm.Float): raise InputValidationError( 'Invalide type {} for parameter `distance`'.format(type(distance))) npoints = 5 centers = centers.get_array('pinned') dist = distance.value / (npoints - 1) dim = dim.value # yapf: disable l = np.arange(-(npoints-1)//2, (npoints-1)//2 + 1) + ((npoints + 1)%2) * 0.5 lx = l ly = l if dim > 1 else [0,] lz = l if dim > 2 else [0,] grid = np.array(list(product(lx, ly, lz))) * dist res = np.empty((0,3)) for n,c in enumerate(centers): new = c + grid if n == 0: attach = new else: old_tree = KDTree(res) new_tree = KDTree(new) query = new_tree.query_ball_tree(old_tree, r=dist*1.74) attach = np.array([new[n] for n,q in enumerate(query) if not q]) if len(attach): res = np.vstack((res, attach)) kpt = orm.KpointsData() kpt.set_cell_from_structure(structure) kpt.set_kpoints(res, cartesian=True) return kpt
def join(net1, net2, L_max=0.99):
    r"""
    Joins two networks together topologically including new connections

    Parameters
    ----------
    net1 : dictionary
        A dictionary containing 'vert.coords' and 'edge.conns'.
    net2 : dictionary
        A dictionary containing 'vert.coords' and 'edge.conns'
    L_max : float
        The maximum distance between vertices below which they are called
        neighbors

    Returns
    -------
    network : dict
        A dictionary containing 'vert.coords' vertices from both ``net1`` and
        ``net2``, and ``edge.conns`` with original connections plus new ones
        found during the join process.

    Notes
    -----
    This function uses ``scipy.spatial.KDTree``.

    """
    # Perform neighbor query
    from scipy.spatial import KDTree
    t1 = KDTree(net1['vert.coords'])
    t2 = KDTree(net2['vert.coords'])
    pairs = t1.query_ball_tree(t2, r=L_max)
    # Combine existing network data
    net3 = {}
    Np1 = net1['vert.coords'].shape[0]
    Np2 = net2['vert.coords'].shape[0]
    net3['vert.coords'] = np.vstack((net1.pop('vert.coords'),
                                     net2.pop('vert.coords')))
    net3['edge.conns'] = np.vstack((net1.pop('edge.conns'),
                                    net2.pop('edge.conns') + Np1))
    # Convert kdtree result into new connections
    nnz = sum([len(row) for row in pairs])
    conns = np.zeros((nnz, 2), dtype=int)
    i = 0
    for j, row in enumerate(pairs):
        for col in row:
            conns[i, :] = j, col + Np1
            i += 1
    # Add new connections to network
    net3['edge.conns'] = np.vstack((net3.pop('edge.conns'), conns))
    # Finally, expand any other data arrays on given networks
    keys = set(net1.keys()).union(net2.keys())
    for item in keys:
        temp1 = net1.pop(item, np.zeros(Np1)*np.nan)
        temp2 = net2.pop(item, np.zeros(Np2)*np.nan)
        net3[item] = np.concatenate((temp1, temp2), axis=0)
    return net3
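# Hedged usage sketch for join above, with two tiny made-up networks. Vertices of
# net1 and net2 that are closer than L_max get connected by new edges.
import numpy as np

net1 = {'vert.coords': np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0]]),
        'edge.conns': np.array([[0, 1]])}
net2 = {'vert.coords': np.array([[0.5, 0.0, 0.0], [5.0, 5.0, 5.0]]),
        'edge.conns': np.array([[0, 1]])}

net3 = join(net1, net2, L_max=0.99)
print(net3['vert.coords'].shape)   # (4, 3)
print(net3['edge.conns'])          # original edges plus the new cross connections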
def model_tree(self, list_coord1, distpot=6.0, list_coord2=None):
    """
    Build a KD-tree on list_coord1 (and optionally list_coord2) and return, for
    each point, the list of neighbouring point indices within distpot.
    """
    try:
        from scipy.spatial import cKDTree
        coordtree = cKDTree(list_coord1)
        if list_coord2 is not None:
            coordtree1 = cKDTree(list_coord2)
    except ImportError:
        from scipy.spatial import KDTree
        coordtree = KDTree(list_coord1)
        if list_coord2 is not None:
            coordtree1 = KDTree(list_coord2)
    if list_coord2 is not None:
        neigh_points = coordtree.query_ball_tree(coordtree1, distpot)
        # use count_neighbors if the corresponding indices are not required
    else:
        neigh_points = coordtree.query_ball_tree(coordtree, distpot)
    return neigh_points
def astroalign_optimized_find_transform(source, target_controlp, target_invariant_tree, target_asterisms): """ A faster version of astroalign.find_transform considering that we know the target control points, invariants tree and asterisms. For details, see astroalign.find_transform This allows to compute control points once for reference frame """ source_controlp = astroalign._find_sources( source)[:astroalign.MAX_CONTROL_POINTS] # Check for low number of reference points if len(source_controlp) < 3: raise Exception("Reference stars in source image are less than the " "minimum value (3).") if len(target_controlp) < 3: raise Exception("Reference stars in target image are less than the " "minimum value (3).") source_invariants, source_asterisms = astroalign._generate_invariants( source_controlp) source_invariant_tree = KDTree(source_invariants) matches_list = source_invariant_tree.query_ball_tree(target_invariant_tree, r=0.1) matches = [] for t1, t2_list in zip(source_asterisms, matches_list): for t2 in target_asterisms[t2_list]: matches.append(list(zip(t1, t2))) matches = np.array(matches) inv_model = astroalign._MatchTransform(source_controlp, target_controlp) n_invariants = len(matches) max_iter = n_invariants min_matches = max( 1, min(10, int(n_invariants * astroalign.MIN_MATCHES_FRACTION))) if (len(source_controlp) == 3 or len(target_controlp) == 3) and len(matches) == 1: best_t = inv_model.fit(matches) inlier_ind = np.arange(len(matches)) # All of the indices else: best_t, inlier_ind = astroalign._ransac(matches, inv_model, 1, max_iter, astroalign.PIXEL_TOL, min_matches) triangle_inliers = matches[inlier_ind] d1, d2, d3 = triangle_inliers.shape inl_arr = triangle_inliers.reshape(d1 * d2, d3) inl_unique = set(tuple(pair) for pair in inl_arr) inl_arr_unique = np.array(list(list(apair) for apair in inl_unique)) so, d = inl_arr_unique.T return best_t, (source_controlp[so], target_controlp[d])
def crop_kpoints(structure, kpt_data, centers, radius): """Crop a given set of k-points `kpt_data` that are within a spherical radius `r` from a set of centers `centers`. :param structure: aiida.orm.StructureData used to get the cell of the material. :param kpt_data: aiida.orm.KpointsData to crop. :param centers: aiida.orm.ArrayData containing an array named `centers`. Each element of `centers` is used as the center of a spherical cropping. :param radius: radius of the sphere cropping. :return: aiida.orm.KpointsData node containing the cropped kpoints """ if not isinstance(structure, orm.StructureData): raise InputValidationError( 'Invalide type {} for parameter `structure`'.format( type(structure))) if not isinstance(kpt_data, orm.KpointsData): raise InputValidationError( 'Invalide type {} for parameter `kpt_data`'.format(type(kpt_data))) if not isinstance(centers, orm.ArrayData): raise InputValidationError( 'Invalide type {} for parameter `centers`'.format(type(centers))) if not isinstance(radius, orm.Float): raise InputValidationError( 'Invalide type {} for parameter `radius`'.format(type(radius))) centers = centers.get_array('centers') if len(centers.shape) != 2 or centers.shape[1] != 3: raise InputValidationError( 'Invalide shape {} for array `centers`. Expected (*,3)'.format( centers.shape)) r = radius.value cell = np.array(structure.cell) recipr = recipr_base(cell) try: kpt_cryst = np.array(kpt_data.get_kpoints_mesh(print_list=True)) except MemoryError: return orm.Bool(False) kpt_cart = np.dot(kpt_cryst, recipr) c_cryst = centers c_cart = np.dot(c_cryst, recipr) kpt_cart = KDTree(kpt_cart) centers = KDTree(c_cart) query = kpt_cart.query_ball_tree(centers, r=r) where = [n for n, l in enumerate(query) if len(l)] new = orm.KpointsData() new.set_kpoints(kpt_cryst[where]) return new
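# The cropping step of crop_kpoints above, in isolation as a self-contained sketch:
# a k-point is kept if it lies within radius r of at least one centre (all
# coordinates already in Cartesian units here; the values are illustrative).
import numpy as np
from scipy.spatial import KDTree

kpt_cart = np.array([[0.00, 0.0, 0.0], [0.05, 0.0, 0.0], [0.50, 0.5, 0.5]])
c_cart = np.array([[0.0, 0.0, 0.0]])
r = 0.1

query = KDTree(kpt_cart).query_ball_tree(KDTree(c_cart), r=r)
where = [n for n, l in enumerate(query) if len(l)]
print(where)  # [0, 1] -> only the two k-points near the centre survive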
def find_pairs_within_d(X, r):
    npoints = X.shape[0]
    ntimes = X.shape[1]
    pairs = [set() for _ in range(npoints)]
    for time in range(ntimes):
        tree = KDTree(X[:, time, :])
        for i, js in enumerate(tree.query_ball_tree(tree, r)):
            js = [j for j in js if j != i]
            pairs[i] = pairs[i].union(js)
    return pairs
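# Hedged usage sketch for find_pairs_within_d above: 5 points tracked over 10 time
# steps in 2D, shaped (npoints, ntimes, ndim) as the function expects.
import numpy as np
from scipy.spatial import KDTree

rng = np.random.default_rng(0)
X = rng.random((5, 10, 2))

pairs = find_pairs_within_d(X, r=0.3)
# pairs[i] is the set of other point indices that came within 0.3 of point i
# at any time step.
print(pairs)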
class KdTreeBlocking(BaseBlocking): """ A blocking technique based on KdTree """ def __init__(self, ref_attr_index, target_attr_index, threshold=0.1): super(KdTreeBlocking, self).__init__(ref_attr_index, target_attr_index) self.threshold = threshold self.reftree = None self.targettree = None self.nb_elements = None def _fit(self, refset, targetset): """ Fit the blocking """ firstelement = refset[0][self.ref_attr_index] self.nb_elements = len(refset) idsize = len(firstelement) if isinstance(firstelement, (tuple, list)) else 1 idelement = (0,) * idsize # KDTree is expecting a two-dimensional array if idsize == 1: self.reftree = KDTree([(elt[self.ref_attr_index],) or idelement for elt in refset]) self.targettree = KDTree([(elt[self.target_attr_index],) or idelement for elt in targetset]) else: self.reftree = KDTree([elt[self.ref_attr_index] or idelement for elt in refset]) self.targettree = KDTree([elt[self.target_attr_index] or idelement for elt in targetset]) def _iter_blocks(self): """ Iterator over the different possible blocks. Returns ------- (block1, block2): The blocks are always (reference_block, target_block) and containts the indexes of the record in the corresponding dataset. """ extraneighbours = self.reftree.query_ball_tree(self.targettree, self.threshold) neighbours = [] for ind in xrange(self.nb_elements): if not extraneighbours[ind]: continue _ref = [self.refids[ind],] _target = [self.targetids[v] for v in extraneighbours[ind]] neighbours.append((_ref, _target)) for block1, block2 in neighbours: if len(block1) and len(block2): yield block1, block2 def _cleanup(self): """ Cleanup blocking for further use (e.g. in pipeline) """ self.reftree = None self.targettree = None self.nb_elements = None
def remove_duplicates(center_centroids, center_contours, other_centroids, shift):
    picks = []
    if len(other_centroids) > 0:
        picks = []
        center_tree = KDTree(center_centroids)
        other_tree = KDTree(other_centroids - np.ones(other_centroids.shape) * shift)
        q = center_tree.query_ball_tree(other_tree, overlap_distance)
        for (k, neighbour_list) in enumerate(q):
            if len(neighbour_list) < 1:
                picks.append(k)
        return center_centroids[picks], list(np.array(center_contours)[picks])
    else:
        return center_centroids, center_contours
def estimate_location(points):
    # finding half as many points which are close to each other to localize
    num_relevant_points = len(points) // 2
    # checking that distance between points is less than length_threshold
    length_threshold = 6
    kd = KDTree(points)
    # find a collection of points less than length_threshold apart
    ball = kd.query_ball_tree(kd, length_threshold)
    # keep shrinking the length_threshold until we have the correct number of points
    for i in range(1, 4):
        if len(ball[0]) > num_relevant_points + 1:
            length_threshold = length_threshold - 1
            ball = kd.query_ball_tree(kd, length_threshold)
        else:
            break
    # this needs to be generalized
    if len(points) == 6:
        indices = ball[1]
    else:
        indices = ball[0]
    all_points = points.tolist()
    relevant_points = [all_points[idx] for idx in indices]
    # average of relevant points is used as localization estimate
    estimated_point = np.mean(np.array(relevant_points), axis=0)
    return estimated_point
def compute(self, dataset_pool):
    parcels = self.get_dataset()
    if self.filter is not None:
        index = where(parcels[self.filter] > 0)[0]
    else:
        index = arange(parcels.size())
    arr = parcels[self.quantity][index]
    coords = column_stack((parcels["x_coord_sp"][index],
                           parcels["y_coord_sp"][index]))
    kd_tree = KDTree(coords, 100)
    KDTresults = kd_tree.query_ball_tree(kd_tree, self.radius)
    result = zeros(parcels.size(), dtype=arr.dtype)
    tmp = array(map(lambda l: arr[l].sum(), KDTresults))
    result[index] = tmp
    return result
def find_affine_transform(test_srcs, ref_srcs, max_pix_tol=2., min_matches_fraction=0.8, invariant_map=None): """ Return the 2 by 3 affine transformation M that maps pixel coordinates (indices) from the reference image r = (x, y) into the test image t = (x, y). t = M * r """ if len(test_srcs) < 3: raise Exception( "Test sources are less than the minimum value of points (3).") if invariant_map is None: inv_map = InvariantTriangleMapping() if len(ref_srcs) < 3: raise Exception( "Ref sources are less than the minimum value of points (3).") # generate_invariants should return a list of the invariant tuples for each # asterism and a corresponding list of the indices that make up the astrsm ref_invariants, ref_asterisms = \ inv_map.generate_invariants(ref_srcs, nearest_neighbors=7) ref_invariant_tree = KDTree(ref_invariants) test_invariants, test_asterisms = \ inv_map.generate_invariants(test_srcs, nearest_neighbors=5) test_invariant_tree = KDTree(test_invariants) # 0.03 is just an empirical number that returns about the same number of # matches than inputs matches_list = \ test_invariant_tree.query_ball_tree(ref_invariant_tree, 0.03) matches = [] # t1 is an asterism in test, t2 in ref for t1, t2_list in zip(test_asterisms, matches_list): for t2 in np.array(ref_asterisms)[t2_list]: matches.append(zip(t2, t1)) matches = np.array(matches) inv_model = inv_map.MatchTransform(ref_srcs, test_srcs) n_invariants = len(matches) max_iter = n_invariants min_matches = min(10, int(n_invariants * min_matches_fraction)) best_m = ransac(matches, inv_model, 1, max_iter, max_pix_tol, min_matches) return best_m
def _greedy_cluster_batch(self, all_tree, index_map, batch_map, batch_points,
                          clusters, clustered_points):
    query_tree = KDTree(np.array(batch_points))
    result = query_tree.query_ball_tree(all_tree, self.radius, eps=0.1)
    for i, pts in enumerate(result):
        index_in_all_points = batch_map[i]
        if index_in_all_points in clustered_points:
            continue
        clusters[index_in_all_points] = set([index_in_all_points])
        clustered_points.add(index_in_all_points)
        pts = {index_map[pt] for pt in pts}
        pts -= clustered_points
        clusters[index_in_all_points] |= pts
        clustered_points |= pts
    return clusters, clustered_points
class test_sparse_distance_matrix:

    def setUp(self):
        n = 50
        m = 4
        self.T1 = KDTree(np.random.randn(n, m), leafsize=2)
        self.T2 = KDTree(np.random.randn(n, m), leafsize=2)
        self.r = 0.3

    def test_consistency_with_neighbors(self):
        M = self.T1.sparse_distance_matrix(self.T2, self.r)
        r = self.T1.query_ball_tree(self.T2, self.r)
        for i, l in enumerate(r):
            for j in l:
                assert_equal(M[i, j], distance(self.T1.data[i], self.T2.data[j]))
        for ((i, j), d) in M.items():
            assert j in r[i]
def remove_duplicates(center_centroids, center_contours, other_centroids, shift, min_distance): """Removes duplicate contours and centroids from center_contours and center_centroids. Uses KDTree to remove duplicates in overlapping regions between two adjacent blocks. The objects in the other block are shifted, This is necessary because all points are relative to their corresponding block. Arguments --------- center_centroids : (N,2) numpy array Array containing points relative to the center block. center_contours : list of length N List with contours corresponding to points in center block. other_centroids : (M,2) numpy array Array containing points relative to other block. shift : 2-tuple Tuple of the form (w, h), such that w is added to the first component of each point in other_centroids, and h is added to the second component. min_distance : float Minimum distance between to centroids. If their distance is smaller, remove the centroid and its corresponding contour from the center block. Returns ------- center_centroids : (N*,2) numpy array Numpy array containing points which have not been removed, note that N* <= N. center_contours : list of length N* List containing contours corresponding to the points which have not been removed. """ picks = [] if len(other_centroids) > 0: picks = [] center_tree = KDTree(center_centroids) other_tree = KDTree(other_centroids - np.ones(other_centroids.shape) * shift) q = center_tree.query_ball_tree(other_tree, min_distance) for (k, neighbour_list) in enumerate(q): if len(neighbour_list) < 1: picks.append(k) return center_centroids[picks], list(np.array(center_contours)[picks]) else: return center_centroids, center_contours
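# Hedged usage sketch for remove_duplicates above: two adjacent blocks overlapping in
# x, where the second centroid of the centre block reappears in the other block once
# that block's local coordinates are shifted back by (100, 0).
import numpy as np

center_centroids = np.array([[10.0, 10.0], [95.0, 40.0]])
center_contours = ["contour_a", "contour_b"]
other_centroids = np.array([[195.0, 40.0]])   # local to the other block

kept_centroids, kept_contours = remove_duplicates(
    center_centroids, center_contours, other_centroids,
    shift=(100, 0), min_distance=5.0)
print(kept_centroids, kept_contours)  # only the first centroid and contour survive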
def compute(self, dataset_pool): with logger.block( name="compute variable jobs_within_DDD_of_parcel_weighted with DDD=%s" % self.radius, verbose=False ): results = None distances = None with logger.block(name="trying to read cache files", verbose=False): try: results = self._load_results() except IOError: logger.log_warning("Cache file %s could not be loaded" % self.cache_file_name) try: distances = self._load_distances() except IOError: logger.log_warning("Cache file %s could not be loaded" % self.cache_distances_file_name) with logger.block(name="initialize datasets", verbose=False): parcels = self.get_dataset() arr = parcels.sum_dataset_over_ids(dataset_pool.get_dataset("job"), constant=1) if not results or not distances: with logger.block(name="initialize coords", verbose=False): coords = column_stack((parcels.get_attribute("x_coord_sp"), parcels.get_attribute("y_coord_sp"))) with logger.block(name="build KDTree", verbose=False): kd_tree = KDTree(coords, 100) with logger.block(name="compute neighbourhoods"): results = kd_tree.query_ball_tree(kd_tree, self.radius) with logger.block(name="compute euclidean distances"): distances = kd_tree.sparse_distance_matrix(kd_tree, self.radius) with logger.block(name="cache neighbourhoods"): if not SimulationState().cache_directory_exists(): logger.log_warning("Cache does not exist and is created.") SimulationState().create_cache_directory() self._cache_results(results) self._cache_distances(distances) with logger.block(name="Sum weighted jobs in neighbourhood", verbose=False): # return_values = array(map(lambda l: arr[l].sum(), results)) return_values = array(self.euclidean_accessibility_for_parcel(results, distances, arr)) return return_values
def remove_duplicates(center_centroids, center_contours, other_centroids, shift):
    """Uses KDTree to remove duplicates in overlapping regions between two adjacent
    blocks. Shift should be a tuple of the form (w, h), such that w is added to the
    first component of each point in other_centroids, and h is added to the second
    component. This is necessary because all points are relative to their
    corresponding block."""
    picks = []
    if len(other_centroids) > 0:
        picks = []
        center_tree = KDTree(center_centroids)
        other_tree = KDTree(other_centroids - np.ones(other_centroids.shape) * shift)
        q = center_tree.query_ball_tree(other_tree, overlap_distance)
        for (k, neighbour_list) in enumerate(q):
            if len(neighbour_list) < 1:
                picks.append(k)
        return center_centroids[picks], list(np.array(center_contours)[picks])
    else:
        return center_centroids, center_contours
def __add_matching_edges(self, distance_threshold, voxel_size): assert (len(voxel_size) == 3) assert (isinstance(voxel_size, np.ndarray)) assert (isinstance(distance_threshold, int)) gt_nodes = {} rec_nodes = {} for v, data in self.nodes(data=True): if data["line_type"] == 'gt': gt_nodes[v] = np.array([data["z"], data["y"], data["x"]]) elif data["line_type"] == 'rec': rec_nodes[v] = np.array([data["z"], data["y"], data["x"]]) else: raise ValueError("Node with line type {} in db. Abort.".format( data["line_type"])) gt_positions = np.array(list(gt_nodes.values())) gt_node_ids = list(gt_nodes.keys()) rec_positions = np.array(list(rec_nodes.values())) rec_node_ids = list(rec_nodes.keys()) if len(gt_positions) > 0 and len(rec_positions) > 0: gt_tree = KDTree(gt_positions * voxel_size) rec_tree = KDTree(rec_positions * voxel_size) """ From the docs: KDTree.query_ball_tree(other, r, p=2.0, eps=0) For each element self.data[i] of this tree, results[i] is a list of the indices of its neighbors in other.data. """ results = gt_tree.query_ball_tree(rec_tree, r=distance_threshold) for gt_idx in range(len(results)): gt_node_id = gt_node_ids[gt_idx] for rec_idx in results[gt_idx]: rec_node_id = rec_node_ids[rec_idx] self.add_edge(gt_node_id, rec_node_id, edge_type="matching")
def fuse_close_companions(x, d):
    """
    Function: fuse points in x which are closer than d

    Arguments
    ---------
    x[:,:]: float
       array of points [n_point:n_dim]
    d: float
       fuse points that are closer than this distance

    Result
    ------
    x_new[:,:]
       new version of x with fused points
    """
    # make KDTree object
    tree = KDTree(x)
    # make neighbour list
    neib_list = tree.query_ball_tree(tree, d)
    # loop through x and make a new list of fused points
    x_new = []
    point_active = [True] * x.shape[0]
    for i in range(x.shape[0]):
        if point_active[i]:
            # fuse neighbours
            x_new.append(x[neib_list[i], :].mean(axis=0))
            # loop over neighbour list and deactivate points
            for j in neib_list[i]:
                point_active[j] = False
    return np.array(x_new)
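# Hedged usage sketch for fuse_close_companions above: the first two points are
# closer than d and get fused into their mean, the third is left untouched.
import numpy as np
from scipy.spatial import KDTree

x = np.array([[0.0, 0.0],
              [0.1, 0.0],
              [5.0, 5.0]])
print(fuse_close_companions(x, d=0.5))
# [[0.05 0.  ]
#  [5.   5.  ]]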
def Near(V, r_n):
    # Create a KD Tree first
    KDT = KDTree(data=V)
    # Create the KD Tree to search against
    search_against = KDTree(data=V)
    # run query_ball_tree; returns list of lists
    results = KDT.query_ball_tree(other=search_against, r=r_n)
    # for vertex indexed by i, results[i] contains all indices j such that dist(v[i],v[j]) < r
    edges = [None] * len(V)
    for i in range(len(V)):
        edges[i] = []
        for j in results[i]:
            if i != j:
                edges[i].append(j)
    return edges
def __add_matching_edges(self, distance_threshold, voxel_size): """ Connect ground truth and reconstruction vertices of the matching graph to each other that are below a certain distance threshold. Distance threshold should be given in physical coordinates together with the voxel size. """ logger.info("Add matching edges...") gt_positions = self.get_positions("gt") rec_positions = self.get_positions("rec") # tag_positions[i] == matching_graph.get_position[tag_mv_ids[i]] gt_mv_ids = self.get_mv_ids("gt") rec_mv_ids = self.get_mv_ids("rec") logger.info("Initialize KDTrees...") gt_tree = KDTree(gt_positions * np.array(voxel_size)) rec_tree = KDTree(rec_positions * np.array(voxel_size)) """ From the docs: KDTree.query_ball_tree(other, r, p=2.0, eps=0) For each element self.data[i] of this tree, results[i] is a list of the indices of its neighbors in other.data. """ logger.info("Query ball tree...") results = gt_tree.query_ball_tree(rec_tree, r=distance_threshold) logger.info("Add matching edges to graph...") for gt_id in range(len(results)): mv_id_source = gt_mv_ids[gt_id] for rec_id in results[gt_id]: mv_id_target = rec_mv_ids[rec_id] edge = self.add_edge(mv_id_source, mv_id_target) # Set matching edge property: self.set_matching(edge) # Add distance: self.set_distance(edge)
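# The scaling step used above, as a small self-contained sketch with made-up voxel
# coordinates and a hypothetical anisotropic voxel size: multiplying by the voxel
# size puts both trees in physical units, so the distance threshold is physical too.
import numpy as np
from scipy.spatial import KDTree

voxel_size = np.array([40.0, 4.0, 4.0])            # hypothetical (z, y, x) voxel size
gt_voxels = np.array([[1, 10, 10], [3, 50, 50]])
rec_voxels = np.array([[1, 11, 10], [9, 50, 50]])

gt_tree = KDTree(gt_voxels * voxel_size)
rec_tree = KDTree(rec_voxels * voxel_size)

# The second pair is 240 physical units apart in z, so it does not match even
# though it is only 6 voxels away.
print(gt_tree.query_ball_tree(rec_tree, r=20))     # [[0], []]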
def calculate_point_density(self, list_points, list_ids, radius):
    sys.setrecursionlimit(10000)  # TODO: Check alternatives
    # Set precision for decimal numbers
    getcontext().prec = 4
    tree = KDTree(np.array(list_points))
    neighbors = tree.query_ball_tree(tree, radius)
    frequency = np.array([len(neighbour_list) for neighbour_list in neighbors])
    density_list = ((frequency / Decimal(len(list_ids))) * 100).tolist()
    density = zip(list_ids, density_list)
    # Remove double id codes
    res = set()
    res = [item for item in density if item[0] not in res and not res.add(item[0])]
    return res
class test_sparse_distance_matrix:

    def setUp(self):
        n = 50
        m = 4
        np.random.seed(1234)
        self.T1 = KDTree(np.random.randn(n, m), leafsize=2)
        self.T2 = KDTree(np.random.randn(n, m), leafsize=2)
        self.r = 0.5

    def test_consistency_with_neighbors(self):
        M = self.T1.sparse_distance_matrix(self.T2, self.r)
        r = self.T1.query_ball_tree(self.T2, self.r)
        for i, l in enumerate(r):
            for j in l:
                assert_almost_equal(M[i, j],
                                    distance(self.T1.data[i], self.T2.data[j]),
                                    decimal=14)
        for ((i, j), d) in M.items():
            assert_(j in r[i])

    def test_zero_distance(self):
        # raises an exception for bug 870
        self.T1.sparse_distance_matrix(self.T1, self.r)
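# A minimal, self-contained sketch of the consistency the test class above checks:
# every stored entry of sparse_distance_matrix corresponds to a query_ball_tree
# neighbour pair, with the matching Euclidean distance.
import numpy as np
from scipy.spatial import KDTree

np.random.seed(1234)
T1 = KDTree(np.random.randn(50, 4), leafsize=2)
T2 = KDTree(np.random.randn(50, 4), leafsize=2)
r = 0.5

M = T1.sparse_distance_matrix(T2, r)      # dok matrix of pairwise distances <= r
neighbours = T1.query_ball_tree(T2, r)    # the same pairs, as index lists

for (i, j), d in M.items():
    assert j in neighbours[i]
    assert np.isclose(d, np.linalg.norm(T1.data[i] - T2.data[j]))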
def match_radec( cat1, cat2, offset_ra, offset_dec, dt_ratio, count, imname1, imname2, ax1, ax2 ): ''' FILE = open('xy19.reg', 'r') dat = FILE.readlines() FILE.close() FILE = open('plant19.txt', 'r') dat2 = FILE.readlines() FILE.close() RA_plant, DEC_plant, MAG_plant = [],[],[] for k in dat: k = k.strip().split() RA_plant.append( float(k[0]) ) DEC_plant.append( float(k[1]) ) for k in dat2: k = k.strip().split() MAG_plant.append( float(k[6]) ) #print RA_plant[-1], DEC_plant[-1] RA_plant, DEC_plant = numpy.asarray( RA_plant ), numpy.asarray( DEC_plant ) MAG_plant = numpy.asarray( MAG_plant ) ''' cRA10, cDEC10 = numpy.asarray(cat1[ 'XWIN_WORLD' ][:]), numpy.asarray(cat1['YWIN_WORLD'][:]) cRA20, cDEC20 = numpy.asarray(cat2[ 'XWIN_WORLD' ][:]), numpy.asarray(cat2['YWIN_WORLD'][:]) RA1, DEC1 = 1.0*cat1[ 'XWIN_WORLD' ][:], 1.0*cat1['YWIN_WORLD'][:] RA2, DEC2 = 1.0*cat2[ 'XWIN_WORLD' ][:], 1.0*cat2['YWIN_WORLD'][:] #print RA1 #print DEC1 X1, Y1, X2, Y2 = cat1[ 'XWIN_IMAGE' ], cat1['YWIN_IMAGE'], cat2[ 'XWIN_IMAGE' ], cat2['YWIN_IMAGE'] MAG1, MAG2 = cat1[ 'MAG_APER' ], cat2[ 'MAG_APER' ] SN1, SN2 = cat1[ 'FLUX_APER' ] / cat1[ 'FLUXERR_APER' ], cat2[ 'FLUX_APER' ] / cat2[ 'FLUXERR_APER' ] RA1 *= 3600.0 / numpy.cos( DEC1 * math.pi / 180.0 ) RA2 *= 3600.0 / numpy.cos( DEC2 * math.pi / 180.0 ) #RA_plant *= 3600.0 / numpy.cos( DEC_plant * math.pi / 180.0 ) RA1 += offset_ra #RA_plant += offset_ra DEC1 *= 3600.0 DEC2 *= 3600.0 #DEC_plant *= 3600.0 DEC1 += offset_dec #DEC_plant += offset_dec tree1 = KDTree( numpy.asarray( [RA1, DEC1] ).T ) tree2 = KDTree( numpy.asarray( [RA2, DEC2] ).T ) #tree3 = KDTree( numpy.asarray( [RA_plant, DEC_plant] ).T ) match_pairs = tree1.query_ball_tree( tree2, r=math.ceil( dt_ratio*0.04 ) ) #plant_pairs = tree1.query_ball_tree( tree3, r=20.0*0.04 ) #print match_pairs FILE1 = open('a.reg', 'a') FILE2 = open('b.reg', 'a') X_all, Y_all = [],[] X_all2, Y_all2 = [],[] MAG_ALL1, MAG_ALL2 = [],[] SN_ALL1, SN_ALL2 = [],[] IS_PLANT = [] OBJ_info = [] for i in range(0, len(tree1.data)): for j in match_pairs[i]: if abs(MAG1[i] - MAG2[j]) <= 1.0: #print X1[i], Y1[i], X2[j], Y2[j] FILE1.write('circle(%s,%s,5) # text = {%s}\n'%( X1[i], Y1[i], count ) ) FILE1.write('circle(%s,%s,5) # text = {%s}\n'%( X2[j], Y2[j], count ) ) FILE1.write('line %s %s %s %s\n'%( X1[i], Y1[i], X2[j], Y2[j] ) ) FILE2.write('circle(%s,%s,20) # text = {%s}\n'%( X2[j], Y2[j], count ) ) #X_all.append( X1[i] ) #Y_all.append( Y1[i] ) #X_all2.append( X2[i] ) #Y_all2.append( Y2[i] ) #MAG_ALL1.append( MAG1[i] ) #MAG_ALL2.append( MAG2[j] ) #SN_ALL1.append( SN1[i] ) #SN_ALL2.append( SN2[j] ) OBJ_info.append( [ X1[i], Y1[i], cRA10[i], cDEC10[i], MAG1[i], X2[j], Y2[j], cRA20[j], cDEC20[j], MAG2[j], SN1[i], SN2[j] ] ) #if len(plant_pairs[i]) != 0: # print 'PLANT:',X1[i],Y1[i], '%.3f'%(tree3.query([ RA1[i],DEC1[i] ])[0] / 0.04) # print MAG1[i], MAG_plant[ tree3.query([ RA1[i],DEC1[i] ])[1] ] - MAG1[i], MAG_plant[ tree3.query([ RA1[i],DEC1[i] ])[1] ] - MAG2[i] # IS_PLANT.append( 1 ) #else: # print 'NOT PLANT:',X1[i],Y1[i], '%.3f'%(tree3.query([ RA1[i],DEC1[i] ])[0] / 0.04) # IS_PLANT.append( 0 ) #retval = topng(imname1, X1[i], Y1[i], imname2, X2[j], Y2[j], 'junk1.fits', 'junk2.fits',ax1, ax2) #if retval: # pylab.savefig('%s_%s_%s_good.png'%(str(count).rjust(3,'0'), int(round(X1[i])), int(round(Y1[i]))), bbox_inches='tight', pad_inches=0, dpi=120) #else: # pylab.savefig('%s_%s_%s_bad.png'%(str(count).rjust(3,'0'), int(round(X1[i])), int(round(Y1[i]))), bbox_inches='tight', pad_inches=0, dpi=120) #if IS_PLANT[-1]: # ax2.annotate( 
xy=(10,10), s=MAG_plant[ tree3.query([ RA1[i],DEC1[i] ])[1] ], color='w' ) # # pylab.savefig('%s_%s_%sP.png'%(str(count).rjust(3,'0'), int(round(X1[i])), int(round(Y1[i]))), bbox_inches='tight', pad_inches=0, dpi=120) #else: # pylab.savefig('%s_%s_%sN.png'%(str(count).rjust(3,'0'), int(round(X1[i])), int(round(Y1[i]))), bbox_inches='tight', pad_inches=0, dpi=120) FILE1.close() FILE2.close() #MAG_ALL1, SN_ALL1, MAG_ALL2, SN_ALL2 = numpy.asarray( MAG_ALL1 ), numpy.asarray( SN_ALL1 ), numpy.asarray( MAG_ALL2 ), numpy.asarray( SN_ALL2 ) #IS_PLANT = numpy.asarray( IS_PLANT ) #ok = numpy.where( numpy.asarray( IS_PLANT ) ) return OBJ_info #X_all, Y_all, X_all2, Y_all2, SN_ALL1, SN_ALL2
def run(self, rinput): self.logger.info('starting processing for object detection') flow = self.init_filters(rinput) hdulist = basic_processing_with_combination(rinput, flow=flow) hdr = hdulist[0].header self.set_base_headers(hdr) self.logger.debug('finding point sources') try: filtername = hdr['FILTER'] readmode = hdr['READMODE'] rotang = hdr['ROTANG'] detpa = hdr['DETPA'] dtupa = hdr['DTUPA'] dtub, dtur = datamodel.get_dtur_from_header(hdr) except KeyError as error: self.logger.error(error) raise RecipeError(error) data = hdulist[0].data # Copy needed in numpy 1.7 # This seems already bitswapped?? # FIXME: check this works offline/online # ndata = data.byteswap().newbyteorder() # data = data.byteswap(inplace=True).newbyteorder() snr_detect = 5.0 fwhm = 4.0 npixels = 15 box_shape = [64, 64] self.logger.info('point source detection2') self.logger.info('using internal mask to remove corners') # Corners mask = numpy.zeros_like(data, dtype='int32') mask[2000:, 0:80] = 1 mask[2028:, 2000:] = 1 mask[:50, 1950:] = 1 mask[:100, :50] = 1 # Remove corner regions self.logger.info('compute background map, %s', box_shape) bkg = sep.Background(data) self.logger.info('reference fwhm is %5.1f pixels', fwhm) self.logger.info('detect threshold, %3.1f over background', snr_detect) self.logger.info('convolve with gaussian kernel, FWHM %3.1f pixels', fwhm) sigma = fwhm * gaussian_fwhm_to_sigma # kernel = Gaussian2DKernel(sigma) kernel.normalize() thresh = snr_detect * bkg.globalrms data_s = data - bkg.back() objects, segmap = sep.extract(data - bkg.back(), thresh, minarea=npixels, filter_kernel=kernel.array, segmentation_map=True, mask=mask) fits.writeto('segmap.fits', segmap) self.logger.info('detected %d objects', len(objects)) # Hardcoded values rs2 = 15.0 fit_rad = 10.0 flux_min = 1000.0 flux_max = 30000.0 self.logger.debug('Flux limit is %6.1f %6.1f', flux_min, flux_max) # FIXME: this should be a view, not a copy xall = objects['x'] yall = objects['y'] mm = numpy.array([xall, yall]).T self.logger.info('computing FWHM') # Find objects with pairs inside fit_rad kdtree = KDTree(mm) nearobjs = (kdtree.query_ball_tree(kdtree, r=fit_rad)) positions = [] for idx, obj in enumerate(objects): x0 = obj['x'] y0 = obj['y'] sl = image_box2d(x0, y0, data.shape, (fit_rad, fit_rad)) # sl_sky = image_box2d(x0, y0, data.shape, (rs2, rs2)) part_s = data_s[sl] # Logical coordinates xx0 = x0 - sl[1].start yy0 = y0 - sl[0].start _, fwhm_x, fwhm_y = compute_fwhm_2d_simple(part_s, xx0, yy0) if min(fwhm_x, fwhm_x) < 3: continue if flux_min > obj['peak'] or flux_max < obj['peak']: continue # nobjs is the number of object inside fit_rad nobjs = len(nearobjs[idx]) flag = 0 if nobjs == 1 else 1 positions.append([idx, x0, y0, obj['peak'], fwhm_x, fwhm_y, flag]) self.logger.info('saving photometry') positions = numpy.array(positions) positions_alt = positions self.logger.info('end processing for object detection') result = self.create_result(frame=hdulist, positions=positions_alt, positions_alt=positions_alt, filter=filtername, DTU=dtub, readmode=readmode, ROTANG=rotang, DETPA=detpa, DTUPA=dtupa, param_recenter=rinput.recenter, param_max_recenter_radius=rinput.max_recenter_radius, param_box_half_size=rinput.box_half_size ) return result
def drift_calb(data_coord,data,calb_coord,calb,interp_method,buff, csv_path): """ Takes four Numpy Arrays 1 - Data Corrdinates in X, Y, (Z) 2 - Data 3 - Calibration line coordinates in X, Y, (Z) - Must be the same format as data coordinates 4 - Calibration Data - must be the same format as Data """ print "\n"'Calibration started' flusher() #produces tree of data and calibration coordinates dt_tree = KDTree(data_coord) cl_tree = KDTree(calb_coord) print 'trees created' flusher() #creates an array containing indexes of calibration point within specified distance of each measured data point buff = float(buff) coords = dt_tree.query_ball_tree(cl_tree, buff) #print coords print len(coords), 'matching coordinates'"\n" flusher() temp = int(len(coords)) #creates empty array the same size as data array calb_array = np.empty((temp,9)) #produces temp blank lists for i in range(3,9): vars()['temp',i]=[] xp = [] j=0 #creates list of calibration data for row in coords: if row: xp.append(j) for i in range(3,9): temp_calb = 0 for k in range(len(row)): temp_calb += calb[row[k],i] tempdata = data[(j),(i)] - temp_calb/len(row) vars()['temp',i].append(tempdata) j += 1 Northings = data[:,0] Eastings = data[:,1] Altitude = data[:,2] calb_array = np.column_stack((Northings,Eastings,Altitude)) #Creates list of interpolated calibration data using chosen method for i in range(3,9): if interp_method: s = interpolate.UnivariateSpline(xp,vars()['temp',i],k=3) vars()['c'+str(i)] = s(range(j)) else: vars()['c' + str(i)] = np.interp(range(j),xp,vars()['temp',i]) calb_array = np.column_stack((calb_array,vars()['c'+str(i)])) #prints the calibration curves to csv files to check for erroneous results. np.savetxt(csv_path + '/' + 'c'+str(i)+'.csv', vars()['c'+str(i)], delimiter=',') out_array = np.subtract(data,calb_array) out_array[:,0:3] = data_coord return out_array
class PolyRigidDiffeomorphism(basis.Model):
    def __init__(self, control_points, smooth, points=None, number_of_steps=1., step=1.):
        self.control_points = control_points.copy()
        self.control_tree = KDTree(self.control_points)
        self.smooth = smooth
        self.parameter = self.identity
        self.points = points
        self.number_of_steps = float(number_of_steps)
        self.step = float(step)
        self.time = self.step / self.number_of_steps
        if points is not None:
            self.tree = KDTree(points)

    def __getstate__(self):
        result = self.__dict__.copy()
        if 'tree' in result:
            del result['tree']
        if 'control_tree' in result:
            del result['control_tree']
        return result

    def __setstate__(self, dict):
        self.__dict__ = dict
        for name, value in dict.iteritems():
            setattr(self, name, value)
        self.control_tree = KDTree(self.control_points)
        if 'points' in dict and self.points is not None:
            self.tree = KDTree(self.points)

    @property
    def identity(self):
        return numpy.tile(
            numpy.array((0., 0., 0., 0., 0., 0.)),
            len(self.control_points)
        )

    @property
    def bounds(self):
        return numpy.tile(
            numpy.array([
                [-numpy.pi, numpy.pi],
                [-numpy.pi, numpy.pi],
                [-numpy.pi, numpy.pi],
                [-numpy.inf, numpy.inf],
                [-numpy.inf, numpy.inf],
                [-numpy.inf, numpy.inf]
            ]),
            (len(self.control_points), 1)
        )

    def transform_points(self, points):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        displacements_uw = numpy.zeros_like(points, dtype=float)
        displacements = numpy.zeros_like(points, dtype=float)
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            matrix = rigid_from_parameters(self.parameter[i * 6: (i + 1) * 6])
            rotations = matrix[:3, :3]
            translation = matrix[:3, -1]
            displacement_matrix = expm(rotations / self.number_of_steps)
            displacement_matrix[(0, 1, 2), (0, 1, 2)] -= 1
            displacement = (
                translation / self.number_of_steps +
                numpy.dot(displacement_matrix, (points_to_control - translation * self.time).T).T
            )
            displacements_uw[point_indices_to_control] += displacement
            displacements[point_indices_to_control] += displacement * weights
        total_weights_mask = (total_weights > 0).ravel()
        displacements[total_weights_mask] /= total_weights[total_weights_mask]
        return displacements + points

    def jacobian(self, points):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        jacobian = numpy.zeros((len(points), 6 * len(self.control_points), 3))
        jacobian_uw = numpy.zeros_like(jacobian, dtype=float)
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            control_point_jacobian = self.rigid_jacobian(
                self.parameter[i * 6: (i + 1) * 6],
                points_to_control
            )
            jacobian_uw[point_indices_to_control, i * 6: (i + 1) * 6, :] += control_point_jacobian
            jacobian[point_indices_to_control, i * 6: (i + 1) * 6, :] += (
                control_point_jacobian * weights[:, None]
            )
        total_weights_mask = (total_weights > 0).ravel()
        jacobian[total_weights_mask] /= total_weights[:, None][total_weights_mask]
        return jacobian

    def jacobian_position(self, points):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        jacobian = numpy.zeros((len(points), 3, 3))
        jacobian_uw = numpy.zeros_like(jacobian, dtype=float)
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            control_point_parameter = self.parameter[i * 6: (i + 1) * 6].copy()
            control_point_jacobian = rigid_from_parameters(control_point_parameter)[:-1, :-1].T
            jacobian_uw[point_indices_to_control, :, :] += control_point_jacobian
            jacobian[point_indices_to_control, :, :] += control_point_jacobian * weights[:, None]
        total_weights_mask = (total_weights > 0).ravel()
        jacobian[total_weights_mask] /= total_weights[:, None][total_weights_mask]
        return jacobian

    def affine_matrices(self):
        affine_matrices = numpy.empty((len(self.control_points), 4, 4))
        for i in xrange(len(self.control_points)):
            affine_matrices[i] = rigid_from_parameters(self.parameter[i * 6: (i + 1) * 6])
        return affine_matrices

    def kernel(self, distances2):
        return numpy.exp(-distances2 / self.smooth ** 2)

    @staticmethod
    def log(parameter):
        n_matrices = len(parameter) / 6
        matrices = numpy.empty((n_matrices, 4, 4))
        for i in xrange(n_matrices):
            p = parameter[i * 6: (i + 1) * 6].copy()
            matrices[i, :, :] = logm(rigid_from_parameters(p))
        return matrices

    @staticmethod
    def rigid_transform_points(affine_parameter, points):
        cos_phi, cos_theta, cos_psi = cos(affine_parameter[0: 3])
        sin_phi, sin_theta, sin_psi = sin(affine_parameter[0: 3])
        displacement = affine_parameter[-3:][None, :]
        x = points[:, 0][:, None]
        y = points[:, 1][:, None]
        z = points[:, 2][:, None]
        return numpy.c_[
            cos_theta * cos_psi * x +
            (-cos_phi * sin_psi + sin_phi * sin_theta * cos_psi) * y +
            (sin_phi * sin_psi + cos_phi * sin_theta * cos_psi) * z,
            cos_theta * sin_psi * x +
            (cos_phi * cos_psi + sin_phi * sin_theta * sin_psi) * y +
            (-sin_phi * cos_psi + cos_phi * sin_theta * sin_psi) * z,
            -sin_theta * x + sin_phi * cos_theta * y + cos_phi * cos_theta * z
        ] + displacement

    @staticmethod
    def rigid_transform_vectors(affine_parameter, vectors):
        cos_phi, cos_theta, cos_psi = cos(affine_parameter[0: 3])
        sin_phi, sin_theta, sin_psi = sin(affine_parameter[0: 3])
        x = vectors[:, 0][:, None]
        y = vectors[:, 1][:, None]
        z = vectors[:, 2][:, None]
        return numpy.c_[
            cos_theta * cos_psi * x +
            (-cos_phi * sin_psi + sin_phi * sin_theta * cos_psi) * y +
            (sin_phi * sin_psi + cos_phi * sin_theta * cos_psi) * z,
            cos_theta * sin_psi * x +
            (cos_phi * cos_psi + sin_phi * sin_theta * sin_psi) * y +
            (-sin_phi * cos_psi + cos_phi * sin_theta * sin_psi) * z,
            -sin_theta * x + sin_phi * cos_theta * y + cos_phi * cos_theta * z
        ]

    @staticmethod
    def rigid_jacobian(affine_parameter, points):
        centered_points = numpy.atleast_2d(points)
        x = centered_points[:, 0][:, None].flatten()
        y = centered_points[:, 1][:, None].flatten()
        z = centered_points[:, 2][:, None].flatten()
        cos_phi, cos_theta, cos_psi = cos(affine_parameter[0: 3])
        sin_phi, sin_theta, sin_psi = sin(affine_parameter[0: 3])
        jacobian = numpy.zeros((len(points), 6, 3))
        jacobian[:, 0, 0] = cos_theta * sin_psi * x
        jacobian[:, 0, 1] = (cos_phi * cos_psi + sin_psi * sin_theta * sin_phi) * y
        jacobian[:, 0, 2] = (-sin_phi * cos_psi + cos_phi * sin_theta * sin_psi) * z
        jacobian[:, 1, 0] = -sin_theta * x
        jacobian[:, 1, 1] = cos_theta * sin_phi * y
        jacobian[:, 1, 2] = cos_phi * cos_theta * z
        linear.jacobian_rotation(affine_parameter[0: 3], points, jacobian[:, 0: 3, :])
        linear.jacobian_translation(affine_parameter[3: 6], points, jacobian[:, 3: 6, :])
        return jacobian


def compute_rigid_rotation_derivatives(matrix, N):
    rotations = numpy.array([
        [[0, 0, 0], [0, 0, -1], [0, 1, 0]],
        [[0, 0, 1], [0, 0, 0], [-1, 0, 0]],
        [[0, -1, 0], [1, 0, 0], [0, 0, 0]]
    ])
    scaling_matrices = [numpy.eye(3)]
    for i in xrange(N):
        scaling_matrices.append(numpy.dot(scaling_matrices[i], matrix))
    derivative_matrices = rotations * 0.
    for n in xrange(1, N):
        for i in xrange(1, n + 1):
            for k in xrange(3):
                derivative_matrices += numpy.dot(
                    numpy.dot(scaling_matrices[i - 1], rotations[k]),
                    scaling_matrices[n - i]
                )
    return derivative_matrices
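The `self.smooth * 3` radius passed to `query_ball_tree` above acts as a truncation of the Gaussian kernel `exp(-d**2 / smooth**2)`: at that distance the weight has already decayed to exp(-9) ~ 1.2e-4, so points outside the ball contribute negligibly. A minimal, self-contained sketch of the same truncated-kernel weighting (the random data, the `smooth` value and the variable names here are illustrative, not part of the class above):

import numpy as np
from scipy.spatial import KDTree

rng = np.random.default_rng(0)
smooth = 0.5
control_points = rng.uniform(0.0, 1.0, size=(5, 3))
points = rng.uniform(0.0, 1.0, size=(200, 3))

control_tree = KDTree(control_points)
point_tree = KDTree(points)

# For each control point, the indices of the points within 3 * smooth of it.
neighbours = control_tree.query_ball_tree(point_tree, smooth * 3)

total_weights = np.zeros(len(points))
for i, idx in enumerate(neighbours):
    d2 = ((points[idx] - control_points[i]) ** 2).sum(axis=1)
    total_weights[idx] += np.exp(-d2 / smooth ** 2)

# Beyond the cutoff each kernel contribution is at most exp(-9) ~ 1.2e-4,
# so truncating the ball query barely changes the accumulated weights.
print(total_weights.min(), total_weights.max(), np.exp(-9.0))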
import time

import numpy as np
from scipy.spatial import KDTree, cKDTree

d_threshold = 1.5
x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
y = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
points = list(zip(x, y))
x2 = [1, 3, 4, 5, 6, 7, 8, 8, 10, 16]
y2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 16.1]
points2 = list(zip(x2, y2))

start = time.perf_counter()
A_tree = KDTree(points)
B_tree = KDTree(points2)
# query_ball_tree returns a list of lists: for each point of A_tree,
# the indices of the points of B_tree within d_threshold of it.
neighbors = A_tree.query_ball_tree(B_tree, d_threshold)
# sparse_distance_matrix returns a sparse matrix holding the distances
# between all pairs closer than d_threshold.
distances = A_tree.sparse_distance_matrix(B_tree, d_threshold)
print("KDTree:", time.perf_counter() - start)

start = time.perf_counter()
A_ctree = cKDTree(points)
B_ctree = cKDTree(points2)
cneighbors = A_ctree.query_ball_tree(B_ctree, d_threshold)
cdistances = A_ctree.sparse_distance_matrix(B_ctree, d_threshold)
print("cKDTree:", time.perf_counter() - start)

# The two implementations should agree on the neighbour sets
# (sort each list, since the ordering is not guaranteed).
if [sorted(n) for n in neighbors] == [sorted(n) for n in cneighbors]:
    print("KDTree and cKDTree return the same neighbours")
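The sparse distance matrix computed above is a scipy.sparse matrix keyed only by the pairs closer than `d_threshold`; converting it to COO format is one way to walk those pairs explicitly (a small sketch reusing the variables defined above):

coo = distances.tocoo()
for i, j, d in zip(coo.row, coo.col, coo.data):
    # points[i] and points2[j] lie within d_threshold of each other, at distance d
    print(points[i], points2[j], d)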
class PolyRigid(basis.Model):
    def __init__(self, control_points, smooth, points=None):
        self.control_points = control_points.copy()
        self.control_tree = KDTree(self.control_points)
        self.smooth = smooth
        self.parameter = self.identity
        self.points = points
        if points is not None:
            self.tree = KDTree(points)

    def __getstate__(self):
        result = self.__dict__.copy()
        if 'tree' in result:
            del result['tree']
        if 'control_tree' in result:
            del result['control_tree']
        return result

    def __setstate__(self, dict):
        self.__dict__ = dict
        for name, value in dict.iteritems():
            setattr(self, name, value)
        self.control_tree = KDTree(self.control_points)
        if 'points' in dict and self.points is not None:
            self.tree = KDTree(self.points)

    @property
    def identity(self):
        return numpy.tile(
            numpy.array((0., 0., 0., 0., 0., 0.)),
            len(self.control_points)
        )

    @property
    def bounds(self):
        return numpy.tile(
            numpy.array([
                [-numpy.pi, numpy.pi],
                [-numpy.pi, numpy.pi],
                [-numpy.pi, numpy.pi],
                [-numpy.inf, numpy.inf],
                [-numpy.inf, numpy.inf],
                [-numpy.inf, numpy.inf]
            ]),
            (len(self.control_points), 1)
        )

    def transform_points(self, points):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        displacements = numpy.zeros_like(points, dtype=float)
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            displacement = points_to_control - self.rigid_transform_points(
                self.parameter[i * 6: (i + 1) * 6],
                points_to_control
            )
            displacements[point_indices_to_control] += displacement * weights
        total_weights_mask = (total_weights > 0).ravel()
        displacements[total_weights_mask] /= total_weights[total_weights_mask]
        return points - displacements

    def transform_vectors(self, points, vectors):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        displacements_uw = numpy.zeros_like(points, dtype=float)
        displacements = numpy.zeros_like(points, dtype=float)
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            vectors_to_control = vectors[point_indices_to_control]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            displacement = vectors_to_control - self.rigid_transform_vectors(
                self.parameter[i * 6: (i + 1) * 6],
                vectors_to_control
            )
            displacements_uw[point_indices_to_control] += displacement
            displacements[point_indices_to_control] += displacement * weights
        total_weights_mask = (total_weights > 0).ravel()
        displacements[total_weights_mask] /= total_weights[total_weights_mask]
        return vectors - displacements

    def jacobian(self, points):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        jacobian = numpy.zeros((len(points), 6 * len(self.control_points), 3))
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            control_point_jacobian = self.rigid_jacobian(
                self.parameter[i * 6: (i + 1) * 6],
                points_to_control
            )
            jacobian[point_indices_to_control, i * 6: (i + 1) * 6, :] += (
                control_point_jacobian * weights[:, None]
            )
        total_weights_mask = (total_weights > 0).ravel()
        jacobian[total_weights_mask] /= total_weights[:, None][total_weights_mask]
        return jacobian

    def jacobian_position(self, points):
        if points is not self.points:
            tree = KDTree(points)
        else:
            tree = self.tree
        jacobian = numpy.zeros((len(points), 3, 3))
        total_weights = numpy.zeros((len(points), 1))
        points_to_each_control = self.control_tree.query_ball_tree(tree, self.smooth * 3)
        for i, point_indices_to_control in enumerate(points_to_each_control):
            if len(point_indices_to_control) == 0:
                continue
            points_to_control = points[point_indices_to_control] - self.control_points[i]
            distances2 = (points_to_control ** 2).sum(1)
            weights = self.kernel(distances2)[:, None]
            total_weights[point_indices_to_control] += weights
            control_point_parameter = self.parameter[i * 6: (i + 1) * 6].copy()
            control_point_jacobian = rigid_from_parameters(control_point_parameter)[:-1, :-1].T
            jacobian[point_indices_to_control, :, :] += control_point_jacobian * weights[:, None]
        total_weights_mask = (total_weights > 0).ravel()
        jacobian[total_weights_mask] /= total_weights[:, None][total_weights_mask]
        return jacobian

    def affine_matrices(self):
        affine_matrices = numpy.empty((len(self.control_points), 4, 4))
        for i in xrange(len(self.control_points)):
            affine_matrices[i] = rigid_from_parameters(self.parameter[i * 6: (i + 1) * 6])
        return affine_matrices

    def kernel(self, distances2):
        return numpy.exp(-distances2 / self.smooth ** 2)

    @staticmethod
    def log(parameter):
        n_matrices = len(parameter) / 6
        matrices = numpy.empty((n_matrices, 4, 4))
        for i in xrange(n_matrices):
            p = parameter[i * 6: (i + 1) * 6].copy()
            matrices[i, :, :] = logm(rigid_from_parameters(p))
        return matrices

    @staticmethod
    def gradient_log(parameter):
        n_matrices = len(parameter) / 6
        matrices = numpy.empty((n_matrices, 4, 4))
        for i in xrange(n_matrices):
            p = parameter[i * 6: (i + 1) * 6].copy()
            # In the case that it is the gradient, we take the parameter as a
            # distance from the identity, so we add the non-scale to it.
            p[0: 3] += 1
            matrices[i, :, :] = logm(rigid_from_parameters(p))
        return matrices

    @staticmethod
    def exp(log_parameter):
        parameter = numpy.empty(6 * len(log_parameter))
        for i in xrange(len(log_parameter)):
            parameter[i * 6: (i + 1) * 6] = parameters_from_rigid(expm(log_parameter[i]))
        return parameter

    @staticmethod
    def rigid_transform_points(affine_parameter, points):
        cos_phi, cos_theta, cos_psi = cos(affine_parameter[0: 3])
        sin_phi, sin_theta, sin_psi = sin(affine_parameter[0: 3])
        displacement = affine_parameter[-3:][None, :]
        x = points[:, 0][:, None]
        y = points[:, 1][:, None]
        z = points[:, 2][:, None]
        return numpy.c_[
            cos_theta * cos_psi * x +
            (-cos_phi * sin_psi + sin_phi * sin_theta * cos_psi) * y +
            (sin_phi * sin_psi + cos_phi * sin_theta * cos_psi) * z,
            cos_theta * sin_psi * x +
            (cos_phi * cos_psi + sin_phi * sin_theta * sin_psi) * y +
            (-sin_phi * cos_psi + cos_phi * sin_theta * sin_psi) * z,
            -sin_theta * x + sin_phi * cos_theta * y + cos_phi * cos_theta * z
        ] + displacement

    @staticmethod
    def rigid_transform_vectors(affine_parameter, vectors):
        cos_phi, cos_theta, cos_psi = cos(affine_parameter[0: 3])
        sin_phi, sin_theta, sin_psi = sin(affine_parameter[0: 3])
        x = vectors[:, 0][:, None]
        y = vectors[:, 1][:, None]
        z = vectors[:, 2][:, None]
        return numpy.c_[
            cos_theta * cos_psi * x +
            (-cos_phi * sin_psi + sin_phi * sin_theta * cos_psi) * y +
            (sin_phi * sin_psi + cos_phi * sin_theta * cos_psi) * z,
            cos_theta * sin_psi * x +
            (cos_phi * cos_psi + sin_phi * sin_theta * sin_psi) * y +
            (-sin_phi * cos_psi + cos_phi * sin_theta * sin_psi) * z,
            -sin_theta * x + sin_phi * cos_theta * y + cos_phi * cos_theta * z
        ]

    @staticmethod
    def rigid_jacobian(affine_parameter, points):
        centered_points = numpy.atleast_2d(points)
        x = centered_points[:, 0][:, None].flatten()
        y = centered_points[:, 1][:, None].flatten()
        z = centered_points[:, 2][:, None].flatten()
        cos_phi, cos_theta, cos_psi = cos(affine_parameter[0: 3])
        sin_phi, sin_theta, sin_psi = sin(affine_parameter[0: 3])
        jacobian = numpy.zeros((len(points), 6, 3))
        jacobian[:, 0, 0] = cos_theta * sin_psi * x
        jacobian[:, 0, 1] = (cos_phi * cos_psi + sin_psi * sin_theta * sin_phi) * y
        jacobian[:, 0, 2] = (-sin_phi * cos_psi + cos_phi * sin_theta * sin_psi) * z
        jacobian[:, 1, 0] = -sin_theta * x
        jacobian[:, 1, 1] = cos_theta * sin_phi * y
        jacobian[:, 1, 2] = cos_phi * cos_theta * z
        linear.jacobian_rotation(affine_parameter[0: 3], points, jacobian[:, 0: 3, :])
        linear.jacobian_translation(affine_parameter[3: 6], points, jacobian[:, 3: 6, :])
        return jacobian
import gc

import numpy as np
from scipy.spatial import KDTree
from qgis.core import QgsApplication, QgsVectorLayer, QgsFeatureRequest

QgsApplication.setPrefixPath("C:/OSGeo4W/apps/qgis", True)
QgsApplication.initQgis()
gc.enable()

# Quick sanity check of KDTree.query on a regular grid.
x, y = np.mgrid[0:5, 2:8]
tree = KDTree(list(zip(x.ravel(), y.ravel())))
pts = np.array([[0, 0], [2.1, 2.9]])
dist, idx = tree.query(pts, k=1, eps=0, p=2, distance_upper_bound=2.01)

# Load two polygons and look for the closest pair of vertices between them.
layer = QgsVectorLayer('D:/FragsDissolve.shp', 'layer', 'ogr')
feats = list(layer.getFeatures(QgsFeatureRequest().setFilterFids([30381, 258317])))
pol = feats[0].geometry().asPolygon()[0]
pol2 = feats[1].geometry().asPolygon()[0]
tree = KDTree(pol)
tree2 = KDTree(pol2)

# For each vertex of pol, the indices of the pol2 vertices within 0.0039 of it.
result = tree.query_ball_tree(tree2, 0.0039)

# For each vertex of pol2: distance to, and index of, its nearest vertex in pol.
dist, idx = tree.query(tree2.data)
minimum = np.argmin(dist)              # index into tree2.data (pol2)
minimum2 = idx[minimum]                # index into tree.data (pol)
closest_vertex_pol2 = tree2.data[minimum]
closest_vertex_pol = tree.data[minimum2]
min_distance = dist[minimum]           # Euclidean distance between the two closest vertices
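The same closest-vertex-pair pattern, stripped of the QGIS layer handling so it can be run anywhere (the two rings below are made-up test coordinates, not the shapefile features used above):

import numpy as np
from scipy.spatial import KDTree

ring_a = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
ring_b = np.array([[2.5, 0.5], [3.0, 1.5], [2.0, 1.5]])

tree_a = KDTree(ring_a)
tree_b = KDTree(ring_b)

touching = tree_a.query_ball_tree(tree_b, 0.0039)   # all empty lists: the rings are far apart
dist, idx = tree_a.query(ring_b)                    # nearest ring_a vertex for each ring_b vertex
k = np.argmin(dist)
print("closest pair:", ring_b[k], ring_a[idx[k]], "distance:", dist[k])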
def despike(data_coord, data, radius, threshold):
    """
    Despikes a given dataset using the equation Z = (datapoint - mean) / standard deviation.

    Usage: despike.py <coordinates> <data> <radius> <threshold>

    Parameters
    ----------
    data_coord : numpy array of x, y, (z) coordinates
    data : numpy array with the same number of rows as the coordinates,
        but as many columns as required
    radius : numerical value
    threshold : numerical value

    Returns
    -------
    A numpy array containing coordinates and data.

    Finnegan Pope-Carter
    """
    import numpy as np
    from scipy.spatial import KDTree

    # Build a tree from the coordinates to allow tree queries.
    data_tree = KDTree(data_coord)
    # For every point, the indices of the points within 'radius' of it.
    groups = data_tree.query_ball_tree(data_tree, radius, eps=1)

    row_delete = []
    col_cnt = data.shape[1]

    # Collect the rows whose Z score exceeds the threshold in any column.
    for j, row in enumerate(groups):
        if not row:
            continue
        for i in range(col_cnt):
            neighbourhood = np.array(data[row, i])
            mean = np.mean(neighbourhood)
            std = np.std(neighbourhood)
            # Avoid division by zero.
            if std == 0:
                continue
            z = abs(data[j, i] - mean) / std
            if z > threshold:
                row_delete.append(j)
                break

    # Combine the coordinate and data arrays and drop the spiked rows.
    data = np.column_stack((data_coord, data))
    data = np.delete(data, row_delete, axis=0)
    return data
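As a quick smoke test (entirely made-up data; the radius and threshold are just illustrative choices), a single large spike in otherwise constant data should be the only row removed:

import numpy as np

coords = np.column_stack([np.arange(20.0), np.zeros(20)])   # 20 points along a line
values = np.ones((20, 1))
values[7, 0] = 1000.0                                        # inject one spike
cleaned = despike(coords, values, radius=5.0, threshold=1.9)
print(cleaned.shape)   # expect (19, 3): the spiked row is dropped, coords and data are stacked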