def match_candidates_by_distance(
    images_ref: List[str],
    images_cand: List[str],
    exifs: Dict[str, Any],
    reference: geo.TopocentricConverter,
    max_neighbors: int,
    max_distance: float,
) -> Set[Tuple[str, str]]:
    """Find candidate matching pairs by GPS distance.

    The GPS altitude is ignored because we want images of the same position
    at different altitudes to be matched together. Otherwise, for drone
    datasets, flights at different altitudes do not get matched.

    Args:
        images_ref: images to pair against the candidates
        images_cand: candidate images; a KD-tree is built over their positions
        exifs: per-image EXIF data providing ``gps`` latitude/longitude
        reference: converter from WGS84 to the local topocentric frame
        max_neighbors: max nearest neighbors per reference image (0 = unlimited)
        max_distance: max pairing distance (0 = unlimited)

    Returns:
        Set of sorted (unordered) image-name pairs.
    """
    if len(images_cand) == 0:
        return set()

    # Both limits disabled means distance-based selection is turned off.
    if max_neighbors <= 0 and max_distance <= 0:
        return set()

    # A zero limit means "no limit": substitute effectively-infinite bounds.
    max_neighbors = max_neighbors or 99999999
    max_distance = max_distance or 99999999.0
    k = min(len(images_cand), max_neighbors)

    # Project candidate GPS positions into the topocentric frame,
    # zeroing the altitude (see docstring).
    points = np.zeros((len(images_cand), 3))
    for i, image in enumerate(images_cand):
        gps = exifs[image]["gps"]
        points[i] = reference.to_topocentric(gps["latitude"], gps["longitude"], 0)

    tree = spatial.cKDTree(points)

    pairs = set()
    for image_ref in images_ref:
        # If the query image is itself a candidate, its own point comes back
        # as a neighbor; ask for one extra so we still get k real neighbors.
        nn = k + 1 if image_ref in images_cand else k

        gps = exifs[image_ref]["gps"]
        point = reference.to_topocentric(gps["latitude"], gps["longitude"], 0)
        distances, neighbors = tree.query(
            point, k=nn, distance_upper_bound=max_distance
        )

        # When nn == 1, tree.query returns a scalar index — a NumPy integer
        # scalar, not a Python int, so `type(neighbors) == int` would miss it
        # and iterating would raise TypeError. np.isscalar covers both.
        if np.isscalar(neighbors):
            neighbors = [neighbors]

        for j in neighbors:
            # Missing neighbors (beyond the distance bound) are reported
            # with the out-of-range index len(points); skip them.
            if j >= len(images_cand):
                continue
            image_cand = images_cand[j]
            if image_cand != image_ref:
                pairs.add(sorted_pair(image_ref, image_cand))
    return pairs
def get_gps_point(
    exif: Dict[str, Any], reference: geo.TopocentricConverter
) -> Tuple[np.ndarray, np.ndarray]:
    """Return a GPS-based representative position/direction pair.

    The position is the GPS coordinate projected to the topocentric frame
    at zero altitude; the direction is the vertical Z unit vector
    (vertical viewing assumption).
    """
    coords = exif["gps"]
    topocentric_point = reference.to_topocentric(
        coords["latitude"], coords["longitude"], 0
    )
    vertical = np.array([0, 0, 1])
    return topocentric_point, vertical