def merge_two_reconstructions(r1, r2, config, threshold=1):
    """Merge two reconstructions that share common tracks.

    When more than 6 tracks are common to both reconstructions, fit a
    similarity transform mapping ``r1`` onto ``r2``; if at least 10
    inliers support it, apply the transform to ``r1``, merge its shots
    and points into ``r2`` and realign the result.

    Args:
        r1, r2: reconstruction dicts with ``'points'`` and ``'shots'``
            mappings (``r1`` and ``r2`` are mutated on a successful merge).
        config: configuration forwarded to ``align_reconstruction``.
        threshold: inlier distance threshold for the similarity fit.

    Returns:
        ``[merged]`` when the merge succeeds, otherwise ``[r1, r2]``
        unchanged.
    """
    t1, t2 = r1['points'], r2['points']
    common_tracks = list(set(t1) & set(t2))

    # Need more than 6 shared 3D points to attempt a similarity fit.
    if len(common_tracks) > 6:
        p1 = np.array([t1[t]['coordinates'] for t in common_tracks])
        p2 = np.array([t2[t]['coordinates'] for t in common_tracks])
        T, inliers = multiview.fit_similarity_transform(
            p1, p2, max_iterations=1000, threshold=threshold)
        if len(inliers) >= 10:
            s, A, b = multiview.decompose_similarity_transform(T)
            r1p = r1
            apply_similarity(r1p, s, A, b)
            r = r2
            r['shots'].update(r1p['shots'])
            r['points'].update(r1p['points'])
            align_reconstruction(r, config)
            return [r]
    # Too few common tracks or too few inliers: keep them separate.
    return [r1, r2]
def merge_two_reconstructions(r1, r2, config, threshold=1):
    """Merge two reconstructions that share common tracks.

    Fits a similarity transform from the shared 3D points; when it is
    well supported, maps ``r1`` onto ``r2``, merges shots and points
    into ``r2`` and realigns the merged reconstruction. Otherwise both
    reconstructions are returned untouched.
    """
    points1, points2 = r1.points, r2.points
    shared_tracks = list(set(points1) & set(points2))

    if len(shared_tracks) <= 6:
        # Not enough correspondences to estimate a similarity transform.
        return [r1, r2]

    coords1 = np.array([points1[track].coordinates for track in shared_tracks])
    coords2 = np.array([points2[track].coordinates for track in shared_tracks])
    T, inliers = multiview.fit_similarity_transform(
        coords1, coords2, max_iterations=1000, threshold=threshold
    )
    if len(inliers) < 10:
        # Transform not supported by enough points; keep them separate.
        return [r1, r2]

    scale, rotation, translation = multiview.decompose_similarity_transform(T)
    source = r1
    align.apply_similarity(source, scale, rotation, translation)
    merged = r2
    merged.shots.update(source.shots)
    merged.points.update(source.points)
    align.align_reconstruction(merged, None, config)
    return [merged]
def merge_two_reconstructions(r1, r2, config, threshold=1):
    """Merge two reconstructions that share common tracks.

    When more than 6 tracks are common to both reconstructions, fit a
    similarity transform mapping ``r1`` onto ``r2``; if at least 10
    inliers support it, apply the transform to ``r1``, merge its shots
    and points into ``r2`` and realign the result.

    Args:
        r1, r2: reconstructions with ``points`` and ``shots`` mappings
            (both are mutated on a successful merge).
        config: configuration forwarded to ``align_reconstruction``.
        threshold: inlier distance threshold for the similarity fit.

    Returns:
        ``[merged]`` when the merge succeeds, otherwise ``[r1, r2]``
        unchanged.
    """
    t1, t2 = r1.points, r2.points
    common_tracks = list(set(t1) & set(t2))

    # Need more than 6 shared 3D points to attempt a similarity fit.
    if len(common_tracks) > 6:
        p1 = np.array([t1[t].coordinates for t in common_tracks])
        p2 = np.array([t2[t].coordinates for t in common_tracks])
        T, inliers = multiview.fit_similarity_transform(
            p1, p2, max_iterations=1000, threshold=threshold)
        if len(inliers) >= 10:
            s, A, b = multiview.decompose_similarity_transform(T)
            r1p = r1
            apply_similarity(r1p, s, A, b)
            r = r2
            r.shots.update(r1p.shots)
            r.points.update(r1p.points)
            align_reconstruction(r, config)
            return [r]
    # Too few common tracks or too few inliers: keep them separate.
    return [r1, r2]
def align_two_reconstruction(r1, r2, common_tracks, threshold):
    """Estimate a similarity transform between two reconstructions.

    Args:
        r1, r2: reconstructions with a ``points`` mapping.
        common_tracks: pairs ``(track_in_r1, track_in_r2)`` of tracks
            shared by both reconstructions.
        threshold: RANSAC inlier distance threshold.

    Returns:
        Tuple ``(ok, T, inliers)``: ``ok`` is True when a supported
        transform was found, ``T`` is the similarity transform (or
        None), and ``inliers`` the list of inlier indices (empty on
        failure, so callers can call ``len()`` unconditionally).
    """
    t1, t2 = r1.points, r2.points
    if len(common_tracks) > 6:
        p1 = np.array([t1[t[0]].coordinates for t in common_tracks])
        p2 = np.array([t2[t[1]].coordinates for t in common_tracks])

        # 3 samples / 100 trials / 50% outliers = 0.99 success probability
        # with probability = 1 - (1 - (1 - outlier)^model)^trial
        T, inliers = multiview.fit_similarity_transform(
            p1, p2, max_iterations=100, threshold=threshold)
        if len(inliers) > 0:
            return True, T, inliers
    # Fix: failure previously returned None for the inlier list while
    # success returned a list; return [] so the type is consistent.
    return False, None, []
def align_two_reconstruction(
    r1: types.Reconstruction,
    r2: types.Reconstruction,
    common_tracks: List[Tuple[str, str]],
    threshold: float,
) -> Tuple[bool, Optional[np.ndarray], List[int]]:
    """Estimate a similarity transform between two reconstructions.

    Args:
        r1, r2: reconstructions with a ``points`` mapping.
        common_tracks: pairs ``(track_in_r1, track_in_r2)`` of tracks
            shared by both reconstructions.
        threshold: RANSAC inlier distance threshold.

    Returns:
        Tuple ``(ok, T, inliers)``: ``ok`` is True when a supported
        transform was found, ``T`` is the similarity transform (or
        None), and ``inliers`` the list of inlier indices (empty on
        failure).
    """
    # Fix: the return annotation declared the first element as `float`,
    # but the function returns a bool success flag.
    t1, t2 = r1.points, r2.points
    if len(common_tracks) > 6:
        p1 = np.array([t1[t[0]].coordinates for t in common_tracks])
        p2 = np.array([t2[t[1]].coordinates for t in common_tracks])

        # 3 samples / 100 trials / 50% outliers = 0.99 success probability
        # with probability = 1 - (1 - (1 - outlier)^model)^trial
        T, inliers = multiview.fit_similarity_transform(
            p1, p2, max_iterations=100, threshold=threshold
        )
        if len(inliers) > 0:
            return True, T, inliers
    return False, None, []