Example #1
def compute_correspondence_metrics(
    keypoints_i1: Keypoints,
    keypoints_i2: Keypoints,
    corr_idxs_i1i2: np.ndarray,
    intrinsics_i1: Cal3Bundler,
    intrinsics_i2: Cal3Bundler,
    i2Ti1: Pose3,
    epipolar_distance_threshold: float,
) -> Tuple[int, float]:
    """Compute the metrics for the generated verified correspondence.

    Args:
        keypoints_i1: detected keypoints in image i1.
        keypoints_i2: detected keypoints in image i2.
        corr_idxs_i1i2: indices of correspondences.
        intrinsics_i1: intrinsics for i1.
        intrinsics_i2: intrinsics for i2.
        i2Ti1: relative pose.
        epipolar_distance_threshold: max epipolar distance to qualify as a correct match.

    Returns:
        Number of correct correspondences.
        Inlier ratio, i.e. the fraction of correspondences that are correct.
    """
    # Guard against an empty correspondence set to avoid a divide-by-zero below.
    if corr_idxs_i1i2.shape[0] == 0:
        return 0, 0.0

    number_correct = metric_utils.count_correct_correspondences(
        keypoints_i1.extract_indices(corr_idxs_i1i2[:, 0]),
        keypoints_i2.extract_indices(corr_idxs_i1i2[:, 1]),
        intrinsics_i1,
        intrinsics_i2,
        i2Ti1,
        epipolar_distance_threshold,
    )

    return number_correct, number_correct / corr_idxs_i1i2.shape[0]
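
A minimal usage sketch for the helper above, with hypothetical dummy values; Keypoints comes from the same codebase as the snippet, and Cal3Bundler/Pose3/Rot3 follow the GTSAM Python API:

import numpy as np
from gtsam import Cal3Bundler, Pose3, Rot3

# Dummy detections and candidate matches (illustrative values only).
keypoints_i1 = Keypoints(coordinates=np.random.rand(100, 2) * 640.0)
keypoints_i2 = Keypoints(coordinates=np.random.rand(100, 2) * 640.0)
corr_idxs_i1i2 = np.column_stack([np.arange(50), np.arange(50)])  # shape (50, 2)

intrinsics = Cal3Bundler(500.0, 0.0, 0.0, 320.0, 240.0)  # f, k1, k2, u0, v0
i2Ti1 = Pose3(Rot3(), np.array([1.0, 0.0, 0.0]))  # identity rotation, unit-x translation

num_correct, inlier_ratio = compute_correspondence_metrics(
    keypoints_i1, keypoints_i2, corr_idxs_i1i2, intrinsics, intrinsics, i2Ti1,
    epipolar_distance_threshold=4.0,
)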
Example #2
    def estimate_F(
        self,
        keypoints_i1: Keypoints,
        keypoints_i2: Keypoints,
        match_indices: np.ndarray,
        robust_estimation_type: RobustEstimationType = RobustEstimationType.FM_RANSAC,
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Estimate the Fundamental matrix from correspondences.

        Args:
            keypoints_i1: detected features in image #i1.
            keypoints_i2: detected features in image #i2.
            match_indices: matches as indices of features from both images, of shape (N3, 2), where N3 <= min(N1, N2),
               given N1 features from image 1, and N2 features from image 2.
            robust_estimation_type: robust estimation algorithm to use; defaults to FM_RANSAC.

        Returns:
            i2Fi1: Fundamental matrix, as 3x3 array.
            inlier_mask: boolean array of shape (N3,) indicating inlier matches.
        """
        i2Fi1, inlier_mask = cv2.findFundamentalMat(
            keypoints_i1.extract_indices(match_indices[:, 0]).coordinates,
            keypoints_i2.extract_indices(match_indices[:, 1]).coordinates,
            method=getattr(cv2, robust_estimation_type.value),  # enum value names the cv2 constant, e.g. "FM_RANSAC"
            ransacReprojThreshold=self._estimation_threshold_px,
            confidence=RANSAC_SUCCESS_PROB,
            maxIters=RANSAC_MAX_ITERS,
        )
        return i2Fi1, inlier_mask
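
One caveat when consuming these return values: OpenCV's findFundamentalMat returns the mask as an (N, 1) uint8 array, and may return None (or several stacked solutions for the 7-point method) on degenerate input. A hedged post-processing sketch, where `verifier` is a hypothetical instance of the class above:

i2Fi1, inlier_mask = verifier.estimate_F(keypoints_i1, keypoints_i2, match_indices)
if i2Fi1 is None or i2Fi1.shape != (3, 3):
    raise RuntimeError("Fundamental matrix estimation failed or was ambiguous.")
is_inlier = inlier_mask.ravel().astype(bool)  # (N3, 1) uint8 -> (N3,) bool
verified_matches = match_indices[is_inlier]   # keep only inlier correspondences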
Example #3
    def test_extract_indices_valid(self):
        """Test extraction of indices."""

        # test without scales and responses
        input = Keypoints(coordinates=np.array([[1.3, 5], [20, 10], [5.0, 1.3], [2.1, 4.2]]))
        indices = np.array([0, 2])

        expected = Keypoints(coordinates=np.array([[1.3, 5], [5.0, 1.3]]))
        computed = input.extract_indices(indices)

        self.assertEqual(computed, expected)

        # test with scales and responses
        input = Keypoints(
            coordinates=np.array([[1.3, 5], [20, 10], [5.0, 1.3], [2.1, 4.2]]),
            scales=np.array([0.2, 0.5, 0.3, 0.9]),
            responses=np.array([2.3, 1.2, 4.5, 0.2]),
        )
        indices = np.array([0, 2])

        expected = Keypoints(
            coordinates=np.array([[1.3, 5], [5.0, 1.3]]), scales=np.array([0.2, 0.3]), responses=np.array([2.3, 4.5])
        )
        computed = input.extract_indices(indices)

        self.assertEqual(computed, expected)
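
These assertions suggest that extract_indices amounts to NumPy fancy indexing over the stored arrays, carrying scales and responses along only when present. A sketch of that inferred behavior (not the library's actual implementation):

def extract_indices_sketch(kps: Keypoints, indices: np.ndarray) -> Keypoints:
    # Select the requested rows; optional per-keypoint attributes follow along.
    return Keypoints(
        coordinates=kps.coordinates[indices],
        scales=None if kps.scales is None else kps.scales[indices],
        responses=None if kps.responses is None else kps.responses[indices],
    )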
Example #4
    def test_extract_indices_empty(self):
        """Test extraction of indices, which are empty."""

        # test without scales and responses
        input = Keypoints(coordinates=np.array([[1.3, 5], [20, 10], [5.0, 1.3], [2.1, 4.2]]))
        indices = np.array([])

        computed = input.extract_indices(indices)

        self.assertEqual(len(computed), 0)
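
Note that np.array([]) defaults to float64, and modern NumPy rejects non-integer index arrays even when empty, so this test implies an explicit empty-input guard rather than direct fancy indexing. Sketched under that assumption:

# Assumed guard at the top of extract_indices (inferred from this test, not confirmed):
if indices.size == 0:
    return Keypoints(coordinates=np.zeros((0, 2)))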
Example #5
    def estimate_F(self, keypoints_i1: Keypoints, keypoints_i2: Keypoints,
                   match_indices: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Estimate the Fundamental matrix from correspondences.

        Args:
            keypoints_i1: detected features in image #i1.
            keypoints_i2: detected features in image #i2.
            match_indices: matches as indices of features from both images, of shape (N3, 2), where N3 <= min(N1, N2),
               given N1 features from image 1, and N2 features from image 2.

        Returns:
            i2Fi1: Fundamental matrix, as 3x3 array.
            inlier_mask: boolean array of shape (N3,) indicating inlier matches.
        """
        i2Fi1, inlier_mask = cv2.findFundamentalMat(
            keypoints_i1.extract_indices(match_indices[:, 0]).coordinates,
            keypoints_i2.extract_indices(match_indices[:, 1]).coordinates,
            method=cv2.FM_LMEDS,
        )
        return i2Fi1, inlier_mask
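
A hedged note on the design choice here: cv2.FM_LMEDS estimates the model by least-median-of-squares, so it needs no pixel threshold or iteration budget, but it implicitly assumes that inliers form a majority of the matches. When outlier rates can exceed roughly 50%, the threshold-driven FM_RANSAC configuration shown in Example #2 is the more robust choice.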
Example #6
def compute_correspondence_metrics(
    keypoints_i1: Keypoints,
    keypoints_i2: Keypoints,
    corr_idxs_i1i2: np.ndarray,
    intrinsics_i1: Cal3Bundler,
    intrinsics_i2: Cal3Bundler,
    dist_threshold: float,
    gt_wTi1: Optional[Pose3] = None,
    gt_wTi2: Optional[Pose3] = None,
    gt_scene_mesh: Optional[Trimesh] = None,
) -> Tuple[Optional[np.ndarray], Optional[np.ndarray]]:
    """Checks the correspondences for epipolar distances and counts ones which are below the threshold.

    Args:
        keypoints_i1: keypoints in image i1.
        keypoints_i2: corresponding keypoints in image i2.
        corr_idxs_i1i2: indices of correspondences between i1 and i2, as array of shape (N, 2).
        intrinsics_i1: intrinsics for i1.
        intrinsics_i2: intrinsics for i2.
        dist_threshold: max acceptable distance for a correct correspondence.
        gt_wTi1: ground truth pose of image i1.
        gt_wTi2: ground truth pose of image i2.
        gt_scene_mesh: ground truth triangular surface mesh of the scene in the world frame.

    Raises:
        ValueError: when the numbers of keypoints do not match.

    Returns:
        Boolean mask of which verified correspondences are classified as correct under Sampson error
            (using GT epipolar geometry).
        Reprojection error for every verified correspondence against GT geometry.
    """
    if corr_idxs_i1i2.size == 0:
        return None, None

    if gt_wTi1 is None or gt_wTi2 is None:
        return None, None

    # Compute ground truth correspondences.
    matched_keypoints_i1 = keypoints_i1.extract_indices(corr_idxs_i1i2[:, 0])
    matched_keypoints_i2 = keypoints_i2.extract_indices(corr_idxs_i1i2[:, 1])
    # Check to see if a GT mesh is provided.
    if gt_scene_mesh is not None:
        gt_camera_i1 = PinholeCameraCal3Bundler(gt_wTi1, intrinsics_i1)
        gt_camera_i2 = PinholeCameraCal3Bundler(gt_wTi2, intrinsics_i2)
        return mesh_inlier_correspondences(
            matched_keypoints_i1,
            matched_keypoints_i2,
            gt_camera_i1,
            gt_camera_i2,
            gt_scene_mesh,
            dist_threshold,
        )

    # If no mesh is provided, use squared Sampson error.
    gt_i2Ti1 = gt_wTi2.between(gt_wTi1)
    return epipolar_inlier_correspondences(
        matched_keypoints_i1,
        matched_keypoints_i2,
        intrinsics_i1,
        intrinsics_i2,
        gt_i2Ti1,
        dist_threshold,
    )
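
A minimal invocation sketch for the GT-pose path, reusing dummy inputs like those in the sketch after Example #1 (Rot3/Pose3 follow the GTSAM Python API; passing gt_scene_mesh=None selects the Sampson-error branch):

gt_wTi1 = Pose3()  # identity pose, for illustration
gt_wTi2 = Pose3(Rot3.RzRyRx(0.0, 0.1, 0.0), np.array([1.0, 0.0, 0.0]))

inlier_mask, reproj_errors = compute_correspondence_metrics(
    keypoints_i1, keypoints_i2, corr_idxs_i1i2,
    intrinsics_i1, intrinsics_i2,
    dist_threshold=4.0,
    gt_wTi1=gt_wTi1,
    gt_wTi2=gt_wTi2,
    gt_scene_mesh=None,  # no mesh -> squared Sampson error against gt_wTi2.between(gt_wTi1)
)
if inlier_mask is None:
    pass  # no correspondences or no GT poses: metrics are unavailable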
Example #7
    def verify(
        self,
        keypoints_i1: Keypoints,
        keypoints_i2: Keypoints,
        match_indices: np.ndarray,
        camera_intrinsics_i1: Cal3Bundler,
        camera_intrinsics_i2: Cal3Bundler,
    ) -> Tuple[Optional[Rot3], Optional[Unit3], np.ndarray]:
        """Performs verification of correspondences between two images to recover the relative pose and indices of
        verified correspondences.

        Args:
            keypoints_i1: detected features in image #i1.
            keypoints_i2: detected features in image #i2.
            match_indices: matches as indices of features from both images, of shape (N3, 2), where N3 <= min(N1, N2).
            camera_intrinsics_i1: intrinsics for image #i1.
            camera_intrinsics_i2: intrinsics for image #i2.

        Returns:
            Estimated rotation i2Ri1, or None if it cannot be estimated.
            Estimated unit translation i2Ui1, or None if it cannot be estimated.
            Indices of verified correspondences, of shape (N, 2) with N <= N3. These are a subset of match_indices.
        """
        if match_indices.shape[0] < self._min_matches:
            return self._failure_result

        if self._use_intrinsics_in_verification:
            uv_norm_i1 = feature_utils.normalize_coordinates(keypoints_i1.coordinates, camera_intrinsics_i1)
            uv_norm_i2 = feature_utils.normalize_coordinates(keypoints_i2.coordinates, camera_intrinsics_i2)
            K = np.eye(3)

            i2Ei1, inlier_mask = cv2.findEssentialMat(
                uv_norm_i1[match_indices[:, 0]],
                uv_norm_i2[match_indices[:, 1]],
                K,
                method=cv2.RANSAC,
                threshold=NORMALIZED_COORD_RANSAC_THRESH,
                prob=DEFAULT_RANSAC_SUCCESS_PROB,
            )
        else:
            i2Fi1, inlier_mask = cv2.findFundamentalMat(
                keypoints_i1.extract_indices(match_indices[:, 0]).coordinates,
                keypoints_i2.extract_indices(match_indices[:, 1]).coordinates,
                method=cv2.FM_RANSAC,
                ransacReprojThreshold=PIXEL_COORD_RANSAC_THRESH,
                confidence=DEFAULT_RANSAC_SUCCESS_PROB,
                maxIters=10000,
            )

            i2Ei1 = verification_utils.fundamental_to_essential_matrix(
                i2Fi1, camera_intrinsics_i1, camera_intrinsics_i2
            )

        inlier_idxs = np.where(inlier_mask.ravel() == 1)[0]

        (i2Ri1, i2Ui1) = verification_utils.recover_relative_pose_from_essential_matrix(
            i2Ei1,
            keypoints_i1.coordinates[match_indices[inlier_idxs, 0]],
            keypoints_i2.coordinates[match_indices[inlier_idxs, 1]],
            camera_intrinsics_i1,
            camera_intrinsics_i2,
        )

        return i2Ri1, i2Ui1, match_indices[inlier_idxs]
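
In the else-branch, the pixel-space fundamental matrix is presumably upgraded to an essential matrix via the standard relation E = K2^T * F * K1. A minimal NumPy sketch of that relation (the snippet's actual helper takes Cal3Bundler objects and may additionally handle distortion, which this ignores):

def fundamental_to_essential_sketch(i2Fi1: np.ndarray, K1: np.ndarray, K2: np.ndarray) -> np.ndarray:
    # With pixel coords x = K @ x_norm, the constraint x2^T F x1 = 0 becomes
    # x2_norm^T (K2^T F K1) x1_norm = 0, hence E = K2^T @ F @ K1.
    return K2.T @ i2Fi1 @ K1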