Example #1
def compute_epipolar_distances(normalized_coords_i1: np.ndarray,
                               normalized_coords_i2: np.ndarray,
                               i2Ei1: EssentialMatrix) -> Optional[np.ndarray]:
    """Compute symmetric point-line epipolar distances between normalized coordinates of correspondences.

    Args:
        normalized_coords_i1: normalized coordinates in image i1, of shape Nx2.
        normalized_coords_i2: corr. normalized coordinates in image i2, of shape Nx2.
        i2Ei1: essential matrix between two images

    Returns:
        Symmetric epipolar distances for each row of the input, of shape N.
    """
    if (normalized_coords_i1 is None or normalized_coords_i1.size == 0
            or normalized_coords_i2 is None or normalized_coords_i2.size == 0):
        return None

    # construct the essential matrix in the opposite direction
    i2Ti1 = Pose3(i2Ei1.rotation(), i2Ei1.direction().point3())
    i1Ti2 = i2Ti1.inverse()
    i1Ei2 = EssentialMatrix(i1Ti2.rotation(), Unit3(i1Ti2.translation()))

    # get lines in i2 and i1
    epipolar_lines_i2 = feature_utils.convert_to_epipolar_lines(
        normalized_coords_i1, i2Ei1)
    epipolar_lines_i1 = feature_utils.convert_to_epipolar_lines(
        normalized_coords_i2, i1Ei2)

    # compute two distances and average them
    return 0.5 * (feature_utils.compute_point_line_distances(
        normalized_coords_i1, epipolar_lines_i1) +
                  feature_utils.compute_point_line_distances(
                      normalized_coords_i2, epipolar_lines_i2))
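
A minimal usage sketch for the function above (the gtsam types and the gtsfm feature_utils module used inside are assumed importable, as in the surrounding examples):

import numpy as np
from gtsam import EssentialMatrix, Rot3, Unit3

# a small relative motion: 10-degree yaw, translation along the x-axis
i2Ei1 = EssentialMatrix(Rot3.RzRyRx(0.0, 0.0, np.deg2rad(10)), Unit3(np.array([1.0, 0.0, 0.0])))

normalized_pts_i1 = np.array([[0.1, 0.2], [-0.3, 0.5]])  # Nx2 normalized coordinates
normalized_pts_i2 = np.array([[0.15, 0.2], [-0.3, 0.45]])

distances = compute_epipolar_distances(normalized_pts_i1, normalized_pts_i2, i2Ei1)  # shape (2,)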
Example #2
    def test_convert_to_epipolar_lines_none_input(self):
        """Test conversion of None to epipolar lines using the essential matrix."""

        points = None
        E_matrix = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0])))
        F_matrix = E_matrix.matrix()  # using identity intrinsics

        computed = feature_utils.convert_to_epipolar_lines(points, F_matrix)
        self.assertIsNone(computed)
Example #3
    def test_convert_to_epipolar_lines_valid_input(self):
        """Test conversion of valid 2D points to epipolar lines using the fundamental matrix, against manual
        computation and with OpenCV's output."""

        points = np.array([[10.0, -5.0], [3.5, 20.0]])  # 2D points in image coordinates
        E_matrix = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0])))
        F_matrix = E_matrix.matrix()  # using identity intrinsics
        expected_opencv = cv.computeCorrespondEpilines(points.reshape(-1, 1, 2), 1, F_matrix)
        expected_opencv = np.squeeze(expected_opencv)  # convert to a 2D array, as OpenCV adds a singleton dimension

        computed = feature_utils.convert_to_epipolar_lines(points, F_matrix)
        computed_normalized = computed / np.linalg.norm(computed[:, :2], axis=1, keepdims=True)
        np.testing.assert_allclose(computed_normalized, expected_opencv)
Example #4
File: metrics.py, Project: borglab/gtsfm
def epipolar_inlier_correspondences(
    keypoints_i1: Keypoints,
    keypoints_i2: Keypoints,
    intrinsics_i1: Cal3Bundler,
    intrinsics_i2: Cal3Bundler,
    i2Ti1: Pose3,
    dist_threshold: float,
) -> Tuple[Optional[np.ndarray], Optional[np.ndarray]]:
    """Compute inlier correspondences using epipolar geometry and the ground truth relative pose.

    Args:
        keypoints_i1: keypoints in image i1.
        keypoints_i2: corr. keypoints in image i2.
        intrinsics_i1: intrinsics for i1.
        intrinsics_i2: intrinsics for i2.
        i2Ti1: ground truth relative pose.
        dist_threshold: max acceptable distance for a correct correspondence.

    Returns:
        is_inlier: (N, ) mask of inlier correspondences.
        distance_squared: squared sampson distance between corresponding keypoints.
    """
    i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))
    i2Fi1 = verification_utils.essential_to_fundamental_matrix(
        i2Ei1, intrinsics_i1, intrinsics_i2)
    distance_squared = verification_utils.compute_epipolar_distances_sq_sampson(
        keypoints_i1.coordinates, keypoints_i2.coordinates, i2Fi1)
    is_inlier = distance_squared < dist_threshold**2 if distance_squared is not None else None

    return is_inlier, distance_squared
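
A hypothetical usage sketch (keypoints_i1, keypoints_i2, and the ground-truth world poses wTi1 and wTi2 are assumed to exist already; the 4 px threshold is arbitrary):

i2Ti1_gt = wTi2.between(wTi1)
is_inlier, dist_sq = epipolar_inlier_correspondences(
    keypoints_i1, keypoints_i2, Cal3Bundler(), Cal3Bundler(), i2Ti1_gt, dist_threshold=4.0)
num_inliers = np.count_nonzero(is_inlier) if is_inlier is not None else 0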
Example #5
def simulate_two_planes_scene(M: int, N: int) -> Tuple[Keypoints, Keypoints, EssentialMatrix]:
    """Generate a scene where 3D points are on two planes, and projects the points to the 2 cameras. There are M points
    on plane 1, and N points on plane 2.

    The two planes in this test are:
    1. -10x -y -20z +150 = 0
    2. 15x -2y -35z +200 = 0

    Args:
        M: number of points on 1st plane.
        N: number of points on 2nd plane.

    Returns:
        keypoints for image i1, of length (M+N).
        keypoints for image i2, of length (M+N).
        Essential matrix i2Ei1.
    """
    # range of 3D points
    range_x_coordinate = (-5, 7)
    range_y_coordinate = (-10, 10)

    # define the plane equations (coefficients a, b, c, d of ax + by + cz + d = 0)
    plane1_coeffs = (-10, -1, -20, 150)
    plane2_coeffs = (15, -2, -35, 200)

    # sample the points from planes
    plane1_points = sample_points_on_plane(plane1_coeffs, range_x_coordinate, range_y_coordinate, M)
    plane2_points = sample_points_on_plane(plane2_coeffs, range_x_coordinate, range_y_coordinate, N)

    points_3d = np.vstack((plane1_points, plane2_points))

    # define the camera poses and compute the essential matrix
    wti1 = np.array([0.1, 0, -20])
    wti2 = np.array([1, -2, -20.4])

    wRi1 = Rot3.RzRyRx(np.pi / 20, 0, 0.0)
    wRi2 = Rot3.RzRyRx(0.0, np.pi / 6, 0.0)

    wTi1 = Pose3(wRi1, wti1)
    wTi2 = Pose3(wRi2, wti2)
    i2Ti1 = wTi2.between(wTi1)

    i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))

    # project 3D points to 2D image measurements
    intrinsics = Cal3Bundler()
    camera_i1 = PinholeCameraCal3Bundler(wTi1, intrinsics)
    camera_i2 = PinholeCameraCal3Bundler(wTi2, intrinsics)

    uv_im1 = []
    uv_im2 = []
    for point in points_3d:
        uv_im1.append(camera_i1.project(point))
        uv_im2.append(camera_i2.project(point))

    uv_im1 = np.vstack(uv_im1)
    uv_im2 = np.vstack(uv_im2)

    # return the points as keypoints and the essential matrix
    return Keypoints(coordinates=uv_im1), Keypoints(coordinates=uv_im2), i2Ei1
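
Since the simulated scene uses identity intrinsics, the projected image coordinates are already normalized, so every generated correspondence should satisfy the epipolar constraint x_i2^T @ i2Ei1 @ x_i1 = 0. A quick sanity check (a sketch):

keypoints_i1, keypoints_i2, i2Ei1 = simulate_two_planes_scene(10, 10)

# homogenize the 2D measurements
x_i1 = np.hstack([keypoints_i1.coordinates, np.ones((20, 1))])
x_i2 = np.hstack([keypoints_i2.coordinates, np.ones((20, 1))])

# per-correspondence epipolar residuals x_i2^T @ E @ x_i1, all approximately zero
residuals = np.einsum('ij,jk,ik->i', x_i2, i2Ei1.matrix(), x_i1)
np.testing.assert_allclose(residuals, 0.0, atol=1e-6)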
Example #6
    def test_convert_to_epipolar_lines_empty_input(self):
        """Test conversion of 0 2D points to epipolar lines using the essential matrix."""

        points = np.array([])  # empty array of 2D points
        f_matrix = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0]))).matrix()

        computed = feature_utils.convert_to_epipolar_lines(points, f_matrix)
        self.assertIsNone(computed)
Example #7
    def test_values(self):
        values = Values()
        E = EssentialMatrix(Rot3(), Unit3())
        tol = 1e-9

        values.insert(0, Point2(0, 0))
        values.insert(1, Point3(0, 0, 0))
        values.insert(2, Rot2())
        values.insert(3, Pose2())
        values.insert(4, Rot3())
        values.insert(5, Pose3())
        values.insert(6, Cal3_S2())
        values.insert(7, Cal3DS2())
        values.insert(8, Cal3Bundler())
        values.insert(9, E)
        values.insert(10, imuBias_ConstantBias())

        # Special cases for Vectors and Matrices
        # Note that gtsam's Eigen Vectors and Matrices require double-precision
        # floating-point numbers in column-major (Fortran-style) storage order,
        # whereas numpy.array defaults to row-major order and infers the dtype
        # from the inputs, e.g. np.array([1, 2, 3]) will have 'int' type.
        #
        # The wrapper will automatically fix the type and storage order for you,
        # but for performance reasons, it's recommended to specify the correct
        # type and storage order.
        # For vectors, the storage order is not important, but the dtype still is.
        vec = np.array([1., 2., 3.])
        values.insert(11, vec)
        mat = np.array([[1., 2.], [3., 4.]], order='F')
        values.insert(12, mat)
        # Test with dtype int and the default order='C'.
        # This still works, as the wrapper converts to the correct type and order for you,
        # but it is normally not recommended!
        mat2 = np.array([[1, 2, ], [3, 5]])
        values.insert(13, mat2)

        self.assertTrue(values.atPoint2(0).equals(Point2(0,0), tol))
        self.assertTrue(values.atPoint3(1).equals(Point3(0,0,0), tol))
        self.assertTrue(values.atRot2(2).equals(Rot2(), tol))
        self.assertTrue(values.atPose2(3).equals(Pose2(), tol))
        self.assertTrue(values.atRot3(4).equals(Rot3(), tol))
        self.assertTrue(values.atPose3(5).equals(Pose3(), tol))
        self.assertTrue(values.atCal3_S2(6).equals(Cal3_S2(), tol))
        self.assertTrue(values.atCal3DS2(7).equals(Cal3DS2(), tol))
        self.assertTrue(values.atCal3Bundler(8).equals(Cal3Bundler(), tol))
        self.assertTrue(values.atEssentialMatrix(9).equals(E, tol))
        self.assertTrue(values.atimuBias_ConstantBias(
            10).equals(imuBias_ConstantBias(), tol))

        # special cases for Vector and Matrix:
        actualVector = values.atVector(11)
        self.assertTrue(np.allclose(vec, actualVector, tol))
        actualMatrix = values.atMatrix(12)
        self.assertTrue(np.allclose(mat, actualMatrix, tol))
        actualMatrix2 = values.atMatrix(13)
        self.assertTrue(np.allclose(mat2, actualMatrix2, tol))
Example #8
    def test_bundle_adjust(self):
        """Tests the bundle adjustment for relative pose on a simulated scene."""
        two_view_estimator = TwoViewEstimator(matcher=None,
                                              verifier=None,
                                              inlier_support_processor=None,
                                              bundle_adjust_2view=True,
                                              eval_threshold_px=4)

        # Extract example poses.
        i1Ri2 = EXAMPLE_DATA.get_camera(1).pose().rotation()
        i1ti2 = EXAMPLE_DATA.get_camera(1).pose().translation()
        i2Ti1 = Pose3(i1Ri2, i1ti2).inverse()
        i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))

        # Perform bundle adjustment.
        i2Ri1_optimized, i2Ui1_optimized, corr_idxs = two_view_estimator.bundle_adjust(
            keypoints_i1=self.keypoints_i1,
            keypoints_i2=self.keypoints_i2,
            verified_corr_idxs=self.corr_idxs,
            camera_intrinsics_i1=Cal3Bundler(),
            camera_intrinsics_i2=Cal3Bundler(),
            i2Ri1_initial=i2Ei1.rotation(),
            i2Ui1_initial=i2Ei1.direction(),
            i2Ti1_prior=None,
        )

        # Assert
        rotation_angular_error = comp_utils.compute_relative_rotation_angle(
            i2Ri1_optimized, i2Ei1.rotation())
        translation_angular_error = comp_utils.compute_relative_unit_translation_angle(
            i2Ui1_optimized, i2Ei1.direction())
        self.assertLessEqual(rotation_angular_error, 1)
        self.assertLessEqual(translation_angular_error, 1)
        np.testing.assert_allclose(corr_idxs, self.corr_idxs)
Example #9
    def test_compute_epipolar_distances(self):
        """Test for epipolar distance computation using 2 sets of points and the essential matrix."""
        normalized_pts_i1 = np.array([[1.0, 3.5], [-2.0, 2.0]])
        normalized_pts_i2 = np.array([[2.0, -1.0], [1.0, 0.0]])

        i2Ri1 = Rot3.RzRyRx(0, np.deg2rad(30), np.deg2rad(10))
        i2ti1 = np.array([-0.5, 2.0, 0])
        i2Ei1 = EssentialMatrix(i2Ri1, Unit3(i2ti1))

        expected = np.array([1.637, 1.850])

        computed = verification_utils.compute_epipolar_distances(
            normalized_pts_i1, normalized_pts_i2, i2Ei1)

        np.testing.assert_allclose(computed, expected, rtol=1e-3)
Example #10
def essential_to_fundamental_matrix(
        i2Ei1: EssentialMatrix, camera_intrinsics_i1: Cal3Bundler,
        camera_intrinsics_i2: Cal3Bundler) -> np.ndarray:
    """Converts the essential matrix to fundamental matrix using camera intrinsics.

    Args:
        i2Ei1: essential matrix which maps points in image #i1 to lines in image #i2.
        camera_intrinsics_i1: intrinsics for image #i1.
        camera_intrinsics_i2: intrinsics for image #i2.

    Returns:
        Fundamental matrix i2Fi1 as numpy array of shape (3x3).
    """
    return np.linalg.inv(
        camera_intrinsics_i2.K().T) @ i2Ei1.matrix() @ np.linalg.inv(
            camera_intrinsics_i1.K())
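
This is the standard relation F = K2^{-T} E K1^{-1}. With the default Cal3Bundler calibration, K is the identity matrix, so the fundamental matrix equals the essential matrix, which is what the identity-intrinsics tests above rely on. A sketch:

E = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0])))
F = essential_to_fundamental_matrix(E, Cal3Bundler(), Cal3Bundler())
np.testing.assert_allclose(F, E.matrix())  # K == I, so i2Fi1 == i2Ei1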
Example #11
def recover_relative_pose_from_essential_matrix(
    i2Ei1: Optional[np.ndarray],
    verified_coordinates_i1: np.ndarray,
    verified_coordinates_i2: np.ndarray,
    camera_intrinsics_i1: Cal3Bundler,
    camera_intrinsics_i2: Cal3Bundler,
) -> Tuple[Optional[Rot3], Optional[Unit3]]:
    """Recovers the relative rotation and translation direction from essential matrix and verified correspondences
    using opencv's API.

    Args:
        i2Ei1: essential matrix as a numpy array, of shape 3x3.
        verified_coordinates_i1: coordinates of verified correspondences in image i1, of shape Nx2.
        verified_coordinates_i2: coordinates of verified correspondences in image i2, of shape Nx2.
        camera_intrinsics_i1: intrinsics for image i1.
        camera_intrinsics_i2: intrinsics for image i2.

    Returns:
        relative rotation i2Ri1, or None if the input essential matrix is None.
        relative translation direction i2Ui1, or None if the input essential matrix is None.
    """
    if i2Ei1 is None:
        return None, None

    # obtain points in normalized coordinates using intrinsics.
    normalized_coordinates_i1 = feature_utils.normalize_coordinates(
        verified_coordinates_i1, camera_intrinsics_i1)
    normalized_coordinates_i2 = feature_utils.normalize_coordinates(
        verified_coordinates_i2, camera_intrinsics_i2)

    # use opencv to recover pose
    _, i2Ri1, i2ti1, _ = cv.recoverPose(i2Ei1, normalized_coordinates_i1,
                                        normalized_coordinates_i2)
    i2Ri1 = Rot3(i2Ri1)
    i2Ui1 = Unit3(i2ti1.squeeze())
    i2Ei1_reconstructed = EssentialMatrix(i2Ri1, i2Ui1).matrix()

    # normalizing the two essential matrices
    i2Ei1_normalized = i2Ei1 / np.linalg.norm(i2Ei1, axis=None)
    i2Ei1_reconstructed_normalized = i2Ei1_reconstructed / np.linalg.norm(
        i2Ei1_reconstructed, axis=None)
    if not np.allclose(i2Ei1_normalized, i2Ei1_reconstructed_normalized):
        logger.warning(
            "Recovered R, t cannot create the input Essential Matrix")

    return i2Ri1, i2Ui1
Example #12
File: features.py, Project: asa/gtsfm
def convert_to_epipolar_lines(normalized_coordinates_i1: np.ndarray, i2Ei1: EssentialMatrix) -> Optional[np.ndarray]:
    """Convert coordinates to epipolar lines in image i2.

    The epipolar line in image i2 is given by i2Ei1 @ x_i1. A point x_i2 is on this line if x_i2^T @ i2Ei1 @ x_i1 = 0.

    Args:
        normalized_coordinates_i1: normalized coordinates in i1, of shape Nx2.
        i2Ei1: essential matrix.

    Returns:
        Corr. epipolar lines in i2, of shape Nx3.
    """
    if normalized_coordinates_i1 is None or normalized_coordinates_i1.size == 0:
        return None

    epipolar_lines = convert_to_homogenous_coordinates(normalized_coordinates_i1) @ i2Ei1.matrix().T
    return epipolar_lines
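
A small check of the point-line relationship stated in the docstring: any 2D point (x, y) with a*x + b*y + c == 0 lies on the returned line (a, b, c). A sketch:

E = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0])))
lines = convert_to_epipolar_lines(np.array([[10.0, -5.0]]), E)  # one line, as (a, b, c)
a, b, c = lines[0]

# construct a point on the line by solving a*0 + b*y + c = 0 for y
point_on_line = np.array([0.0, -c / b, 1.0])
np.testing.assert_allclose(point_on_line @ lines[0], 0.0, atol=1e-12)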
Example #13
    def test_convert_to_epipolar_lines_valid_input(self):
        """Test conversion of valid 2D points to epipolar lines using the essential matrix."""

        points = np.array([
            [10.0, -5.0],
            [3.5, 20.0],
        ])  # 2D points in image coordinates
        essential_mat = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0),
                                        Unit3(np.array([-5, 2, 0])))

        computed = feature_utils.convert_to_epipolar_lines(
            points, essential_mat)

        expected = np.array([
            [-2.36351579, -5.90878948, 1.75364193],
            [-0.65653216, -1.64133041, -19.75129171],
        ])

        np.testing.assert_allclose(computed, expected)
Example #14
File: metrics.py, Project: asa/gtsfm
def count_correct_correspondences(
    keypoints_i1: Keypoints,
    keypoints_i2: Keypoints,
    intrinsics_i1: Cal3Bundler,
    intrinsics_i2: Cal3Bundler,
    i2Ti1: Pose3,
    epipolar_dist_threshold: float,
) -> int:
    """Checks the correspondences for epipolar distances and counts ones which are below the threshold.

    Args:
        keypoints_i1: keypoints in image i1.
        keypoints_i2: corr. keypoints in image i2.
        intrinsics_i1: intrinsics for i1.
        intrinsics_i2: intrinsics for i2.
        i2Ti1: ground truth relative pose.
        epipolar_dist_threshold: max acceptable distance for a correct correspondence.

    Raises:
        ValueError: when the numbers of keypoints in the two images do not match.

    Returns:
        Number of correspondences which are correct.
    """
    # TODO: add unit test, with mocking.
    if len(keypoints_i1) != len(keypoints_i2):
        raise ValueError("Keypoints must have same counts")

    if len(keypoints_i1) == 0:
        return 0

    normalized_coords_i1 = feature_utils.normalize_coordinates(
        keypoints_i1.coordinates, intrinsics_i1)
    normalized_coords_i2 = feature_utils.normalize_coordinates(
        keypoints_i2.coordinates, intrinsics_i2)
    i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))

    epipolar_distances = verification_utils.compute_epipolar_distances(
        normalized_coords_i1, normalized_coords_i2, i2Ei1)
    return np.count_nonzero(epipolar_distances < epipolar_dist_threshold)
Example #15
    def test_VisualISAMExample(self):

        # Create a factor
        poseKey1 = symbol('x', 1)
        poseKey2 = symbol('x', 2)
        trueRotation = Rot3.RzRyRx(0.15, 0.15, -0.20)
        trueTranslation = Point3(+0.5, -1.0, +1.0)
        trueDirection = Unit3(trueTranslation)
        E = EssentialMatrix(trueRotation, trueDirection)
        model = gtsam.noiseModel.Isotropic.Sigma(5, 0.25)
        factor = EssentialMatrixConstraint(poseKey1, poseKey2, E, model)

        #  Create a linearization point at the zero-error point
        pose1 = Pose3(Rot3.RzRyRx(0.00, -0.15, 0.30), Point3(-4.0, 7.0, -10.0))
        pose2 = Pose3(
            Rot3.RzRyRx(0.179693265735950, 0.002945368776519,
                        0.102274823253840),
            Point3(-3.37493895, 6.14660244, -8.93650986))

        expected = np.zeros((5, 1))
        actual = factor.evaluateError(pose1, pose2)
        self.gtsamAssertEquals(actual, expected, 1e-8)
Example #16
def generate_random_essential_matrix() -> EssentialMatrix:
    rotation_angles = np.random.uniform(low=0.0, high=2 * np.pi, size=(3, ))
    R = Rot3.RzRyRx(rotation_angles[0], rotation_angles[1], rotation_angles[2])
    t = np.random.uniform(low=-1.0, high=1.0, size=(3, ))

    return EssentialMatrix(R, Unit3(t))
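
The generator draws from NumPy's global RNG, so seeding it first makes the result reproducible in tests (a usage sketch):

np.random.seed(42)  # fix the RNG so the random essential matrix is reproducible
i2Ei1 = generate_random_essential_matrix()
print(i2Ei1.rotation().matrix())
print(i2Ei1.direction().point3())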