Example No. 1
def calculate_triangulation_angle_in_degrees(
        camera_1: PinholeCameraCal3Bundler, camera_2: PinholeCameraCal3Bundler,
        point_3d: np.ndarray) -> float:
    """Calculates the angle formed at the 3D point by the rays backprojected from 2 cameras.
    In the setup with X (point_3d) and two cameras C1 and C2, the triangulation angle is the angle between rays C1-X
    and C2-X, i.e. the angle subtended at the 3d point.
        X
       / \
      /   \
     /     \
    C1      C2
    References:
    - https://github.com/colmap/colmap/blob/dev/src/base/triangulation.cc#L122
    
    Args:
        camera_1: the first camera.
        camera_2: the second camera.
        point_3d: the 3d point imaged by the two cameras, at which the angle between the backprojected
                  rays is computed.
    Returns:
        the angle formed at the 3d point, in degrees.
    """
    camera_center_1: np.ndarray = camera_1.pose().translation()
    camera_center_2: np.ndarray = camera_2.pose().translation()

    # compute the two rays
    ray_1 = point_3d - camera_center_1
    ray_2 = point_3d - camera_center_2

    return geometry_utils.compute_relative_unit_translation_angle(
        Unit3(ray_1), Unit3(ray_2))
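For intuition, the angle above can also be computed directly with numpy from the two rays, without the gtsam helpers. A minimal sketch (the function name triangulation_angle_deg is hypothetical; it assumes camera_center_1, camera_center_2, and point_3d are 3-element numpy arrays):

import numpy as np


def triangulation_angle_deg(camera_center_1: np.ndarray, camera_center_2: np.ndarray, point_3d: np.ndarray) -> float:
    """Angle (in degrees) subtended at point_3d by the rays to the two camera centers."""
    ray_1 = point_3d - camera_center_1
    ray_2 = point_3d - camera_center_2
    cos_angle = np.dot(ray_1, ray_2) / (np.linalg.norm(ray_1) * np.linalg.norm(ray_2))
    # clip to guard against values slightly outside [-1, 1] due to floating-point error
    return float(np.degrees(np.arccos(np.clip(cos_angle, -1.0, 1.0))))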
Example No. 2
def test_convert_cartesian_to_spherical_directions() -> None:
    """Check that correct spherical coordinates are obtained for certain directions."""
    directions = [
        Unit3(np.array([1, 0, 0])),
        Unit3(np.array([0, 1, 0])),
        Unit3(np.array([0, 0, 1])),
    ]
    expected_spherical_coordinates = np.deg2rad(np.array([[90, 90], [0, 0], [0, 90]]))
    spherical_coordinates = conversion_utils.cartesian_to_spherical_directions(directions)
    np.testing.assert_allclose(spherical_coordinates, expected_spherical_coordinates)
Example No. 3
    def test_compute_relative_unit_translation_angle(self):
        """Tests the relative angle between two unit-translations."""

        U_1 = Unit3(np.array([1, 0, 0]))
        U_2 = Unit3(np.array([0.5, 0.5, 0]))

        # returns angle in degrees
        computed_deg = geometry_comparisons.compute_relative_unit_translation_angle(
            U_1, U_2)
        expected_deg = 45

        self.assertAlmostEqual(computed_deg, expected_deg, places=3)
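The 45 degree expectation can be checked by hand from the dot product of the two unit vectors; a minimal numpy sketch, independent of the geometry_comparisons helper (which is assumed to compute the equivalent quantity):

import numpy as np

u_1 = np.array([1.0, 0.0, 0.0])
u_2 = np.array([0.5, 0.5, 0.0])
u_2 = u_2 / np.linalg.norm(u_2)  # Unit3 normalizes its input, so normalize here too

angle_deg = np.degrees(np.arccos(np.clip(np.dot(u_1, u_2), -1.0, 1.0)))
assert np.isclose(angle_deg, 45.0)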
Example No. 4
def test_convert_spherical_to_cartesian_directions() -> None:
    """Check that Euclidean -> Spherical -> Euclidean conversion yields same results."""
    directions = [
        Unit3(np.array([1, 0, 0])),
        Unit3(np.array([0, 1, 0])),
        Unit3(np.array([0, 0, 1])),
        Unit3(np.array([1, 0, 1])),
        Unit3(np.array([0, 1, 1])),
    ]
    spherical_coordinates = conversion_utils.cartesian_to_spherical_directions(directions)
    cartesian_directions = conversion_utils.spherical_to_cartesian_directions(spherical_coordinates)
    for i in range(len(cartesian_directions)):
        np.testing.assert_almost_equal(cartesian_directions[i].point3(), directions[i].point3())
Example No. 5
def epipolar_inlier_correspondences(
    keypoints_i1: Keypoints,
    keypoints_i2: Keypoints,
    intrinsics_i1: Cal3Bundler,
    intrinsics_i2: Cal3Bundler,
    i2Ti1: Pose3,
    dist_threshold: float,
) -> Tuple[Optional[np.ndarray], Optional[np.ndarray]]:
    """Compute inlier correspondences using epipolar geometry and the ground truth relative pose.

    Args:
        keypoints_i1: keypoints in image i1.
        keypoints_i2: corr. keypoints in image i2.
        intrinsics_i1: intrinsics for i1.
        intrinsics_i2: intrinsics for i2.
        i2Ti1: ground truth relative pose of camera i1 expressed in the i2 frame.
        dist_threshold: max acceptable distance for a correct correspondence.

    Returns:
        is_inlier: (N, ) mask of inlier correspondences.
        distance_squared: squared sampson distance between corresponding keypoints.
    """
    i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))
    i2Fi1 = verification_utils.essential_to_fundamental_matrix(
        i2Ei1, intrinsics_i1, intrinsics_i2)
    distance_squared = verification_utils.compute_epipolar_distances_sq_sampson(
        keypoints_i1.coordinates, keypoints_i2.coordinates, i2Fi1)
    is_inlier = distance_squared < dist_threshold**2 if distance_squared is not None else None

    return is_inlier, distance_squared
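The helper compute_epipolar_distances_sq_sampson is not shown here; for reference, the squared Sampson distance for a single correspondence is d^2 = (x2^T F x1)^2 / ((F x1)_1^2 + (F x1)_2^2 + (F^T x2)_1^2 + (F^T x2)_2^2). A minimal numpy sketch under that assumption (x1 and x2 are homogeneous 3-vectors, i2Fi1 is the 3x3 fundamental matrix; the function name is hypothetical):

import numpy as np


def sampson_distance_sq(x1: np.ndarray, x2: np.ndarray, i2Fi1: np.ndarray) -> float:
    """Squared Sampson distance for one correspondence, given homogeneous points and the fundamental matrix."""
    Fx1 = i2Fi1 @ x1        # epipolar line in image i2
    Ftx2 = i2Fi1.T @ x2     # epipolar line in image i1
    numerator = float(x2 @ i2Fi1 @ x1) ** 2
    denominator = Fx1[0] ** 2 + Fx1[1] ** 2 + Ftx2[0] ** 2 + Ftx2[1] ** 2
    return numerator / denominator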
Example No. 6
def recover_relative_pose_from_essential_matrix(
    i2Ei1: np.ndarray,
    verified_coordinates_i1: np.ndarray,
    verified_coordinates_i2: np.ndarray,
    camera_intrinsics_i1: Cal3Bundler,
    camera_intrinsics_i2: Cal3Bundler,
) -> Tuple[Rot3, Unit3]:
    """Recovers the relative rotation and translation direction from essential matrix and verified correspondences
    using opencv's API.

    Args:
        i2Ei1: essential matrix as a numpy array, of shape 3x3.
        verified_coordinates_i1: coordinates of verified correspondences in image i1, of shape Nx2.
        verified_coordinates_i2: coordinates of verified correspondences in image i2, of shape Nx2.
        camera_intrinsics_i1: intrinsics for image i1.
        camera_intrinsics_i2: intrinsics for image i2.

    Returns:
        relative rotation i2Ri1.
        relative translation direction i2Ui1.
    """
    # obtain points in normalized coordinates using intrinsics.
    normalized_coordinates_i1 = feature_utils.normalize_coordinates(
        verified_coordinates_i1, camera_intrinsics_i1)
    normalized_coordinates_i2 = feature_utils.normalize_coordinates(
        verified_coordinates_i2, camera_intrinsics_i2)

    # use opencv to recover pose
    _, i2Ri1, i2ti1, _ = cv.recoverPose(i2Ei1, normalized_coordinates_i1,
                                        normalized_coordinates_i2)

    return Rot3(i2Ri1), Unit3(i2ti1.squeeze())
Example No. 7
    def test_bundle_adjust(self):
        """Tests the bundle adjustment for relative pose on a simulated scene."""
        two_view_estimator = TwoViewEstimator(matcher=None,
                                              verifier=None,
                                              inlier_support_processor=None,
                                              bundle_adjust_2view=True,
                                              eval_threshold_px=4)

        # Extract example poses.
        i1Ri2 = EXAMPLE_DATA.get_camera(1).pose().rotation()
        i1ti2 = EXAMPLE_DATA.get_camera(1).pose().translation()
        i2Ti1 = Pose3(i1Ri2, i1ti2).inverse()
        i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))

        # Perform bundle adjustment.
        i2Ri1_optimized, i2Ui1_optimized, corr_idxs = two_view_estimator.bundle_adjust(
            keypoints_i1=self.keypoints_i1,
            keypoints_i2=self.keypoints_i2,
            verified_corr_idxs=self.corr_idxs,
            camera_intrinsics_i1=Cal3Bundler(),
            camera_intrinsics_i2=Cal3Bundler(),
            i2Ri1_initial=i2Ei1.rotation(),
            i2Ui1_initial=i2Ei1.direction(),
            i2Ti1_prior=None,
        )

        # Assert
        rotation_angular_error = comp_utils.compute_relative_rotation_angle(
            i2Ri1_optimized, i2Ei1.rotation())
        translation_angular_error = comp_utils.compute_relative_unit_translation_angle(
            i2Ui1_optimized, i2Ei1.direction())
        self.assertLessEqual(rotation_angular_error, 1)
        self.assertLessEqual(translation_angular_error, 1)
        np.testing.assert_allclose(corr_idxs, self.corr_idxs)
Example No. 8
def compute_epipolar_distances(normalized_coords_i1: np.ndarray,
                               normalized_coords_i2: np.ndarray,
                               i2Ei1: EssentialMatrix) -> Optional[np.ndarray]:
    """Compute symmetric point-line epipolar distances between normalized coordinates of correspondences.

    Args:
        normalized_coords_i1: normalized coordinates in image i1, of shape Nx2.
        normalized_coords_i2: corr. normalized coordinates in image i2, of shape Nx2.
        i2Ei1: essential matrix between two images

    Returns:
        Symmetric epipolar distances for each row of the input, of shape N.
    """
    if (normalized_coords_i1 is None or normalized_coords_i1.size == 0
            or normalized_coords_i2 is None or normalized_coords_i2.size == 0):
        return None

    # construct the essential matrix in the opposite direction
    i2Ti1 = Pose3(i2Ei1.rotation(), i2Ei1.direction().point3())
    i1Ti2 = i2Ti1.inverse()
    i1Ei2 = EssentialMatrix(i1Ti2.rotation(), Unit3(i1Ti2.translation()))

    # get lines in i2 and i1
    epipolar_lines_i2 = feature_utils.convert_to_epipolar_lines(
        normalized_coords_i1, i2Ei1)
    epipolar_lines_i1 = feature_utils.convert_to_epipolar_lines(
        normalized_coords_i2, i1Ei2)

    # compute two distances and average them
    return 0.5 * (feature_utils.compute_point_line_distances(
        normalized_coords_i1, epipolar_lines_i1) +
                  feature_utils.compute_point_line_distances(
                      normalized_coords_i2, epipolar_lines_i2))
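Each of the two terms averaged above is a point-to-epipolar-line distance. A minimal numpy sketch of that primitive (hypothetical helper; it assumes points is an Nx2 array and lines an Nx3 array of (a, b, c) coefficients of ax + by + c = 0):

import numpy as np


def point_line_distances(points: np.ndarray, lines: np.ndarray) -> np.ndarray:
    """Distance from each 2D point (x, y) to its line (a, b, c): |ax + by + c| / sqrt(a^2 + b^2)."""
    points_homogeneous = np.hstack([points, np.ones((points.shape[0], 1))])
    numerators = np.abs(np.sum(points_homogeneous * lines, axis=1))
    return numerators / np.linalg.norm(lines[:, :2], axis=1)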
Example No. 9
    def test_lund_door(self):
        loader = FolderLoader(str(DATA_ROOT_PATH / "set1_lund_door"),
                              image_extension="JPG")

        expected_wTi_list = [
            loader.get_camera_pose(x) for x in range(len(loader))
        ]
        wRi_list = [x.rotation() for x in expected_wTi_list]

        i2Ui1_dict = dict()
        for (i1, i2) in loader.get_valid_pairs():
            i2Ti1 = expected_wTi_list[i2].between(expected_wTi_list[i1])

            i2Ui1_dict[(i1, i2)] = Unit3((i2Ti1.translation()))

        wti_list = self.obj.run(len(loader), i2Ui1_dict, wRi_list)

        wTi_list = [
            Pose3(wRi, wti) if wti is not None else None
            for (wRi, wti) in zip(wRi_list, wti_list)
        ]

        # TODO: using a very high value for the relative translation threshold. Fix it.
        self.assertTrue(
            geometry_comparisons.compare_global_poses(wTi_list,
                                                      expected_wTi_list,
                                                      trans_err_thresh=2e1))
Example No. 10
def get_twoview_translation_directions(
        wTi_list: List[Optional[Pose3]]
) -> Dict[Tuple[int, int], Optional[Unit3]]:
    """Generate synthetic measurements of the 2-view translation directions between image pairs.

    Args:
        wTi_list: List of poses (e.g. could be ground truth).

    Returns:
        i2Ui1_dict: Dict from (i1, i2) to unit translation direction i2Ui1.
    """
    number_images = len(wTi_list)  # vs. using ba_output.number_images()

    # check against all possible image pairs -- compute unit translation directions
    i2Ui1_dict = {}
    possible_img_pair_idxs = list(
        itertools.combinations(range(number_images), 2))
    for (i1, i2) in possible_img_pair_idxs:
        # compute the exact relative pose
        if wTi_list[i1] is None or wTi_list[i2] is None:
            i2Ui1 = None
        else:
            i2Ti1 = wTi_list[i2].between(wTi_list[i1])
            i2Ui1 = Unit3(i2Ti1.translation())
        i2Ui1_dict[(i1, i2)] = i2Ui1
    return i2Ui1_dict
Example No. 11
def simulate_two_planes_scene(M: int, N: int) -> Tuple[Keypoints, Keypoints, EssentialMatrix]:
    """Generate a scene where 3D points are on two planes, and projects the points to the 2 cameras. There are M points
    on plane 1, and N points on plane 2.

    The two planes in this test are:
    1. -10x -y -20z +150 = 0
    2. 15x -2y -35z +200 = 0

    Args:
        M: number of points on 1st plane.
        N: number of points on 2nd plane.

    Returns:
        keypoints for image i1, of length (M+N).
        keypoints for image i2, of length (M+N).
        Essential matrix i2Ei1.
    """
    # range of 3D points
    range_x_coordinate = (-5, 7)
    range_y_coordinate = (-10, 10)

    # define the plane equation
    plane1_coeffs = (-10, -1, -20, 150)
    plane2_coeffs = (15, -2, -35, 200)

    # sample the points from planes
    plane1_points = sample_points_on_plane(plane1_coeffs, range_x_coordinate, range_y_coordinate, M)
    plane2_points = sample_points_on_plane(plane2_coeffs, range_x_coordinate, range_y_coordinate, N)

    points_3d = np.vstack((plane1_points, plane2_points))

    # define the camera poses and compute the essential matrix
    wti1 = np.array([0.1, 0, -20])
    wti2 = np.array([1, -2, -20.4])

    wRi1 = Rot3.RzRyRx(np.pi / 20, 0, 0.0)
    wRi2 = Rot3.RzRyRx(0.0, np.pi / 6, 0.0)

    wTi1 = Pose3(wRi1, wti1)
    wTi2 = Pose3(wRi2, wti2)
    i2Ti1 = wTi2.between(wTi1)

    i2Ei1 = EssentialMatrix(i2Ti1.rotation(), Unit3(i2Ti1.translation()))

    # project 3D points to 2D image measurements
    intrinsics = Cal3Bundler()
    camera_i1 = PinholeCameraCal3Bundler(wTi1, intrinsics)
    camera_i2 = PinholeCameraCal3Bundler(wTi2, intrinsics)

    uv_im1 = []
    uv_im2 = []
    for point in points_3d:
        uv_im1.append(camera_i1.project(point))
        uv_im2.append(camera_i2.project(point))

    uv_im1 = np.vstack(uv_im1)
    uv_im2 = np.vstack(uv_im2)

    # return the points as keypoints and the essential matrix
    return Keypoints(coordinates=uv_im1), Keypoints(coordinates=uv_im2), i2Ei1
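The helper sample_points_on_plane is used above but not listed; a hypothetical sketch of how it could be implemented, sampling x and y uniformly and solving the plane equation ax + by + cz + d = 0 for z (assumes c != 0):

import numpy as np
from typing import Tuple


def sample_points_on_plane(
    plane_coeffs: Tuple[float, float, float, float],
    range_x: Tuple[float, float],
    range_y: Tuple[float, float],
    num_points: int,
) -> np.ndarray:
    """Sample `num_points` 3D points on the plane ax + by + cz + d = 0, returned as a (num_points, 3) array."""
    a, b, c, d = plane_coeffs
    x = np.random.uniform(range_x[0], range_x[1], num_points)
    y = np.random.uniform(range_y[0], range_y[1], num_points)
    z = -(a * x + b * y + d) / c  # solve the plane equation for z
    return np.column_stack([x, y, z])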
Example No. 12
    def verify_with_exact_intrinsics(
        self,
        keypoints_i1: Keypoints,
        keypoints_i2: Keypoints,
        match_indices: np.ndarray,
        camera_intrinsics_i1: Cal3Bundler,
        camera_intrinsics_i2: Cal3Bundler,
    ) -> Tuple[Optional[Rot3], Optional[Unit3], np.ndarray]:
        """Estimates the essential matrix and verifies the feature matches.

        Note: this function is preferred when camera intrinsics are known precisely. The feature coordinates are
        normalized using the intrinsics, and the essential matrix is estimated directly.

        Args:
            keypoints_i1: detected features in image #i1.
            keypoints_i2: detected features in image #i2.
            match_indices: matches as indices of features from both images, of shape (N3, 2), where N3 <= min(N1, N2).
            camera_intrinsics_i1: intrinsics for image #i1.
            camera_intrinsics_i2: intrinsics for image #i2.

        Returns:
            Estimated rotation i2Ri1, or None if it cannot be estimated.
            Estimated unit translation i2Ui1, or None if it cannot be estimated.
            Indices of verified correspondences, of shape (N, 2) with N <= N3. These are subset of match_indices.
        """
        v_inlier_idxs = np.array([], dtype=np.uint32)

        # check if we don't have the minimum number of points
        if match_indices.shape[0] < self.min_pts:
            return None, None, v_inlier_idxs

        # set a random seed using descriptor data for repeatability
        np.random.seed(
            int(1000 * (match_indices[0, 0] + match_indices[0, 1]) %
                (UINT32_MAX)))

        # get the number of entries in the input
        num_matches = match_indices.shape[0]

        # get the number of verified_pts we will output
        num_verifier_pts = np.random.randint(low=0, high=num_matches)

        # randomly sample the indices for matches which will be verified
        v_inlier_idxs = np.random.choice(num_matches,
                                         num_verifier_pts,
                                         replace=False).astype(np.uint32)

        # generate a random pose only if we have at least the minimum number of verified points
        if num_verifier_pts >= self.min_pts:
            # generate random rotation and translation for essential matrix
            rotation_angles = np.random.uniform(low=0.0,
                                                high=2 * np.pi,
                                                size=(3, ))
            i2Ri1 = Rot3.RzRyRx(rotation_angles[0], rotation_angles[1],
                                rotation_angles[2])
            i2Ti1 = Point3(np.random.uniform(low=-1.0, high=1.0, size=(3, )))

            return i2Ri1, Unit3(i2Ti1), match_indices[v_inlier_idxs]
        else:
            return None, None, match_indices[v_inlier_idxs]
Example No. 13
def compute_relative_pose_metrics(
    i2Ri1_computed: Optional[Rot3],
    i2Ui1_computed: Optional[Unit3],
    wTi1_expected: Optional[Pose3],
    wTi2_expected: Optional[Pose3],
) -> Tuple[Optional[float], Optional[float]]:
    """Compute the metrics on relative camera pose.

    Args:
        i2Ri1_computed: computed relative rotation.
        i2Ui1_computed: computed relative translation direction.
        wTi1_expected: expected global pose of camera i1.
        wTi2_expected: expected global pose of camera i2.

    Returns:
        Rotation error, in degrees
        Unit translation error, in degrees
    """
    if wTi1_expected is not None and wTi2_expected is not None:
        i2Ti1_expected = wTi2_expected.between(wTi1_expected)
        R_error_deg = comp_utils.compute_relative_rotation_angle(
            i2Ri1_computed, i2Ti1_expected.rotation())
        U_error_deg = comp_utils.compute_relative_unit_translation_angle(
            i2Ui1_computed, Unit3(i2Ti1_expected.translation()))
    else:
        return (None, None)

    return (R_error_deg, U_error_deg)
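compute_relative_rotation_angle is assumed to return the geodesic angle between the two rotations; a minimal gtsam/numpy sketch of that quantity (the function name below is hypothetical):

import numpy as np
from gtsam import Rot3


def relative_rotation_angle_deg(R_1: Rot3, R_2: Rot3) -> float:
    """Angle (in degrees) of the rotation taking R_1 to R_2, i.e. the norm of Log(R_1.inverse() * R_2)."""
    relative_rotation = R_1.between(R_2)
    return float(np.degrees(np.linalg.norm(Rot3.Logmap(relative_rotation))))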
Example No. 14
    def test_convert_to_epipolar_lines_empty_input(self):
        """Test conversion of 0 2D points to epipolar lines using the essential matrix."""

        points = np.array([])  # empty array of 2d points
        f_matrix = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0]))).matrix()

        computed = feature_utils.convert_to_epipolar_lines(points, f_matrix)
        self.assertIsNone(computed)
Example No. 15
    def test_values(self):
        values = Values()
        E = EssentialMatrix(Rot3(), Unit3())
        tol = 1e-9

        values.insert(0, Point2(0, 0))
        values.insert(1, Point3(0, 0, 0))
        values.insert(2, Rot2())
        values.insert(3, Pose2())
        values.insert(4, Rot3())
        values.insert(5, Pose3())
        values.insert(6, Cal3_S2())
        values.insert(7, Cal3DS2())
        values.insert(8, Cal3Bundler())
        values.insert(9, E)
        values.insert(10, imuBias_ConstantBias())

        # Special cases for Vectors and Matrices
        # Note that gtsam's Eigen Vectors and Matrices requires double-precision
        # floating point numbers in column-major (Fortran style) storage order,
        # whereas by default, numpy.array is in row-major order and the type is
        # in whatever the number input type is, e.g. np.array([1,2,3])
        # will have 'int' type.
        #
        # The wrapper will automatically fix the type and storage order for you,
        # but for performance reasons, it's recommended to specify the correct
        # type and storage order.
        # for vectors, the order is not important, but dtype still is
        vec = np.array([1., 2., 3.])
        values.insert(11, vec)
        mat = np.array([[1., 2.], [3., 4.]], order='F')
        values.insert(12, mat)
        # Test with dtype int and the default order='C'
        # This still works as the wrapper converts to the correct type and order for you
        # but is normally not recommended!
        mat2 = np.array([[1, 2, ], [3, 5]])
        values.insert(13, mat2)

        self.assertTrue(values.atPoint2(0).equals(Point2(0,0), tol))
        self.assertTrue(values.atPoint3(1).equals(Point3(0,0,0), tol))
        self.assertTrue(values.atRot2(2).equals(Rot2(), tol))
        self.assertTrue(values.atPose2(3).equals(Pose2(), tol))
        self.assertTrue(values.atRot3(4).equals(Rot3(), tol))
        self.assertTrue(values.atPose3(5).equals(Pose3(), tol))
        self.assertTrue(values.atCal3_S2(6).equals(Cal3_S2(), tol))
        self.assertTrue(values.atCal3DS2(7).equals(Cal3DS2(), tol))
        self.assertTrue(values.atCal3Bundler(8).equals(Cal3Bundler(), tol))
        self.assertTrue(values.atEssentialMatrix(9).equals(E, tol))
        self.assertTrue(values.atimuBias_ConstantBias(
            10).equals(imuBias_ConstantBias(), tol))

        # special cases for Vector and Matrix:
        actualVector = values.atVector(11)
        self.assertTrue(np.allclose(vec, actualVector, tol))
        actualMatrix = values.atMatrix(12)
        self.assertTrue(np.allclose(mat, actualMatrix, tol))
        actualMatrix2 = values.atMatrix(13)
        self.assertTrue(np.allclose(mat2, actualMatrix2, tol))
Example No. 16
    def test_convert_to_epipolar_lines_none_input(self):
        """Test conversion of None to epipolar lines using the essential matrix."""

        points = None
        E_matrix = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0])))
        F_matrix = E_matrix.matrix()  # using identity intrinsics

        computed = feature_utils.convert_to_epipolar_lines(points, F_matrix)
        self.assertIsNone(computed)
Example No. 17
    def test_compute_translation_to_direction_angle_is_zero(self):
        """The measured direction matches the estimated relative translation exactly, so the angle is 0."""
        i2Ui1_measured = Unit3(Point3(1, 0, 0))
        wTi2_estimated = Pose3(Rot3(), Point3(0, 0, 0))
        wTi1_estimated = Pose3(Rot3(), Point3(2, 0, 0))
        self.assertEqual(
            geometry_comparisons.compute_translation_to_direction_angle(
                i2Ui1_measured, wTi2_estimated, wTi1_estimated),
            0.0,
        )
Example No. 18
    def verify(
        self,
        keypoints_i1: Keypoints,
        keypoints_i2: Keypoints,
        match_indices: np.ndarray,
        camera_intrinsics_i1: Cal3Bundler,
        camera_intrinsics_i2: Cal3Bundler,
    ) -> Tuple[Optional[Rot3], Optional[Unit3], np.ndarray, float]:
        """Performs verification of correspondences between two images to recover the relative pose and indices of
        verified correspondences.

        Args:
            keypoints_i1: detected features in image #i1.
            keypoints_i2: detected features in image #i2.
            match_indices: matches as indices of features from both images, of shape (N3, 2), where N3 <= min(N1, N2).
            camera_intrinsics_i1: intrinsics for image #i1.
            camera_intrinsics_i2: intrinsics for image #i2.

        Returns:
            Estimated rotation i2Ri1, or None if it cannot be estimated.
            Estimated unit translation i2Ui1, or None if it cannot be estimated.
            Indices of verified correspondences, of shape (N, 2) with N <= N3. These are subset of match_indices.
            Inlier ratio w.r.t. the estimated model, i.e. #final RANSAC inliers / #putative matches.
        """
        if match_indices.shape[0] < self._min_matches:
            logger.info("[GRIC] Not enough correspondences for verification.")
            return self._failure_result

        uv_i1 = keypoints_i1.coordinates[match_indices[:, 0]]
        uv_i2 = keypoints_i2.coordinates[match_indices[:, 1]]

        result_dict = self.__estimate_two_view_geometry(
            uv_i1, uv_i2, camera_intrinsics_i1, camera_intrinsics_i2)

        if not result_dict["success"]:
            logger.info("[GRIC] matrix estimation unsuccessful.")
            return self._failure_result

        logger.info("Two view configuration: %s",
                    ConfigurationType(result_dict["configuration_type"]))

        inlier_ratio_est_model = result_dict[
            "num_inliers"] / match_indices.shape[0]

        inlier_mask = np.array(result_dict["inliers"])
        v_corr_idxs = match_indices[inlier_mask]

        # See https://github.com/colmap/colmap/blob/dev/src/base/pose.h#L72 for quaternion coefficient ordering
        qw, qx, qy, qz = result_dict["qvec"]
        i2Ui1 = result_dict["tvec"]
        i2Ri1 = Rot3(qw, qx, qy, qz)
        i2Ui1 = Unit3(i2Ui1)

        return i2Ri1, i2Ui1, v_corr_idxs, inlier_ratio_est_model
Example No. 19
def cast_to_measurements_variable_in_global_coordinate_frame(
    i2Ui1_dict: Dict[Tuple[int, int], Optional[Unit3]], wRi_list: List[Optional[Rot3]], noise_model: gtsam.noiseModel
) -> BinaryMeasurementsUnit3:
    """Rotate the relative translation direction measurements into the world frame using the global rotations.

    Args:
        i2Ui1_dict: relative unit-translation directions, keyed by (i1, i2) camera pair indices.
        wRi_list: global rotation for each camera, or None if unavailable.
        noise_model: noise model to attach to each measurement.

    Returns:
        Unit translation measurements expressed in the world coordinate frame.
    """
    w_i2Ui1_measurements = BinaryMeasurementsUnit3()
    for (i1, i2), i2Ui1 in i2Ui1_dict.items():
        wRi2 = wRi_list[i2]
        if i2Ui1 is not None and wRi2 is not None:
            # TODO: what if wRi2 is None, but wRi1 is not? Can we still transform?
            w_i2Ui1_measurements.append(BinaryMeasurementUnit3(i2, i1, Unit3(wRi2.rotate(i2Ui1.point3())), noise_model))

    return w_i2Ui1_measurements
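A minimal usage sketch, assuming the function above is importable and that gtsam provides BinaryMeasurementsUnit3 and an isotropic noise model (the two-camera setup below is hypothetical):

import numpy as np
import gtsam
from gtsam import Rot3, Unit3

# two cameras with identity global rotations, and one relative direction measurement along x
wRi_list = [Rot3(), Rot3()]
i2Ui1_dict = {(0, 1): Unit3(np.array([1.0, 0.0, 0.0]))}
noise_model = gtsam.noiseModel.Isotropic.Sigma(3, 0.01)

w_i2Ui1_measurements = cast_to_measurements_variable_in_global_coordinate_frame(i2Ui1_dict, wRi_list, noise_model)
# with identity rotations, the world-frame direction equals the measured direction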
Example No. 20
    def test_convert_to_epipolar_lines_valid_input(self):
        """Test conversion of valid 2D points to epipolar lines using the fundamental matrix, against manual
        computation and with OpenCV's output."""

        points = np.array([[10.0, -5.0], [3.5, 20.0],])  # 2d points in homogeneous coordinates
        E_matrix = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0])))
        F_matrix = E_matrix.matrix()  # using identity intrinsics
        expected_opencv = cv.computeCorrespondEpilines(points.reshape(-1, 1, 2), 1, F_matrix)
        expected_opencv = np.squeeze(expected_opencv)  # converting to 2D array as opencv adds a singleton dimension

        computed = feature_utils.convert_to_epipolar_lines(points, F_matrix)
        computed_normalized = computed / np.linalg.norm(computed[:, :2], axis=1, keepdims=True)
        np.testing.assert_allclose(computed_normalized, expected_opencv)
Example No. 21
def test_filter_to_cycle_consistent_edges() -> None:
    """Ensure correct edges are kept in a 2-triplet scenario.

    Scenario Ground Truth: consider 5 camera poses in a line, connected as follows, all with identity rotations:

    Spatial layout:
      _________    ________
     /         \\ /        \
    i4 -- i3 -- i2 -- i1 -- i0

    Topological layout:

     i4          i0
           i2
     i3          i1

    Suppose the measurement for (i2,i4) was corrupted by 15 degrees.
    """
    i2Ri1_dict = {
        (0, 1): Rot3(),
        (1, 2): Rot3(),
        (0, 2): Rot3(),
        (2, 3): Rot3(),
        (3, 4): Rot3(),
        (2, 4): Rot3.Ry(np.deg2rad(15)),
    }
    i2Ui1_dict = {
        (0, 1): Unit3(np.array([1, 0, 0])),
        (1, 2): Unit3(np.array([1, 0, 0])),
        (0, 2): Unit3(np.array([1, 0, 0])),
        (2, 3): Unit3(np.array([1, 0, 0])),
        (3, 4): Unit3(np.array([1, 0, 0])),
        (2, 4): Unit3(np.array([1, 0, 0])),
    }
    # The ViewGraphEstimator assumes these dicts contain the keys corresponding to the rotation edges.
    calibrations = {k: Cal3Bundler() for k in range(0, 5)}
    corr_idxs_i1i2 = {i1i2: np.array([]) for i1i2 in i2Ri1_dict.keys()}
    keypoints = {k: np.array([]) for k in range(0, 5)}

    # populate dummy 2-view reports
    two_view_reports = {}
    for (i1, i2) in i2Ri1_dict.keys():
        two_view_reports[(i1, i2)] = TwoViewEstimationReport(
            v_corr_idxs=np.array([]),  # dummy array
            num_inliers_est_model=10,  # dummy value
        )

    rcc_estimator = CycleConsistentRotationViewGraphEstimator(
        EdgeErrorAggregationCriterion.MEDIAN_EDGE_ERROR)
    viewgraph_edges = rcc_estimator.run(i2Ri1_dict=i2Ri1_dict,
                                        i2Ui1_dict=i2Ui1_dict,
                                        calibrations=calibrations,
                                        corr_idxs_i1i2=corr_idxs_i1i2,
                                        keypoints=keypoints,
                                        two_view_reports=two_view_reports)

    # non-self-consistent triplet should have been removed
    expected_edges = {(0, 1), (1, 2), (0, 2)}
    assert set(viewgraph_edges) == expected_edges
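The cycle-consistency criterion behind this test can be sketched directly: compose the three relative rotations around a triplet and measure the deviation of the loop from identity. A minimal sketch, assuming the i2Ri1 convention used above (the helper name is hypothetical):

import numpy as np
from gtsam import Rot3


def triplet_cycle_error_deg(i1Ri0: Rot3, i2Ri1: Rot3, i2Ri0: Rot3) -> float:
    """Deviation (in degrees) of the composed loop i2Ri0^-1 * i2Ri1 * i1Ri0 from the identity rotation."""
    cycle_rotation = i2Ri0.inverse().compose(i2Ri1).compose(i1Ri0)
    return float(np.degrees(np.linalg.norm(Rot3.Logmap(cycle_rotation))))

# In the scenario above, the (0, 1, 2) triplet has ~0 error, while the corrupted (2, 3, 4) triplet has ~15 degrees.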
Example No. 22
    def test_computation_graph(self):
        """Test the dask computation graph execution using a valid collection of relative unit-translations."""
        """Test a simple case with 8 camera poses.

        The camera poses are arranged on the circle and point towards the center
        of the circle. The poses of 8 cameras are obtained from SFMdata and the
        unit translations directions between some camera pairs are computed from their global translations.

        This test is copied from GTSAM's TranslationAveragingExample.
        """

        fx, fy, s, u0, v0 = 50.0, 50.0, 0.0, 50.0, 50.0
        expected_wTi_list = SFMdata.createPoses(Cal3_S2(fx, fy, s, u0, v0))

        wRi_list = [x.rotation() for x in expected_wTi_list]

        # create relative translation directions between a pose index and the
        # next two poses
        i2Ui1_dict = {}
        for i1 in range(len(expected_wTi_list) - 1):
            for i2 in range(i1 + 1, min(len(expected_wTi_list), i1 + 3)):
                # create relative translations using global R and T.
                i2Ui1_dict[(i1, i2)] = Unit3(expected_wTi_list[i2].between(
                    expected_wTi_list[i1]).translation())

        # use the `run` API to get expected results
        expected_wti_list = self.obj.run(len(wRi_list), i2Ui1_dict, wRi_list)
        expected_wTi_list = [
            Pose3(wRi, wti) if wti is not None else None
            for (wRi, wti) in zip(wRi_list, expected_wti_list)
        ]

        # form computation graph and execute
        i2Ui1_graph = dask.delayed(i2Ui1_dict)
        wRi_graph = dask.delayed(wRi_list)
        computation_graph = self.obj.create_computation_graph(
            len(wRi_list), i2Ui1_graph, wRi_graph)
        with dask.config.set(scheduler="single-threaded"):
            wti_list = dask.compute(computation_graph)[0]

        wTi_list = [
            Pose3(wRi, wti) if wti is not None else None
            for (wRi, wti) in zip(wRi_list, wti_list)
        ]

        # compare the entries
        self.assertTrue(
            geometry_comparisons.compare_global_poses(wTi_list,
                                                      expected_wTi_list))
Example No. 23
    def test_compute_epipolar_distances(self):
        """Test for epipolar distance computation using 2 sets of points and the essential matrix."""
        normalized_pts_i1 = np.array([[1.0, 3.5], [-2.0, 2.0]])
        normalized_pts_i2 = np.array([[2.0, -1.0], [1.0, 0.0]])

        i2Ri1 = Rot3.RzRyRx(0, np.deg2rad(30), np.deg2rad(10))
        i2ti1 = np.array([-0.5, 2.0, 0])
        i2Ei1 = EssentialMatrix(i2Ri1, Unit3(i2ti1))

        expected = np.array([1.637, 1.850])

        computed = verification_utils.compute_epipolar_distances(
            normalized_pts_i1, normalized_pts_i2, i2Ei1)

        np.testing.assert_allclose(computed, expected, rtol=1e-3)
Example No. 24
    def test_compute_translation_to_direction_angle_is_nonzero(self):
        rz = np.deg2rad(90)
        wRi2 = Rot3.RzRyRx(0, 0, rz)  # x-axis of i2 points along y in world frame
        wTi2_estimated = Pose3(wRi2, Point3(0, 0, 0))
        # i1 is at (0, 1, 0) in the i2 frame; the rotation of i1 is irrelevant here.
        wTi1_estimated = Pose3(Rot3(), Point3(-1, 0, 0))
        i2Ui1_measured = Unit3(Point3(1, 0, 0))
        # Estimated relative translation of i1 in the i2 frame is (0, 1, 0), and the measurement in the i2 frame is
        # (1, 0, 0). Expected angle between the two is 90 degrees.
        self.assertEqual(
            geometry_comparisons.compute_translation_to_direction_angle(
                i2Ui1_measured, wTi2_estimated, wTi1_estimated),
            90.0,
        )
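compute_translation_to_direction_angle is assumed to compare the measured direction against the direction implied by the two estimated global poses; a minimal sketch of that comparison (the function name is hypothetical):

import numpy as np
from gtsam import Pose3, Unit3


def translation_to_direction_angle_deg(i2Ui1_measured: Unit3, wTi2: Pose3, wTi1: Pose3) -> float:
    """Angle (in degrees) between the measured i2Ui1 and the direction implied by the estimated global poses."""
    i2Ti1_estimated = wTi2.between(wTi1)
    i2Ui1_estimated = Unit3(i2Ti1_estimated.translation())
    cos_angle = np.dot(i2Ui1_measured.point3(), i2Ui1_estimated.point3())
    return float(np.degrees(np.arccos(np.clip(cos_angle, -1.0, 1.0))))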
Example No. 25
    def test_outlier_case_missing_value(self) -> None:
        """Ensure that a missing `Value` in the 1dsfm result is represented by `None` in the returned entries.
        
        The scenario below will lead to an outlier configuration -- all edges to the node 4 will be rejected
        as outliers, so that Value cannot be cast to Point3 -- it is returned as None.
        
        This test ensures that 1dsfm checks if each Value exists in 1dsfm result, before casting it to a Point3.
        """
        # fmt: off
        wRi_list = [
            np.array([[-0.382164, 0.89195, 0.241612],
                      [-0.505682, 0.0169854, -0.862553],
                      [-0.773458, -0.451815, 0.444551]]),
            np.array([[-0.453335, 0.886803, -0.0898219],
                      [-0.27425, -0.234656, -0.93259],
                      [-0.8481, -0.398142, 0.349584]]),
            np.array([[-0.385656, 0.90387, -0.18517],
                      [0.125519, -0.147431, -0.981076],
                      [-0.914065, -0.4016, -0.0565954]]),
            np.array([[-0.359387, 0.898029, -0.253744],
                      [0.253506, -0.167734, -0.95268],
                      [-0.898096, -0.406706, -0.167375]]),
            np.array([[-0.342447, 0.898333, -0.275186],
                      [0.0881727, -0.260874, -0.961338],
                      [-0.935391, -0.353471, 0.0101272]]),
        ]
        # fmt: on
        wRi_input = [Rot3(wRi) for wRi in wRi_list]

        i2Ui1_input = {
            (0, 1): np.array([0.967948, -0.0290259, 0.24947]),
            (0, 2): np.array([0.906879, -0.000610539, 0.42139]),
            (0, 3): np.array([0.937168, -0.0161865, 0.348502]),
            (0, 4): np.array([-0.975139, 0.0133109, -0.221193]),
            (1, 2): np.array([0.990186, 0.0188153, 0.138484]),
            (1, 3): np.array([0.986072, -0.00746304, 0.166149]),
            (1, 4): np.array([-0.996558, 0.00911097, -0.0823996]),
            (2, 3): np.array([0.990546, -0.0294894, 0.133976]),
            (2, 4): np.array([0.998932, -0.0300599, -0.035099]),
            (3, 4): np.array([0.994791, -0.033332, -0.0963361]),
        }
        i2Ui1_input = {(i, j): Unit3(t) for (i, j), t in i2Ui1_input.items()}
        wti_computed, _ = self.obj.run(len(wRi_input), i2Ui1_input, wRi_input)

        assert len(wti_computed) == 5
        assert wti_computed[-1] is None
Example No. 26
def recover_relative_pose_from_essential_matrix(
    i2Ei1: Optional[np.ndarray],
    verified_coordinates_i1: np.ndarray,
    verified_coordinates_i2: np.ndarray,
    camera_intrinsics_i1: Cal3Bundler,
    camera_intrinsics_i2: Cal3Bundler,
) -> Tuple[Optional[Rot3], Optional[Unit3]]:
    """Recovers the relative rotation and translation direction from essential matrix and verified correspondences
    using opencv's API.

    Args:
        i2Ei1: essential matrix as a numpy array, of shape 3x3.
        verified_coordinates_i1: coordinates of verified correspondences in image i1, of shape Nx2.
        verified_coordinates_i2: coordinates of verified correspondences in image i2, of shape Nx2.
        camera_intrinsics_i1: intrinsics for image i1.
        camera_intrinsics_i2: intrinsics for image i2.

    Returns:
        relative rotation i2Ri1, or None if the input essential matrix is None.
        relative translation direction i2Ui1, or None if the input essential matrix is None.
    """
    if i2Ei1 is None:
        return None, None

    # obtain points in normalized coordinates using intrinsics.
    normalized_coordinates_i1 = feature_utils.normalize_coordinates(
        verified_coordinates_i1, camera_intrinsics_i1)
    normalized_coordinates_i2 = feature_utils.normalize_coordinates(
        verified_coordinates_i2, camera_intrinsics_i2)

    # use opencv to recover pose
    _, i2Ri1, i2ti1, _ = cv.recoverPose(i2Ei1, normalized_coordinates_i1,
                                        normalized_coordinates_i2)
    i2Ri1 = Rot3(i2Ri1)
    i2Ui1 = Unit3(i2ti1.squeeze())
    i2Ei1_reconstructed = EssentialMatrix(i2Ri1, i2Ui1).matrix()

    # normalizing the two essential matrices
    i2Ei1_normalized = i2Ei1 / np.linalg.norm(i2Ei1, axis=None)
    i2Ei1_reconstructed_normalized = i2Ei1_reconstructed / np.linalg.norm(
        i2Ei1_reconstructed, axis=None)
    if not np.allclose(i2Ei1_normalized, i2Ei1_reconstructed_normalized):
        logger.warning(
            "Recovered R, t cannot create the input Essential Matrix")

    return i2Ri1, i2Ui1
Example No. 27
def convert_data_for_translation_averaging(
    wTi_list: List[Pose3], i2Ti1_dict: Dict[Tuple[int, int], Pose3]
) -> Tuple[List[Rot3], Dict[Tuple[int, int], Unit3], List[Point3]]:
    """[summary]

    Args:
        wTi_list (List[Pose3]): [description]
        i2Ti1_dict (Dict[Tuple[int, int], Pose3]): [description]

    Returns:
        Tuple[List[Rot3], Dict[Tuple[int, int], Unit3], List[Point3]]: [description]
    """

    wRi_list = [x.rotation() for x in wTi_list]
    wti_list = [x.translation() for x in wTi_list]
    i2Ui1_dict = {k: Unit3(v.translation()) for k, v in i2Ti1_dict.items()}

    return wRi_list, i2Ui1_dict, wti_list
Example No. 28
    def test_lund_door(self):
        """Unit Test on the door dataset."""
        loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"),
                              image_extension="JPG")

        # we will use ground truth poses to generate relative rotations and relative unit translations
        wTi_expected_list = [
            loader.get_camera_pose(x) for x in range(len(loader))
        ]
        wRi_list = [x.rotation() for x in wTi_expected_list]
        wti_expected_list = [x.translation() for x in wTi_expected_list]

        i2Ui1_dict = dict()
        for (i1, i2) in loader.get_valid_pairs():
            i2Ti1 = wTi_expected_list[i2].between(wTi_expected_list[i1])

            i2Ui1_dict[(i1, i2)] = Unit3((i2Ti1.translation()))

        self.__execute_test(i2Ui1_dict, wRi_list, wti_expected_list)
Example No. 29
    def test_convert_to_epipolar_lines_valid_input(self):
        """Test conversion of valid 2D points to epipolar lines using the essential matrix."""

        points = np.array([
            [10.0, -5.0],
            [3.5, 20.0],
        ])  # 2d points in homogeneous coordinates
        essential_mat = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0),
                                        Unit3(np.array([-5, 2, 0])))

        computed = feature_utils.convert_to_epipolar_lines(
            points, essential_mat)

        expected = np.array([
            [-2.36351579, -5.90878948, 1.75364193],
            [-0.65653216, -1.64133041, -19.75129171],
        ])

        np.testing.assert_allclose(computed, expected)
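The expected coefficients follow the standard relation l2 = E @ x1 with x1 in homogeneous coordinates; a short numpy check of the first point under that assumption:

import numpy as np
from gtsam import EssentialMatrix, Rot3, Unit3

i2Ei1 = EssentialMatrix(Rot3.RzRyRx(0, np.deg2rad(45), 0), Unit3(np.array([-5, 2, 0]))).matrix()
x1_homogeneous = np.array([10.0, -5.0, 1.0])
line_i2 = i2Ei1 @ x1_homogeneous
# line_i2 is approximately [-2.3635, -5.9088, 1.7536], matching the first expected row above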
Example No. 30
def spherical_to_cartesian_directions(spherical_coords: np.ndarray) -> List[Unit3]:
    """Converts an array of spherical coordinates to a list of Unit3 directions.

    Args:
        spherical_coords: Nx2 array where each row is [azimuth, elevation] in radians.

    Returns:
        List of Unit3 directions for the provided spherical coordinates.
    """
    azimuth = spherical_coords[:, 0]
    elevation = spherical_coords[:, 1]
    y = np.cos(elevation)
    x = np.sin(azimuth) * np.sin(elevation)
    z = np.cos(azimuth) * np.sin(elevation)
    directions_unit3 = []

    for i in range(x.shape[0]):
        directions_unit3.append(Unit3(np.array([x[i], y[i], z[i]])))
    return directions_unit3
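For completeness, the inverse mapping (Cartesian to spherical) under the same convention (y = cos(elevation), x = sin(azimuth) * sin(elevation), z = cos(azimuth) * sin(elevation)) can be sketched as follows; the function name is hypothetical, and the output agrees with the expected values in the cartesian_to_spherical_directions tests earlier:

from typing import List

import numpy as np
from gtsam import Unit3


def cartesian_to_spherical_directions_sketch(directions: List[Unit3]) -> np.ndarray:
    """Nx2 array of [azimuth, elevation] in radians, the inverse of the conversion above (sketch)."""
    spherical = []
    for direction in directions:
        x, y, z = direction.point3()
        elevation = np.arccos(np.clip(y, -1.0, 1.0))
        azimuth = np.arctan2(x, z)
        spherical.append([azimuth, elevation])
    return np.array(spherical)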