Example 1
    def test_compare_global_poses_scaled_squares(self):
        """Make sure a big and small square can be aligned.

        The u's represent a big square (10x10), and v's represents a small square (4x4).
        """
        # yaw-only rotations at each corner of either square
        corner_rotations = [
            Rotation.from_euler("z", angle, degrees=True).as_matrix()
            for angle in (0, 90, 180, 270)
        ]

        # corner positions of the big (10x10) and small (4x4) squares
        big_square_corners = [(2, 3, 0), (12, 3, 0), (12, 13, 0), (2, 13, 0)]
        small_square_corners = [(4, 3, 0), (8, 3, 0), (8, 7, 0), (4, 7, 0)]

        wTi_list = [
            Pose3(Rot3(R), np.array(t))
            for R, t in zip(corner_rotations, big_square_corners)
        ]
        wTi_list_ = [
            Pose3(Rot3(R), np.array(t))
            for R, t in zip(corner_rotations, small_square_corners)
        ]

        pose_graphs_equal = geometry_comparisons.compare_global_poses(
            wTi_list, wTi_list_)
        self.assertTrue(pose_graphs_equal)
Example 2
    def test_create_computation_graph(self):
        """Will test Dask multi-processing capabilities and ability to serialize all objects."""
        use_intrinsics_in_verification = False

        with initialize_config_module(config_module="gtsfm.configs"):
            # hydra resolves the config path relative to the gtsfm module
            cfg = compose(config_name="scene_optimizer_unit_test_config.yaml")
            self.obj: SceneOptimizer = instantiate(cfg.SceneOptimizer)

            # build the delayed dask task graph for the full pipeline
            delayed_result = self.obj.create_computation_graph(
                len(self.loader),
                self.loader.get_valid_pairs(),
                self.loader.create_computation_graph_for_images(),
                self.loader.create_computation_graph_for_intrinsics(),
                use_intrinsics_in_verification=use_intrinsics_in_verification,
            )

            # execute the graph on a small local dask cluster
            local_cluster = LocalCluster(n_workers=1, threads_per_worker=4)
            with Client(local_cluster):
                sfm_result = dask.compute(delayed_result)[0]

            self.assertIsInstance(sfm_result, SfmResult)

            # check the recovered camera poses against the loader's ground truth
            poses = sfm_result.get_camera_poses()
            expected_poses = [
                self.loader.get_camera_pose(i) for i in range(len(self.loader))
            ]
            self.assertTrue(comp_utils.compare_global_poses(poses, expected_poses))
Example 3
    def test_lund_door(self):
        """Run translation averaging on the Lund door set and compare against GT poses."""
        loader = FolderLoader(
            str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")

        expected_wTi_list = [
            loader.get_camera_pose(idx) for idx in range(len(loader))
        ]
        wRi_list = [wTi.rotation() for wTi in expected_wTi_list]

        # derive relative unit-translation directions from the global GT poses
        i2Ui1_dict = {}
        for i1, i2 in loader.get_valid_pairs():
            i2Ti1 = expected_wTi_list[i2].between(expected_wTi_list[i1])
            i2Ui1_dict[(i1, i2)] = Unit3(i2Ti1.translation())

        wti_list = self.obj.run(len(loader), i2Ui1_dict, wRi_list)

        # reassemble full poses; entries without an estimate stay None
        wTi_list = [
            None if wti is None else Pose3(wRi, wti)
            for wRi, wti in zip(wRi_list, wti_list)
        ]

        # TODO: using a v high value for translation relative threshold. Fix it
        self.assertTrue(
            geometry_comparisons.compare_global_poses(
                wTi_list, expected_wTi_list, trans_err_thresh=2e1))
Example 4
    def test_compare_different_poses(self):
        """Compare pose comparison with different inputs."""
        # same poses, but permuted — not the same pose graph
        list_a = [POSE_LIST[1], POSE_LIST[2], POSE_LIST[3]]
        list_b = [POSE_LIST[2], POSE_LIST[3], POSE_LIST[1]]

        result = geometry_comparisons.compare_global_poses(list_a, list_b)
        self.assertFalse(result)
Example 5
    def test_compare_poses_with_origin_shift(self):
        """Check pose comparison with a shift in the global origin."""
        new_origin = Pose3(
            Rot3.RzRyRx(0.3, 0.1, -0.27), np.array([-20.0, +19.0, 3.5]))

        # re-express every pose w.r.t. the shifted origin; the comparison
        # should be invariant to this global frame change
        shifted_list = [new_origin.between(wTi) for wTi in POSE_LIST]

        result = geometry_comparisons.compare_global_poses(POSE_LIST, shifted_list)
        self.assertTrue(result)
Example 6
    def test_compare_poses_with_uniform_scaled_translations(self):
        """Check pose comparison with all translations in input #2 scaled by
        the same scalar factor."""
        scale = 1.2
        # a single global scale is unobservable, so the graphs should still match
        scaled_list = []
        for wTi in POSE_LIST:
            scaled_list.append(Pose3(wTi.rotation(), wTi.translation() * scale))

        self.assertTrue(
            geometry_comparisons.compare_global_poses(POSE_LIST, scaled_list))
Example 7
    def test_compare_poses_with_nonuniform_scaled_translations(self):
        """Check pose comparison with all translations in input #2 scaled by
        significantly different scalar factors."""
        factors = [0.3, 0.7, 0.9, 1.0, 1.0, 0.99, 1.01, 1.10]
        # per-pose scaling distorts the geometry, unlike a single global scale
        distorted_list = [
            Pose3(wTi.rotation(), wTi.translation() * factors[idx])
            for idx, wTi in enumerate(POSE_LIST)
        ]

        self.assertFalse(
            geometry_comparisons.compare_global_poses(POSE_LIST, distorted_list))
Example 8
    def test_computation_graph(self):
        """Test the dask computation graph execution using a valid collection of relative unit-translations.

        Tests a simple case with 8 camera poses. The camera poses are arranged
        on the circle and point towards the center of the circle. The poses of
        8 cameras are obtained from SFMdata and the unit translations
        directions between some camera pairs are computed from their global
        translations.

        This test is copied from GTSAM's TranslationAveragingExample.
        """
        # NOTE: the original had a second triple-quoted string after the
        # docstring, which was a dead expression statement (only the first
        # string literal is the docstring); the two are merged here.
        fx, fy, s, u0, v0 = 50.0, 50.0, 0.0, 50.0, 50.0
        expected_wTi_list = SFMdata.createPoses(Cal3_S2(fx, fy, s, u0, v0))

        wRi_list = [x.rotation() for x in expected_wTi_list]

        # create relative translation directions between a pose index and the
        # next two poses
        i2Ui1_dict = {}
        for i1 in range(len(expected_wTi_list) - 1):
            for i2 in range(i1 + 1, min(len(expected_wTi_list), i1 + 3)):
                # create relative translations using global R and T.
                i2Ui1_dict[(i1, i2)] = Unit3(expected_wTi_list[i2].between(
                    expected_wTi_list[i1]).translation())

        # use the `run` API to get expected results
        expected_wti_list = self.obj.run(len(wRi_list), i2Ui1_dict, wRi_list)
        expected_wTi_list = [
            Pose3(wRi, wti) if wti is not None else None
            for (wRi, wti) in zip(wRi_list, expected_wti_list)
        ]

        # form computation graph and execute
        i2Ui1_graph = dask.delayed(i2Ui1_dict)
        wRi_graph = dask.delayed(wRi_list)
        computation_graph = self.obj.create_computation_graph(
            len(wRi_list), i2Ui1_graph, wRi_graph)
        with dask.config.set(scheduler="single-threaded"):
            wti_list = dask.compute(computation_graph)[0]

        wTi_list = [
            Pose3(wRi, wti) if wti is not None else None
            for (wRi, wti) in zip(wRi_list, wti_list)
        ]

        # compare the entries
        self.assertTrue(
            geometry_comparisons.compare_global_poses(wTi_list,
                                                      expected_wTi_list))
Example 9
    def test_create_computation_graph(self):
        """Will test Dask multi-processing capabilities and ability to serialize all objects."""
        self.loader = OlssonLoader(
            str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")

        with hydra.initialize_config_module(config_module="gtsfm.configs"):
            # hydra resolves the config relative to the gtsfm module
            cfg = hydra.compose(
                config_name="scene_optimizer_unit_test_config.yaml")
            scene_optimizer: SceneOptimizer = instantiate(cfg.SceneOptimizer)

            # build the delayed dask graph for the full pipeline
            delayed_sfm_result, delayed_io = scene_optimizer.create_computation_graph(
                num_images=len(self.loader),
                image_pair_indices=self.loader.get_valid_pairs(),
                image_graph=self.loader.create_computation_graph_for_images(),
                all_intrinsics=self.loader.get_all_intrinsics(),
                image_shapes=self.loader.get_image_shapes(),
                absolute_pose_priors=self.loader.get_absolute_pose_priors(),
                relative_pose_priors=self.loader.get_relative_pose_priors(
                    self.loader.get_valid_pairs()),
                cameras_gt=self.loader.get_gt_cameras(),
                gt_wTi_list=self.loader.get_gt_poses(),
            )

            # execute the graph on a small local dask cluster
            local_cluster = LocalCluster(n_workers=1, threads_per_worker=4)
            with Client(local_cluster):
                sfm_result, *io = dask.compute(delayed_sfm_result, *delayed_io)

            self.assertIsInstance(sfm_result, GtsfmData)

            # compare the recovered camera poses against ground truth
            computed_poses = sfm_result.get_camera_poses()

            # only cameras in the largest connected component are recovered,
            # which may be fewer than len(self.loader)
            connected_camera_idxs = sfm_result.get_valid_camera_indices()
            expected_poses = [
                self.loader.get_camera_pose(i) for i in connected_camera_idxs
            ]

            self.assertTrue(
                comp_utils.compare_global_poses(
                    computed_poses,
                    expected_poses,
                    trans_err_atol=1.0,
                    trans_err_rtol=0.1))
Example 10
    def __execute_test(self, i2Ui1_input: Dict[Tuple[int, int],
                                               Unit3], wRi_input: List[Rot3],
                       wti_expected: List[Point3]) -> None:
        """Run translation averaging and assert the result matches the expected translations."""
        wti_computed, _ = self.obj.run(len(wRi_input), i2Ui1_input, wRi_input)

        # pair each translation set with the shared global rotations
        wTi_computed = [Pose3(R, t) for R, t in zip(wRi_input, wti_computed)]
        wTi_expected = [Pose3(R, t) for R, t in zip(wRi_input, wti_expected)]

        self.assertTrue(
            geometry_comparisons.compare_global_poses(
                wTi_computed, wTi_expected, RELATIVE_ERROR_THRESHOLD,
                ABSOLUTE_ERROR_THRESHOLD))
Example 11
    def test_create_computation_graph(self):
        """Will test Dask multi-processing capabilities and ability to serialize all objects."""
        self.loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"),
                                   image_extension="JPG")

        with initialize_config_module(config_module="gtsfm.configs"):

            # config is relative to the gtsfm module
            cfg = compose(config_name="scene_optimizer_unit_test_config.yaml")
            obj: SceneOptimizer = instantiate(cfg.SceneOptimizer)

            # generate the dask computation graph
            sfm_result_graph = obj.create_computation_graph(
                len(self.loader),
                self.loader.get_valid_pairs(),
                self.loader.create_computation_graph_for_images(),
                self.loader.create_computation_graph_for_intrinsics(),
                gt_pose_graph=self.loader.create_computation_graph_for_poses(),
            )

            # create dask client
            cluster = LocalCluster(n_workers=1, threads_per_worker=4)

            with Client(cluster):
                sfm_result = dask.compute(sfm_result_graph)[0]

            self.assertIsInstance(sfm_result, GtsfmData)

            # compare the camera poses
            computed_poses = sfm_result.get_camera_poses()

            # get active cameras from largest connected component, may be <len(self.loader)
            connected_camera_idxs = sfm_result.get_valid_camera_indices()
            expected_poses = [
                self.loader.get_camera_pose(i) for i in connected_camera_idxs
            ]

            # BUG FIX: previously this compared expected_poses against itself,
            # making the assertion vacuously true; compare computed vs expected.
            # (Also removed the unused computed_rotations/computed_translations.)
            self.assertTrue(
                comp_utils.compare_global_poses(computed_poses,
                                                expected_poses))
Example 12
 def assert_results(self, results_a: List[Optional[Point3]], results_b: List[Optional[Point3]]) -> None:
     """Assert that two translation-averaging results produce matching pose graphs."""
     def _to_poses(translations):
         # pair each estimated translation with its known global rotation;
         # missing estimates stay None
         return [
             None if t is None else Pose3(R, t)
             for R, t in zip(self._global_rotations_input, translations)
         ]

     self.assertTrue(
         geometry_comparisons.compare_global_poses(_to_poses(results_a), _to_poses(results_b)))
Example 13
 def test_compare_poses_exact(self):
     """Check pose comparison with exactly same inputs."""
     # identical lists must trivially compare equal
     result = geometry_comparisons.compare_global_poses(POSE_LIST, POSE_LIST)
     self.assertTrue(result)