def run_scene_optimizer() -> None:
    """Run SceneOptimizer on the Lund Door dataset and write out a Dask performance report."""
    with initialize_config_module(config_module="gtsfm.configs"):
        # config is relative to the gtsfm module
        cfg = compose(config_name="default_lund_door_set1_config.yaml")
        scene_optimizer: SceneOptimizer = instantiate(cfg.SceneOptimizer)

        loader = OlssonLoader(os.path.join(DATA_ROOT, "set1_lund_door"), image_extension="JPG")

        sfm_result_graph = scene_optimizer.create_computation_graph(
            num_images=len(loader),
            image_pair_indices=loader.get_valid_pairs(),
            image_graph=loader.create_computation_graph_for_images(),
            camera_intrinsics_graph=loader.create_computation_graph_for_intrinsics(),
            gt_pose_graph=loader.create_computation_graph_for_poses(),
        )

        # create dask client
        cluster = LocalCluster(n_workers=2, threads_per_worker=4)

        with Client(cluster), performance_report(filename="dask-report.html"):
            sfm_result = sfm_result_graph.compute()

        assert isinstance(sfm_result, GtsfmData)
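# A minimal sketch (not part of the original snippet), assuming the runner above lives in an
# executable script: invoke run_scene_optimizer() when the module is run as __main__.
if __name__ == "__main__":
    run_scene_optimizer()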
def test_get_camera_intrinsics_exif(self):
    """Tests getter for intrinsics when explicit numpy arrays are absent and we fall back on EXIF."""
    loader = OlssonLoader(EXIF_FOLDER, image_extension="JPG", use_gt_intrinsics=False)
    computed = loader.get_camera_intrinsics(5)
    expected = Cal3Bundler(fx=2378.983, k1=0, k2=0, u0=648.0, v0=968.0)
    self.assertTrue(expected.equals(computed, 1e-3))
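# A minimal sketch (an assumption, not the loader's actual EXIF code) of the standard conversion
# from an EXIF focal length in millimeters to a focal length in pixels; `sensor_width_mm` would
# typically come from a sensor-database lookup keyed on the camera model.
def focal_length_px_from_exif(focal_length_mm: float, sensor_width_mm: float, image_width_px: int) -> float:
    """Convert an EXIF focal length (mm) to pixels using the physical sensor width."""
    return image_width_px * focal_length_mm / sensor_width_mm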
class TestSceneOptimizer(unittest.TestCase):
    """Unit test for SceneOptimizer, which runs SfM for a scene."""

    def setUp(self) -> None:
        self.loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")
        assert len(self.loader)

    def test_create_computation_graph(self):
        """Will test Dask multi-processing capabilities and ability to serialize all objects."""
        self.loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")

        with hydra.initialize_config_module(config_module="gtsfm.configs"):
            # config is relative to the gtsfm module
            cfg = hydra.compose(config_name="scene_optimizer_unit_test_config.yaml")
            scene_optimizer: SceneOptimizer = instantiate(cfg.SceneOptimizer)

            # generate the dask computation graph
            delayed_sfm_result, delayed_io = scene_optimizer.create_computation_graph(
                num_images=len(self.loader),
                image_pair_indices=self.loader.get_valid_pairs(),
                image_graph=self.loader.create_computation_graph_for_images(),
                all_intrinsics=self.loader.get_all_intrinsics(),
                image_shapes=self.loader.get_image_shapes(),
                absolute_pose_priors=self.loader.get_absolute_pose_priors(),
                relative_pose_priors=self.loader.get_relative_pose_priors(self.loader.get_valid_pairs()),
                cameras_gt=self.loader.get_gt_cameras(),
                gt_wTi_list=self.loader.get_gt_poses(),
            )

            # create dask client
            cluster = LocalCluster(n_workers=1, threads_per_worker=4)

            with Client(cluster):
                sfm_result, *io = dask.compute(delayed_sfm_result, *delayed_io)

            self.assertIsInstance(sfm_result, GtsfmData)

            # compare the camera poses
            computed_poses = sfm_result.get_camera_poses()

            # get active cameras from largest connected component, may be < len(self.loader)
            connected_camera_idxs = sfm_result.get_valid_camera_indices()
            expected_poses = [self.loader.get_camera_pose(i) for i in connected_camera_idxs]

            self.assertTrue(
                comp_utils.compare_global_poses(computed_poses, expected_poses, trans_err_atol=1.0, trans_err_rtol=0.1)
            )
class TestDetectorBase(unittest.TestCase):
    """Main test class for detector base class in frontend."""

    def setUp(self):
        super().setUp()
        self.detector = DummyDetector()
        self.loader = OlssonLoader(TEST_DATA_PATH, image_extension="JPG")

    def test_number_of_detections(self):
        """Tests that the number of detections is less than the maximum number configured."""
        test_image = self.loader.get_image(0)
        keypoints = self.detector.detect(test_image)

        self.assertLessEqual(len(keypoints), self.detector.max_keypoints)

    def test_coordinates_range(self):
        """Tests that each coordinate is within the image bounds."""
        test_image = self.loader.get_image(0)
        keypoints = self.detector.detect(test_image)

        np.testing.assert_array_equal(keypoints.coordinates[:, 0] >= 0, True)
        np.testing.assert_array_equal(keypoints.coordinates[:, 0] <= test_image.width, True)
        np.testing.assert_array_equal(keypoints.coordinates[:, 1] >= 0, True)
        np.testing.assert_array_equal(keypoints.coordinates[:, 1] <= test_image.height, True)

    def test_scale(self):
        """Tests that the scales are non-negative."""
        keypoints = self.detector.detect(self.loader.get_image(0))

        if keypoints.scales is not None:
            np.testing.assert_array_equal(keypoints.scales >= 0, True)

    def test_computation_graph(self):
        """Tests the dask computation graph formation using a single image."""
        idx_under_test = 0
        image_graph = self.loader.create_computation_graph_for_images()[idx_under_test]
        keypoints_graph = self.detector.create_computation_graph(image_graph)

        with dask.config.set(scheduler="single-threaded"):
            keypoints = dask.compute(keypoints_graph)[0]

        # check that the normal workflow and the dask workflow agree for an image
        expected_keypoints = self.detector.detect(self.loader.get_image(0))
        self.assertEqual(keypoints, expected_keypoints)

    def test_pickleable(self):
        """Tests that the detector object is pickleable (required for dask)."""
        try:
            pickle.dumps(self.detector)
        except TypeError:
            self.fail("Cannot dump detector using pickle")
class TestSceneOptimizer(unittest.TestCase):
    """Unit test for SceneOptimizer, which runs SfM for a scene."""

    def setUp(self) -> None:
        self.loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")
        assert len(self.loader)

    def test_create_computation_graph(self):
        """Will test Dask multi-processing capabilities and ability to serialize all objects."""
        self.loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")

        with initialize_config_module(config_module="gtsfm.configs"):
            # config is relative to the gtsfm module
            cfg = compose(config_name="scene_optimizer_unit_test_config.yaml")
            obj: SceneOptimizer = instantiate(cfg.SceneOptimizer)

            # generate the dask computation graph
            sfm_result_graph = obj.create_computation_graph(
                len(self.loader),
                self.loader.get_valid_pairs(),
                self.loader.create_computation_graph_for_images(),
                self.loader.create_computation_graph_for_intrinsics(),
                gt_pose_graph=self.loader.create_computation_graph_for_poses(),
            )

            # create dask client
            cluster = LocalCluster(n_workers=1, threads_per_worker=4)

            with Client(cluster):
                sfm_result = dask.compute(sfm_result_graph)[0]

            self.assertIsInstance(sfm_result, GtsfmData)

            # compare the camera poses
            computed_poses = sfm_result.get_camera_poses()
            computed_rotations = [x.rotation() for x in computed_poses]
            computed_translations = [x.translation() for x in computed_poses]

            # get active cameras from largest connected component, may be < len(self.loader)
            connected_camera_idxs = sfm_result.get_valid_camera_indices()
            expected_poses = [self.loader.get_camera_pose(i) for i in connected_camera_idxs]

            self.assertTrue(comp_utils.compare_global_poses(computed_poses, expected_poses))
def testSimpleTriangulationOnDoorDataset(self):
    """Test the tracks of the door dataset using simple triangulation initialization.

    Uses computed tracks with ground truth camera parameters. Failures are expected on 2 tracks
    which have incorrect matches.
    """
    with open(DOOR_TRACKS_PATH, "rb") as handle:
        tracks = pickle.load(handle)

    loader = OlssonLoader(DOOR_DATASET_PATH, image_extension="JPG")

    camera_dict = {
        i: PinholeCameraCal3Bundler(loader.get_camera_pose(i), loader.get_camera_intrinsics(i))
        for i in range(len(loader))
    }

    initializer = Point3dInitializer(camera_dict, TriangulationParam.NO_RANSAC, reproj_error_thresh=1e5)

    # tracks which have expected failures (both tracks have incorrect measurements)
    expected_failures = [
        SfmTrack2d(
            measurements=[
                SfmMeasurement(i=1, uv=np.array([1252.22729492, 1487.29431152])),
                SfmMeasurement(i=2, uv=np.array([1170.96679688, 1407.35876465])),
                SfmMeasurement(i=4, uv=np.array([263.32104492, 1489.76965332])),
            ]
        ),
        SfmTrack2d(
            measurements=[
                SfmMeasurement(i=6, uv=np.array([1142.34545898, 735.92169189])),
                SfmMeasurement(i=7, uv=np.array([1179.84155273, 763.04095459])),
                SfmMeasurement(i=9, uv=np.array([216.54107666, 774.74017334])),
            ]
        ),
    ]

    for track_2d in tracks:
        triangulated_track, _, _ = initializer.triangulate(track_2d)

        if triangulated_track is None:
            # assert that any failure is one of the expected ones
            self.assertIn(track_2d, expected_failures)
def construct_loader(self) -> LoaderBase:
    """Construct an OlssonLoader from the parsed command-line arguments."""
    loader = OlssonLoader(
        self.parsed_args.dataset_root,
        image_extension=self.parsed_args.image_extension,
        max_frame_lookahead=self.parsed_args.max_frame_lookahead,
        max_resolution=self.parsed_args.max_resolution,
    )

    return loader
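# Illustrative sketch only (not the project's actual CLI): construct_loader() above assumes the
# parsed arguments carry these fields. The flag names mirror the attributes used; the defaults
# and help strings are assumptions.
import argparse


def _build_argument_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Run GTSFM on an Olsson-format dataset.")
    parser.add_argument("--dataset_root", type=str, required=True, help="Path to the dataset root folder.")
    parser.add_argument("--image_extension", type=str, default="JPG", help="File extension of the input images.")
    parser.add_argument("--max_frame_lookahead", type=int, default=20, help="Max lookahead when forming image pairs.")
    parser.add_argument("--max_resolution", type=int, default=760, help="Max resolution (shorter image side), in px.")
    return parser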
def test_lund_door(self):
    """Unit test on the door dataset."""
    loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")

    # we will use ground truth poses to generate relative rotations and relative unit translations
    wTi_expected_list = [loader.get_camera_pose(x) for x in range(len(loader))]
    wRi_list = [x.rotation() for x in wTi_expected_list]
    wti_expected_list = [x.translation() for x in wTi_expected_list]

    i2Ui1_dict = dict()
    for (i1, i2) in loader.get_valid_pairs():
        i2Ti1 = wTi_expected_list[i2].between(wTi_expected_list[i1])
        i2Ui1_dict[(i1, i2)] = Unit3(i2Ti1.translation())

    self.__execute_test(i2Ui1_dict, wRi_list, wti_expected_list)
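# A short sketch (for intuition, not part of the test) of what the relative-measurement
# construction above computes: Pose3.between() yields i2Ti1 = wTi2^{-1} * wTi1, and Unit3 wraps
# its translation as a unit direction (camera i1's center expressed in camera i2's frame).
from gtsam import Pose3, Rot3, Unit3
import numpy as np

wTi1 = Pose3(Rot3(), np.array([1.0, 0.0, 0.0]))
wTi2 = Pose3(Rot3(), np.array([0.0, 0.0, 0.0]))
i2Ti1 = wTi2.between(wTi1)  # equal to wTi2.inverse() * wTi1
assert Unit3(i2Ti1.translation()).equals(Unit3(np.array([1.0, 0.0, 0.0])), 1e-9)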
def view_scene(args: argparse.Namespace) -> None:
    """Read Olsson Dataset ground truth from a data.mat file and render the scene to the GUI.

    Args:
        args: rendering options.
    """
    loader = OlssonLoader(
        args.dataset_root,
        image_extension=args.image_extension,
        max_frame_lookahead=DUMMY_MAX_FRAME_LOOKAHEAD,
        max_resolution=args.max_resolution,
    )

    open3d_vis_utils.draw_scene_open3d(
        point_cloud=loader._point_cloud,
        rgb=np.ones_like(loader._point_cloud).astype(np.uint8),
        wTi_list=loader._wTi_list,
        calibrations=[loader.get_camera_intrinsics_full_res(0)] * loader._num_imgs,
        args=args,
    )
def test_exhaustive_retriever_door(self) -> None:
    """Test the Exhaustive retriever on 12 frames of the Lund Door Dataset."""
    loader = OlssonLoader(folder=DOOR_DATA_ROOT, image_extension="JPG")
    retriever = ExhaustiveRetriever()

    # create dask client
    cluster = LocalCluster(n_workers=1, threads_per_worker=4)

    pairs_graph = retriever.create_computation_graph(loader=loader)
    with Client(cluster):
        pairs = pairs_graph.compute()

    # {12 \choose 2} = (12 * 11) / 2 = 66
    self.assertEqual(len(pairs), 66)

    for (i1, i2) in pairs:
        self.assertTrue(i1 < i2)
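# A small sketch (for intuition, not part of the test) verifying the expected pair count:
# exhaustive retrieval over N images yields C(N, 2) unordered pairs with i1 < i2,
# i.e. (12 * 11) / 2 = 66 for the 12 Door frames.
from itertools import combinations

assert len(list(combinations(range(12), 2))) == 66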
def test_sequential_retriever(self) -> None:
    """Assert that we get 30 total matches with a lookahead of 3 frames on the Door Dataset."""
    max_frame_lookahead = 3
    loader = OlssonLoader(str(DEFAULT_FOLDER), image_extension="JPG", max_frame_lookahead=max_frame_lookahead)
    retriever = SequentialRetriever(max_frame_lookahead=max_frame_lookahead)
    pairs = retriever.run(loader=loader)

    expected_pairs = [
        (0, 1), (0, 2), (0, 3),
        (1, 2), (1, 3), (1, 4),
        (2, 3), (2, 4), (2, 5),
        (3, 4), (3, 5), (3, 6),
        (4, 5), (4, 6), (4, 7),
        (5, 6), (5, 7), (5, 8),
        (6, 7), (6, 8), (6, 9),
        (7, 8), (7, 9), (7, 10),
        (8, 9), (8, 10), (8, 11),
        (9, 10), (9, 11),
        (10, 11),
    ]
    self.assertEqual(pairs, expected_pairs)

    # all images have 3 potential forward match pairs, except the last three,
    # which have only 2, 1, and 0 such forward pairs
    self.assertEqual(len(pairs), (9 * 3) + 2 + 1)
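# An illustrative sketch (not the retriever's implementation) of the lookahead rule that
# generates the expected pairs above, which also explains the (9 * 3) + 2 + 1 = 30 count.
def sequential_pairs(num_images: int, max_frame_lookahead: int) -> list:
    """Return all (i1, i2) pairs with 0 < i2 - i1 <= max_frame_lookahead."""
    return [
        (i1, i2)
        for i1 in range(num_images)
        for i2 in range(i1 + 1, min(i1 + max_frame_lookahead + 1, num_images))
    ]


assert len(sequential_pairs(num_images=12, max_frame_lookahead=3)) == 30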
def test_get_camera_pose_missing(self):
    """Tests that the camera pose is None, because it is missing on disk."""
    loader = OlssonLoader(str(NO_EXTRINSICS_FOLDER), image_extension="JPG")
    fetched_pose = loader.get_camera_pose(5)
    self.assertIsNone(fetched_pose)
def setUp(self):
    """Set up the loader for the test."""
    super().setUp()
    self.loader = OlssonLoader(str(DEFAULT_FOLDER), image_extension="JPG")
class TestFolderLoader(unittest.TestCase):
    """Unit tests for the folder loader, which loads images from a folder on disk."""

    def setUp(self):
        """Set up the loader for the test."""
        super().setUp()
        self.loader = OlssonLoader(str(DEFAULT_FOLDER), image_extension="JPG")

    def test_len(self):
        """Test the number of entries in the loader."""
        self.assertEqual(12, len(self.loader))

    def test_get_image_valid_index(self):
        """Tests that get_image works for all valid indices."""
        for idx in range(len(self.loader)):
            self.assertIsNotNone(self.loader.get_image(idx))

    def test_get_image_invalid_index(self):
        """Test that get_image raises an exception on an invalid index."""
        # negative index
        with self.assertRaises(IndexError):
            self.loader.get_image(-1)

        # len() as index
        with self.assertRaises(IndexError):
            self.loader.get_image(12)

        # index > len()
        with self.assertRaises(IndexError):
            self.loader.get_image(15)

    def test_image_contents(self):
        """Test the actual image fetched by the loader at an index.

        The primary purpose of this test is to check that the ordering of filenames is respected by the loader.
        """
        index_to_test = 5
        file_path = DEFAULT_FOLDER / "images" / "DSC_0006.JPG"
        loader_image = self.loader.get_image(index_to_test)
        expected_image = io_utils.load_image(file_path)
        np.testing.assert_allclose(expected_image.value_array, loader_image.value_array)

    def test_get_camera_pose_exists(self):
        """Tests that the correct pose is fetched (present on disk)."""
        fetched_pose = self.loader.get_camera_pose(1)

        wRi_expected = np.array(
            [
                [0.998079, 0.015881, 0.0598844],
                [-0.0161175, 0.999864, 0.00346851],
                [-0.0598212, -0.00442703, 0.998199],
            ]
        )
        wti_expected = np.array([-0.826311, -0.00409053, 0.111315])

        expected_pose = Pose3(Rot3(wRi_expected), wti_expected)
        self.assertTrue(expected_pose.equals(fetched_pose, 1e-2))

    def test_get_camera_pose_missing(self):
        """Tests that the camera pose is None, because it is missing on disk."""
        loader = OlssonLoader(str(NO_EXTRINSICS_FOLDER), image_extension="JPG")
        fetched_pose = loader.get_camera_pose(5)
        self.assertIsNone(fetched_pose)

    def test_get_camera_intrinsics_explicit(self):
        """Tests getter for intrinsics when an explicit data.mat file with intrinsics is present on disk."""
        expected_fx = 2398.119
        expected_fy = 2393.952
        # Cal3Bundler models a single focal length, so the expected value follows the loader's min(fx, fy) convention.
        expected_fx = min(expected_fx, expected_fy)

        expected_px = 628.265
        expected_py = 932.382

        computed = self.loader.get_camera_intrinsics(5)
        expected = Cal3Bundler(fx=expected_fx, k1=0, k2=0, u0=expected_px, v0=expected_py)

        self.assertTrue(expected.equals(computed, 1e-3))

    def test_get_camera_intrinsics_exif(self):
        """Tests getter for intrinsics when explicit numpy arrays are absent and we fall back on EXIF."""
        loader = OlssonLoader(EXIF_FOLDER, image_extension="JPG", use_gt_intrinsics=False)
        computed = loader.get_camera_intrinsics(5)
        expected = Cal3Bundler(fx=2378.983, k1=0, k2=0, u0=648.0, v0=968.0)
        self.assertTrue(expected.equals(computed, 1e-3))

    def test_get_camera_intrinsics_missing(self):
        """Tests getter for intrinsics when both explicit intrinsics and EXIF data are missing."""
        loader = OlssonLoader(NO_EXIF_FOLDER, image_extension="JPG")
        computed = loader.get_camera_intrinsics(5)
        self.assertIsNone(computed)

    def test_create_computation_graph_for_images(self):
        """Tests the graph for loading all the images."""
        image_graph = self.loader.create_computation_graph_for_images()

        # check the length of the graph
        self.assertEqual(12, len(image_graph))

        results = dask.compute(image_graph)[0]

        # randomly check image loads from a few indices
        np.testing.assert_allclose(results[5].value_array, self.loader.get_image(5).value_array)
        np.testing.assert_allclose(results[7].value_array, self.loader.get_image(7).value_array)

    def test_create_computation_graph_for_intrinsics(self):
        """Tests the graph for all intrinsics."""
        intrinsics_graph = self.loader.create_computation_graph_for_intrinsics()

        # check the length of the graph
        self.assertEqual(12, len(intrinsics_graph))

        results = dask.compute(intrinsics_graph)[0]

        # randomly check intrinsics from a few indices
        self.assertTrue(self.loader.get_camera_intrinsics(5).equals(results[5], 1e-5))
        self.assertTrue(self.loader.get_camera_intrinsics(7).equals(results[7], 1e-5))
def test_get_camera_intrinsics_missing(self):
    """Tests getter for intrinsics when both explicit intrinsics and EXIF data are missing."""
    loader = OlssonLoader(NO_EXIF_FOLDER, image_extension="JPG")
    computed = loader.get_camera_intrinsics(5)
    self.assertIsNone(computed)
class TestDescriptorBase(unittest.TestCase):
    """Unit tests for the DescriptorBase class.

    Should be inherited by all descriptor unit tests.
    """

    def setUp(self):
        self.descriptor = DummyDescriptor()
        self.loader = OlssonLoader(str(TEST_DATA_PATH), image_extension="JPG")

    def test_result_size(self):
        """Check that the number of descriptors is the same as the number of features."""
        input_image = self.loader.get_image(0)

        input_keypoints = Keypoints(
            coordinates=np.random.randint(
                low=[0, 0],
                high=[input_image.width, input_image.height],
                size=(5, 2),
            )
        )

        result = self.descriptor.describe(input_image, input_keypoints)

        self.assertEqual(len(input_keypoints), result.shape[0])

    def test_with_no_features(self):
        """Checks that empty feature inputs work well."""
        input_image = self.loader.get_image(0)
        input_keypoints = Keypoints(coordinates=np.array([]))

        result = self.descriptor.describe(input_image, input_keypoints)

        self.assertEqual(0, result.size)

    def test_create_computation_graph(self):
        """Checks the dask computation graph."""
        # testing some indices
        idxs_under_test = [0, 5]

        for idx in idxs_under_test:
            test_image = self.loader.get_image(idx)
            test_keypoints = Keypoints(
                coordinates=np.random.randint(
                    low=[0, 0],
                    high=[test_image.width, test_image.height],
                    size=(np.random.randint(5, 10), 2),
                )
            )

            descriptor_graph = self.descriptor.create_computation_graph(
                dask.delayed(test_image),
                dask.delayed(test_keypoints),
            )

            with dask.config.set(scheduler="single-threaded"):
                descriptors = dask.compute(descriptor_graph)[0]

            expected_descriptors = self.descriptor.describe(test_image, test_keypoints)

            np.testing.assert_allclose(descriptors, expected_descriptors)

    def test_pickleable(self):
        """Tests that the descriptor is pickleable (required for dask)."""
        try:
            pickle.dumps(self.descriptor)
        except TypeError:
            self.fail("Cannot dump descriptor using pickle")
def setUp(self):
    super().setUp()
    self.detector = DummyDetector()
    self.loader = OlssonLoader(TEST_DATA_PATH, image_extension="JPG")
def setUp(self):
    self.descriptor = DummyDescriptor()
    self.loader = OlssonLoader(str(TEST_DATA_PATH), image_extension="JPG")
def setUp(self) -> None:
    self.loader = OlssonLoader(str(DATA_ROOT_PATH / "set1_lund_door"), image_extension="JPG")
    assert len(self.loader)