Example #1
    def test_equal_poses(self):
        self.assertTrue(equal_poses(self.pose_none, self.pose_none))
        self.assertFalse(equal_poses(self.pose_none, self.pose_r_none))
        self.assertFalse(equal_poses(self.pose_none, self.pose_t_none))
        self.assertFalse(equal_poses(self.pose_t_none, self.pose_r_none))
        self.assertFalse(equal_poses(self.pose_r_none, self.pose_t_none))
        self.assertTrue(equal_poses(self.pose_t_none, self.pose_t_none))
        self.assertTrue(equal_poses(self.pose_r_none, self.pose_r_none))
        self.assertTrue(equal_poses(self.pose_a, self.pose_a))
        self.assertFalse(equal_poses(self.pose_a, self.pose_b))
        # a negated quaternion encodes the same rotation, so these compare equal
        self.assertTrue(equal_poses(self.pose_a, self.pose_a_negative))
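
For context, the assertions above exercise equal_poses on kapture.PoseTransform objects. Below is a minimal standalone sketch; the import path kapture.algo.compare is an assumption based on the kapture test suite, and the poses are made up for illustration:

import kapture
from kapture.algo.compare import equal_poses  # assumed import path

# identity rotation as a (w, x, y, z) quaternion, two different translations
pose_a = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[0.0, 0.0, 0.0])
pose_b = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[0.0, 0.0, 0.0])
pose_c = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[1.0, 0.0, 0.0])

print(equal_poses(pose_a, pose_b))  # same rotation and translation -> True
print(equal_poses(pose_a, pose_c))  # translations differ -> False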
Example #2
    def _verify_data(self, kapture_data) -> None:
        cameras = kapture_data.cameras
        self.assertIsNotNone(cameras, "Cameras exist")
        self.assertEqual(1, len(cameras), "One camera")
        camera = next(iter(
            cameras.values()))  # just take the first camera defined
        self.assertEqual(camera.camera_type,
                         kapture.CameraType.SIMPLE_RADIAL_FISHEYE,
                         "Type fisheye")
        camera_params = camera.camera_params
        self.assertEqual(848, camera_params[0], "width")
        self.assertEqual(800, camera_params[1], "height")
        records_camera = kapture_data.records_camera
        self.assertEqual(5, len(records_camera), "Number of images")
        first_record = records_camera[0]
        img_path = next(iter(first_record.values()))
        self.assertEqual("images/frame_000000001.jpg", img_path, "Image path")
        trajectories = kapture_data.trajectories
        self.assertEqual(5, len(trajectories), "Number of trajectory poses")
        k_pose6d = next(iter(
            trajectories[0].values()))  # kapture.PoseTransform
        ref_pose = kapture.PoseTransform(t=FIRST_TRAJECTORY_TRANSLATION,
                                         r=FIRST_TRAJECTORY_ROTATION)
        self.assertTrue(equal_poses(ref_pose, k_pose6d),
                        "First trajectory pose")
        self.assertIsNone(kapture_data.keypoints, "No keypoints")
        self.assertIsNone(kapture_data.observations, "No observations")
        self.assertIsNone(kapture_data.points3d, "No 3D points")
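
The _verify_data helper above receives an already loaded kapture_data object. A minimal sketch of how such an object is typically obtained from a kapture directory; the loader location kapture.io.csv.kapture_from_dir and the dataset path are assumptions here:

import kapture
from kapture.io.csv import kapture_from_dir  # assumed loader location

kapture_data = kapture_from_dir('/path/to/kapture/dataset')  # hypothetical path
camera = next(iter(kapture_data.cameras.values()))  # first declared camera
print(camera.camera_type, camera.camera_params)
print(len(kapture_data.records_camera), 'image records')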
Example #3
    def test_t265_db_only(self):
        kapture_data = import_colmap_database(self._database_filepath,
                                              self._kapture_dirpath,
                                              no_geometric_filtering=True)

        # check the numbers
        self.assertEqual(2, len(kapture_data.sensors))
        self.assertEqual(6, len(kapture_data.trajectories))
        self.assertEqual(6, len(kapture_data.records_camera))

        # check camera ids
        camera_ids_expected = set(['cam_00001', 'cam_00002'
                                   ])  # may evolve in future, not crucial
        camera_ids_actual = set(kapture_data.sensors.keys())
        self.assertEqual(camera_ids_expected, camera_ids_actual)
        # check camera ids consistent in trajectories
        camera_ids_trajectories = set(
            cam_id
            for _, cam_id, _ in kapture.flatten(kapture_data.trajectories))
        self.assertEqual(camera_ids_actual, camera_ids_trajectories)
        # check camera ids consistent in records_camera
        camera_ids_records = set(
            cam_id
            for _, cam_id, _ in kapture.flatten(kapture_data.records_camera))
        self.assertEqual(camera_ids_actual, camera_ids_records)

        # check camera parameters
        cam1 = kapture_data.sensors['cam_00001']
        self.assertIsInstance(cam1, kapture.Camera)
        self.assertEqual('camera', cam1.sensor_type)
        self.assertEqual(kapture.CameraType.OPENCV_FISHEYE, cam1.camera_type)
        params_expected = [
            848.0, 800.0, 284.468, 285.51, 424.355, 393.742, 0.0008, 0.031,
            -0.03, 0.005
        ]
        self.assertAlmostEqual(params_expected, cam1.camera_params)

        # check records
        timestamp, cam_id, image = next(
            kapture.flatten(kapture_data.records_camera, is_sorted=True))
        self.assertEqual(1, timestamp)
        self.assertEqual('cam_00002', cam_id)
        self.assertEqual('rightraw/frame_000000001.jpg', image)

        # check trajectories
        timestamp, cam_id, pose = next(
            kapture.flatten(kapture_data.trajectories, is_sorted=True))
        self.assertEqual(1, timestamp)
        self.assertEqual('cam_00002', cam_id)
        pose_expected = kapture.PoseTransform(
            r=[
                0.9540331248716523, -0.03768128483784883, -0.2972570621910482,
                -0.0062565444214723875
            ],
            t=[2.7109402281860904, 0.13236653865769618, -2.868626176500939])
        self.assertTrue(equal_poses(pose_expected, pose))

        # this sample has no keypoints, descriptors nor matches
        self.assertFalse(
            path.exists(path.join(self._kapture_dirpath, 'reconstruction')))
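
kapture.flatten, used throughout the test above, turns the nested timestamp-to-sensor mapping into (timestamp, sensor_id, value) triples. A small self-contained sketch; the timestamps, camera ids and poses are made up for illustration:

import kapture

identity_rotation = [1.0, 0.0, 0.0, 0.0]  # (w, x, y, z) quaternion
trajectories = kapture.Trajectories()
trajectories[(2, 'cam_00001')] = kapture.PoseTransform(identity_rotation, [0.0, 0.0, 0.0])
trajectories[(1, 'cam_00002')] = kapture.PoseTransform(identity_rotation, [1.0, 0.0, 0.0])

# is_sorted=True yields the triples in timestamp order
for timestamp, cam_id, pose in kapture.flatten(trajectories, is_sorted=True):
    print(timestamp, cam_id, pose)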
Example #4
def convert_training_extrinsics(
        offset: int,
        training_extrinsics: Iterable[VirtualGalleryTrainingExtrinsic],
        images: kapture.RecordsCamera, trajectories: kapture.Trajectories,
        as_rig: bool) -> None:
    """
    Import all training extrinsics into the images and trajectories.

    :param offset: first timestamp to assign; frames are numbered consecutively from this value
    :param training_extrinsics: training extrinsics to import
    :param images: image list to add to
    :param trajectories: trajectories to add to
    :param as_rig: if True, write the rig poses to the trajectories instead of the individual camera poses
    """
    # Map (light_id, loop_id, frame_id) to a unique timestamp
    training_frames_tuples = ((extrinsic.light_id, extrinsic.loop_id,
                               extrinsic.frame_id)
                              for extrinsic in training_extrinsics)
    training_frames_tuples = OrderedDict.fromkeys(
        training_frames_tuples).keys()
    training_frame_mapping = {
        v: n + offset
        for n, v in enumerate(training_frames_tuples)
    }

    # Export images and trajectories
    logger.info("Converting training images and trajectories...")
    for extrinsic in training_extrinsics:
        rotation_matrix = [
            extrinsic.extrinsics[0:3], extrinsic.extrinsics[4:7],
            extrinsic.extrinsics[8:11]
        ]
        rotation = quaternion.from_rotation_matrix(rotation_matrix)
        timestamp = training_frame_mapping[(extrinsic.light_id,
                                            extrinsic.loop_id,
                                            extrinsic.frame_id)]
        camera_device_id = _get_training_camera_name(extrinsic.camera_id)
        translation_vector = [
            extrinsic.extrinsics[3], extrinsic.extrinsics[7],
            extrinsic.extrinsics[11]
        ]
        images[(timestamp, camera_device_id)] = (
            f'training/gallery_light{extrinsic.light_id}_loop'
            f'{extrinsic.loop_id}/frames/rgb/camera_{extrinsic.camera_id}'
            f'/rgb_{extrinsic.frame_id:05}.jpg')
        pose_cam = kapture.PoseTransform(rotation, translation_vector)
        if as_rig:
            pose_rig = kapture.PoseTransform.compose([
                training_rig_config[(_get_training_rig_name(),
                                     camera_device_id)].inverse(), pose_cam
            ])
            if (timestamp, _get_training_rig_name()) in trajectories:
                assert equal_poses(
                    pose_rig,
                    trajectories[(timestamp, _get_training_rig_name())])
            else:
                trajectories[(timestamp, _get_training_rig_name())] = pose_rig
        else:
            trajectories[(timestamp, camera_device_id)] = pose_cam
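
The core of the conversion above is turning one flattened, row-major 3x4 extrinsics matrix into a kapture.PoseTransform. A reduced sketch of just that step, assuming the numpy-quaternion package (imported as quaternion, as in the function above); the extrinsics values are made up:

import quaternion  # numpy-quaternion package
import kapture

# hypothetical row-major 3x4 matrix: [r00 r01 r02 tx, r10 r11 r12 ty, r20 r21 r22 tz]
extrinsics = [1.0, 0.0, 0.0, 0.5,
              0.0, 1.0, 0.0, 0.0,
              0.0, 0.0, 1.0, 2.0]
rotation_matrix = [extrinsics[0:3], extrinsics[4:7], extrinsics[8:11]]
rotation = quaternion.from_rotation_matrix(rotation_matrix)
translation = [extrinsics[3], extrinsics[7], extrinsics[11]]
pose_cam = kapture.PoseTransform(rotation, translation)
print(pose_cam)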
Example #5
    def test_maupertuis_import_txt_only(self):
        kapture_data = import_colmap_from_reconstruction_files(
            self._reconstruction_path, self._temp_dirpath, set())

        # check the numbers
        self.assertEqual(1, len(kapture_data.sensors))
        self.assertEqual(4, len(kapture_data.trajectories))
        self.assertEqual(4, len(kapture_data.records_camera))
        self.assertIs(kapture_data.records_lidar, None)
        self.assertIs(kapture_data.records_wifi, None)
        self.assertEqual(4, len(kapture_data.keypoints))
        self.assertIs(kapture_data.descriptors, None)
        self.assertIs(kapture_data.matches, None)
        self.assertEqual(1039, len(kapture_data.points3d))
        self.assertEqual(1039, len(kapture_data.observations))

        # check camera
        camera = kapture_data.sensors['cam_00001']
        self.assertEqual('camera', camera.sensor_type)
        self.assertEqual(kapture.CameraType.SIMPLE_PINHOLE, camera.camera_type)
        self.assertAlmostEqual(camera.camera_params,
                               [1919.0, 1079.0, 1847.53, 959.5, 539.5])

        # check snapshots
        snapshots = kapture_data.records_camera
        self.assertTrue(all('cam_00001' in ts for ts in snapshots.values()))
        self.assertEqual(
            ['00.jpg', '01.jpg', '02.jpg', '03.jpg'],
            [filename for _, _, filename in kapture.flatten(snapshots, True)])

        # check trajectories
        trajectory = kapture_data.trajectories
        self.assertTrue(all('cam_00001' in ts for ts in trajectory.values()))
        self.assertTrue(
            all(pose.r is not None and pose.t is not None
                for ts in trajectory.values() for pose in ts.values()))
        self.assertTrue(
            equal_poses(
                trajectory[1, 'cam_00001'],
                kapture.PoseTransform(
                    r=[0.998245, -0.000889039, -0.0384732, -0.045019],
                    t=[3.24777, -2.58119, -0.0457181])))

        # check points3d
        self.assertEqual((1039, 6), kapture_data.points3d.shape)
        self.assertAlmostEqual([-2.39675, 4.62278, 13.2759, 57.0, 57.0, 49.0],
                               kapture_data.points3d[0].tolist())

        # check observations
        observations = kapture_data.observations
        # self.assertEqual(4, len(observations[0]))
        self.assertEqual(
            {('01.jpg', 4561), ('02.jpg', 3389), ('00.jpg', 4975),
             ('03.jpg', 3472)}, set(observations[0]))
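
The points3d assertions above imply that each row stores XYZ coordinates followed by an RGB colour. A numpy-only sketch of splitting such an array, using the first point from the test as sample data:

import numpy as np

# one row in the layout checked above: [x, y, z, r, g, b]
points3d = np.array([[-2.39675, 4.62278, 13.2759, 57.0, 57.0, 49.0]])
xyz = points3d[:, 0:3]  # 3D coordinates
rgb = points3d[:, 3:6]  # per-point colour
print(xyz.shape, rgb.shape)  # (1, 3) (1, 3)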