def test_export(self):
    # the rig
    rigs = kapture.Rigs()
    rigs['rig0', get_camera_kapture_id_from_colmap_id(0)] = kapture.PoseTransform()
    rigs['rig0', get_camera_kapture_id_from_colmap_id(1)] = kapture.PoseTransform()
    # the records
    records_camera = kapture.RecordsCamera()
    records_camera[0, get_camera_kapture_id_from_colmap_id(0)] = 'camL/0000.jpg'
    records_camera[0, get_camera_kapture_id_from_colmap_id(1)] = 'camR/0000.jpg'
    records_camera[1, get_camera_kapture_id_from_colmap_id(0)] = 'camL/0001.jpg'
    records_camera[1, get_camera_kapture_id_from_colmap_id(1)] = 'camR/0001.jpg'
    # expect
    expected_rigs = [{
        "cameras": [
            {"camera_id": 0, "image_prefix": "camL"},
            {"camera_id": 1, "image_prefix": "camR"}
        ],
        "ref_camera_id": 0
    }]
    colmap_camera_ids = {get_camera_kapture_id_from_colmap_id(i): i for i in range(2)}
    colmap_rigs = export_colmap_rig_json(rigs, records_camera, colmap_camera_ids)
    self.assertEqual(colmap_rigs, expected_rigs)
def test_trajectories_write(self):
    pose1 = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[0.0, 0.0, 0.0])
    pose2 = kapture.PoseTransform(r=[0.5, 0.5, 0.5, 0.5], t=[4., 2., -2.])
    content_expected = [
        csv.KAPTURE_FORMAT_1 + kapture_linesep,
        '# timestamp, device_id, qw, qx, qy, qz, tx, ty, tz' + kapture_linesep,
        ' 0, cam0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0' + kapture_linesep,
        ' 0, cam1, 0.5, 0.5, 0.5, 0.5, 4.0, 2.0, -2.0' + kapture_linesep,
        ' 100, cam2, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0' + kapture_linesep
    ]
    trajectories = kapture.Trajectories()
    timestamp1, timestamp2 = 0, 100
    sensor_id1, sensor_id2, sensor_id3 = 'cam0', 'cam1', 'cam2'
    trajectories[(timestamp1, sensor_id1)] = pose1
    trajectories[(timestamp1, sensor_id2)] = pose2
    trajectories[(timestamp2, sensor_id3)] = pose1
    csv.trajectories_to_file(self._temp_filepath, trajectories)
    with open(self._temp_filepath, 'rt') as f:
        content_actual = f.readlines()
    self.assertListEqual(content_actual, content_expected)
def setUp(self):
    self._rigs = kapture.Rigs()
    looking_not_straight = quaternion.from_rotation_vector([0, np.deg2rad(5.), 0])
    self._rigs['rig', 'cam1'] = kapture.PoseTransform(t=[-10, 0, 0], r=looking_not_straight).inverse()
    self._rigs['rig', 'cam2'] = kapture.PoseTransform(t=[+10, 0, 0], r=looking_not_straight.inverse()).inverse()
    self._trajectories_rigs = kapture.Trajectories()
    for timestamp, ratio in enumerate(np.linspace(0., 1., num=8)):
        looking_around = quaternion.from_rotation_vector([0, np.deg2rad(360. * ratio), 0])
        self._trajectories_rigs[timestamp, 'rig'] = kapture.PoseTransform(t=[0, 0, -100.], r=looking_around)
    self._trajectories_cams = kapture.Trajectories()
    for timestamp, rig_id, pose_rig_from_world in kapture.flatten(self._trajectories_rigs, is_sorted=True):
        for rig_id2, cam_id, pose_cam_from_rig in kapture.flatten(self._rigs):
            assert rig_id == rig_id2  # there is a single rig
            pose_cam_from_world = kapture.PoseTransform.compose([pose_cam_from_rig, pose_rig_from_world])
            self._trajectories_cams[timestamp, cam_id] = pose_cam_from_world
def test_rig_remove(self):
    rigs = kapture.Rigs()
    rigs['rig0', 'cam0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[100, 0, 0])
    rigs['rig0', 'cam1'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[-100, 0, 0])
    trajectories = kapture.Trajectories()
    trajectories[0, 'rig0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 0])
    trajectories[1, 'rig0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 10])
    trajectories[2, 'rig0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 20])
    trajectories_ = kapture.rigs_remove(trajectories, rigs)
    # timestamps should be unchanged
    self.assertEqual(trajectories_.keys(), trajectories.keys())
    self.assertNotEqual(trajectories_.key_pairs(), trajectories.key_pairs())
    self.assertEqual(len(trajectories_.key_pairs()),
                     len(trajectories.key_pairs()) * len(rigs.key_pairs()))
    self.assertIn((0, 'cam0'), trajectories_.key_pairs())
    self.assertIn((0, 'cam1'), trajectories_.key_pairs())
    self.assertIn((2, 'cam0'), trajectories_.key_pairs())
    self.assertIn((2, 'cam1'), trajectories_.key_pairs())
    self.assertAlmostEqual(trajectories_[2, 'cam1'].t_raw, [-100.0, 0.0, 20.0])
    self.assertAlmostEqual(trajectories_[2, 'cam1'].r_raw, [1.0, 0.0, 0.0, 0.0])
def world_pose_transform_distance(pose_a: kapture.PoseTransform,
                                  pose_b: kapture.PoseTransform) -> Tuple[float, float]:
    """
    Get the position and rotation error between two PoseTransform.
    pose_a and pose_b should be world-to-device transforms.

    :return: (position_distance, rotation_distance in deg), can be nan in case of invalid comparison
    """
    if pose_a.r is None and pose_a.t is None:
        return math.nan, math.nan
    elif pose_a.r is None:
        device_to_world_a = kapture.PoseTransform(r=None, t=-pose_a.t)
    elif pose_a.t is None:
        device_to_world_a = kapture.PoseTransform(r=pose_a.r.inverse(), t=None)
    else:
        device_to_world_a = pose_a.inverse()

    if pose_b.r is None and pose_b.t is None:
        return math.nan, math.nan
    elif pose_b.r is None:
        device_to_world_b = kapture.PoseTransform(r=None, t=-pose_b.t)
    elif pose_b.t is None:
        device_to_world_b = kapture.PoseTransform(r=pose_b.r.inverse(), t=None)
    else:
        device_to_world_b = pose_b.inverse()

    pose_error = pose_transform_distance(device_to_world_a, device_to_world_b)
    return pose_error[0], np.rad2deg(pose_error[1])
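# Minimal usage sketch (not part of the original source; the two poses below are
# made up for illustration): compare an estimated world-to-device pose against a
# reference one.
import numpy as np
import quaternion
import kapture

pose_reference = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[0.0, 0.0, 0.0])
pose_estimated = kapture.PoseTransform(r=quaternion.from_rotation_vector([0, np.deg2rad(90.), 0]),
                                       t=[1.0, 0.0, 0.0])
position_error, rotation_error_deg = world_pose_transform_distance(pose_estimated, pose_reference)
# expected: position_error close to 1.0 and rotation_error_deg close to 90.0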
def test_import(self):
    colmap_rigs = [{
        "cameras": [
            {"camera_id": 0, "image_prefix": "camL"},
            {"camera_id": 1, "image_prefix": "camR"}
        ],
        "ref_camera_id": 0
    }]
    rigs_kapture, reconstructed_images, reconstructed_trajectories = import_colmap_rig_json(
        rigs_colmap=colmap_rigs)
    self.assertEqual([('rig0', get_camera_kapture_id_from_colmap_id(0)),
                      ('rig0', get_camera_kapture_id_from_colmap_id(1))],
                     rigs_kapture.key_pairs())
    self.assertIsNone(reconstructed_images)
    self.assertIsNone(reconstructed_trajectories)
    # the records
    images = kapture.RecordsCamera()
    images[0, get_camera_kapture_id_from_colmap_id(0)] = 'camL/0000.jpg'
    images[1, get_camera_kapture_id_from_colmap_id(1)] = 'camR/0000.jpg'
    images[2, get_camera_kapture_id_from_colmap_id(0)] = 'camL/0001.jpg'
    images[3, get_camera_kapture_id_from_colmap_id(1)] = 'camR/0001.jpg'
    rigs_kapture, reconstructed_images, reconstructed_trajectories = import_colmap_rig_json(
        rigs_colmap=colmap_rigs, images=images)
    # check timestamps have been recovered.
    self.assertEqual([(0, get_camera_kapture_id_from_colmap_id(0)),
                      (0, get_camera_kapture_id_from_colmap_id(1)),
                      (1, get_camera_kapture_id_from_colmap_id(0)),
                      (1, get_camera_kapture_id_from_colmap_id(1))],
                     reconstructed_images.key_pairs())
    # trajectories
    trajectories = kapture.Trajectories()
    trajectories[0, get_camera_kapture_id_from_colmap_id(0)] = kapture.PoseTransform()
    trajectories[1, get_camera_kapture_id_from_colmap_id(1)] = kapture.PoseTransform()
    trajectories[2, get_camera_kapture_id_from_colmap_id(0)] = kapture.PoseTransform()
    trajectories[3, get_camera_kapture_id_from_colmap_id(1)] = kapture.PoseTransform()
    rigs_kapture, reconstructed_images, reconstructed_trajectories = import_colmap_rig_json(
        rigs_colmap=colmap_rigs, images=images, trajectories=trajectories)
    self.assertEqual([(0, get_camera_kapture_id_from_colmap_id(0)),
                      (0, get_camera_kapture_id_from_colmap_id(1)),
                      (1, get_camera_kapture_id_from_colmap_id(0)),
                      (1, get_camera_kapture_id_from_colmap_id(1))],
                     reconstructed_trajectories.key_pairs())
def test_pose_write(self):
    pose = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[0.0, 0.0, 0.0])
    fields = csv.pose_to_list(pose)
    self.assertListEqual(fields, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    pose = kapture.PoseTransform(r=[0.5, 0.5, 0.5, 0.5], t=[0., 0., 0.])
    fields = csv.pose_to_list(pose)
    self.assertListEqual(fields, [0.5, 0.5, 0.5, 0.5, 0.0, 0.0, 0.0])
    pose = kapture.PoseTransform(r=[0.5, 0.5, 0.5, 0.5], t=[4., 2., -2.])
    fields = csv.pose_to_list(pose)
    self.assertListEqual(fields, [0.5, 0.5, 0.5, 0.5, 4., 2., -2.])
def setUp(self):
    self.rotation_a = quaternion.quaternion(-0.572, 0.198, 0.755, -0.252)
    self.rotation_a_negative = quaternion.quaternion(0.572, -0.198, -0.755, 0.252)
    self.translation_a = [144.801, -74.548, -17.746]
    self.pose_a = kapture.PoseTransform(self.rotation_a, self.translation_a)
    self.pose_a_negative = kapture.PoseTransform(self.rotation_a_negative, self.translation_a)
    self.rotation_b = quaternion.quaternion(0.878, 0.090, 0.374, -0.285)
    self.rotation_b_negative = quaternion.quaternion(-0.878, -0.090, -0.374, 0.285)
    self.translation_b = [4.508, 45.032, -37.840]
    self.pose_b = kapture.PoseTransform(self.rotation_b, self.translation_b)
    self.pose_b_negative = kapture.PoseTransform(self.rotation_b_negative, self.translation_b)
    self.pose_ab = kapture.PoseTransform(self.rotation_a, self.translation_b)
    self.pose_ba = kapture.PoseTransform(self.rotation_b, self.translation_a)
    self.pose_none = kapture.PoseTransform(r=None, t=None)
    self.pose_r_none = kapture.PoseTransform(r=None, t=self.translation_a)
    self.pose_t_none = kapture.PoseTransform(r=self.rotation_a, t=None)
def test_timestamps_length(self):
    trajectories = kapture.Trajectories()
    self.assertEqual(trajectories.timestamp_length(), -1)
    trajectories[1614362592378, 'cam0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 20])
    trajectories[1614362592634, 'cam0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 0])
    trajectories[1614362592378, 'cam1'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[10, 0, 20])
    trajectories[1614362593123, 'cam1'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[10, 0, 30])
    self.assertEqual(trajectories.timestamp_length(), 13)
    # Check that if we have timestamps of different precision, we can not compute a common length
    trajectories[1614362594, 'lidar0'] = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 0])
    self.assertEqual(trajectories.timestamp_length(), -1)
def test_identity(self):
    # default construct
    pose_identity = kapture.PoseTransform()
    actual_points3d = pose_identity.transform_points(self.expected_points3d)
    self.assertTrue(np.all(actual_points3d == self.expected_points3d))
    # explicit identity
    pose_identity = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 0])
    actual_points3d = pose_identity.transform_points(self.expected_points3d)
    self.assertTrue(np.all(actual_points3d == self.expected_points3d))
def test_init(self):
    rigs = kapture.Rigs()
    rigs['rig0', 'cam0'] = kapture.PoseTransform()
    rigs['rig0', 'cam1'] = kapture.PoseTransform()
    rigs['rig1'] = {
        'cam2': kapture.PoseTransform(),
        'cam3': kapture.PoseTransform()
    }
    self.assertEqual(2, len(rigs))
    self.assertIn('rig0', rigs)
    self.assertIn('rig1', rigs)
    self.assertIn('cam0', rigs['rig0'])
    self.assertNotIn('cam0', rigs['rig1'])
def test_rig_write(self):
    rigs = kapture.Rigs()
    rigs['rig1', 'cam0'] = kapture.PoseTransform()
    rigs['rig1', 'cam1'] = kapture.PoseTransform(r=[0.5, 0.5, 0.5, 0.5])
    content_expected = '\n'.join([csv.KAPTURE_FORMAT_1,
                                  '# rig_id, sensor_id, qw, qx, qy, qz, tx, ty, tz',
                                  'rig1, cam0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0',
                                  'rig1, cam1, 0.5, 0.5, 0.5, 0.5, 0.0, 0.0, 0.0',
                                  ''])
    csv.rigs_to_file(self._temp_filepath, rigs)
    with open(self._temp_filepath, 'rt') as f:
        content_actual = ''.join(f.readlines())
    self.assertEqual(content_expected, content_actual)
def _make_rigs(replace_pose_rig, trajectories) -> kapture.Rigs:
    logger.info('Making up a rig ...')
    rigs = kapture.Rigs()
    pose_babord = kapture.PoseTransform(t=[0, 0, 0],
                                        r=quaternion.from_rotation_vector([0, -np.pi / 2, 0]))
    pose_tribord = kapture.PoseTransform(t=[0, 0, 0],
                                         r=quaternion.from_rotation_vector([0, np.pi / 2, 0]))
    rigs['silda_rig', '0'] = pose_babord
    rigs['silda_rig', '1'] = pose_tribord
    if replace_pose_rig:
        logger.info('replacing camera poses with rig poses.')
        kapture.rigs_recover_inplace(trajectories, rigs)
    return rigs
def setUp(self):
    self._samples_folder = path.abspath(path.join(path.dirname(__file__), '..', 'samples', 'm1x'))
    self._kapture_data = csv.kapture_from_dir(self._samples_folder)
    rotation_a = quaternion.quaternion(-0.572, 0.198, 0.755, -0.252)
    rotation_b = quaternion.quaternion(0.878, 0.090, 0.374, -0.285)
    translation_a = [144.801, -74.548, -17.746]
    translation_b = [144.701, -73.548, -17.746]
    self._pose_a = kapture.PoseTransform(rotation_a, translation_a)
    self._pose_b = kapture.PoseTransform(rotation_b, translation_b)
    self._rigs = kapture.Rigs()
    self._rigs['rig_a', '144323'] = self._pose_a
    self._rigs['rig_b', '144323'] = self._pose_b
def test_translation(self):
    pose_translate = kapture.PoseTransform(r=[1, 0, 0, 0], t=[0, 0, 10])
    actual_points3d = pose_translate.transform_points(self.expected_points3d)
    diff = (actual_points3d - self.expected_points3d)
    self.assertTrue(np.all(diff[:, 2] == 10))
    self.assertTrue(np.all(diff[:, 0:2] == 0))
def test_trajectories_read(self):
    content = [
        '# timestamp, device_id, qw, qx, qy, qz, tx, ty, tz',
        '0, cam0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0',
        ' 0, cam1, 0.5, 0.5, 0.5, 0.5, 4.0, 2.0, -2.0',
        ' 100, cam2, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0'
    ]
    with open(self._temp_filepath, 'wt') as f:
        f.write(kapture_linesep.join(content))
    device_ids = {'cam0', 'cam1', 'cam2'}
    trajectories = csv.trajectories_from_file(self._temp_filepath, device_ids)
    self.assertIsInstance(trajectories, kapture.Trajectories)
    self.assertEqual(2, len(trajectories.keys()))  # timestamps
    self.assertEqual(3, len(trajectories.key_pairs()))  # timestamp x devices
    self.assertIn(0, trajectories)
    self.assertIn(100, trajectories)
    self.assertIn('cam0', trajectories[0])
    self.assertIn('cam1', trajectories[0])
    pose = kapture.PoseTransform(r=[0.5, 0.5, 0.5, 0.5], t=[4., 2., -2.])
    self.assertAlmostEqual(trajectories[(0, 'cam1')].r_raw, pose.r_raw)
    self.assertAlmostEqual(trajectories[(0, 'cam1')].t_raw, pose.t_raw)
def rigs_from_file(filepath: str, sensor_ids: Set[str]) -> kapture.Rigs:
    """
    Reads rigs from a CSV file.

    :param filepath: input file path
    :param sensor_ids: input set of valid sensor ids.
                       If a rig id collides with one of them, an error is raised.
                       If a sensor in a rig is not in sensor_ids, it is ignored.
    :return: rigs
    """
    # rig_id, sensor_id, qw, qx, qy, qz, tx, ty, tz
    rigs = kapture.Rigs()
    with open(filepath) as file:
        table = table_from_file(file)
        for rig_id, sensor_id, qw, qx, qy, qz, tx, ty, tz in table:
            if rig_id in sensor_ids:
                raise ValueError(f'collision between a sensor ID and rig ID ({rig_id})')
            if sensor_id not in sensor_ids:
                # just ignore
                continue
            rotation = float_array_or_none([qw, qx, qy, qz])
            translation = float_array_or_none([tx, ty, tz])
            pose = kapture.PoseTransform(rotation, translation)
            # rigs.setdefault(str(rig_id), kapture.Rig())[sensor_id] = pose
            rigs[str(rig_id), sensor_id] = pose
    return rigs
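# Minimal round-trip sketch (illustrative; the file path is made up and
# rigs_to_file is assumed to be available from the same kapture.io.csv module):
# write a rig to disk, then read it back, keeping only sensors listed in sensor_ids.
import kapture

rigs_out = kapture.Rigs()
rigs_out['rig1', 'cam0'] = kapture.PoseTransform()
rigs_out['rig1', 'cam1'] = kapture.PoseTransform(r=[0.5, 0.5, 0.5, 0.5])
rigs_to_file('/tmp/rigs.txt', rigs_out)
rigs_in = rigs_from_file('/tmp/rigs.txt', sensor_ids={'cam0', 'cam1'})
# a sensor absent from sensor_ids would be silently dropped when reading back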
def test_t265_db_only(self):
    kapture_data = import_colmap_database(self._database_filepath, self._kapture_dirpath,
                                          no_geometric_filtering=True)
    # check the numbers
    self.assertEqual(2, len(kapture_data.sensors))
    self.assertEqual(6, len(kapture_data.trajectories))
    self.assertEqual(6, len(kapture_data.records_camera))
    # check camera ids
    camera_ids_expected = set(['cam_00001', 'cam_00002'])  # may evolve in future, not crucial
    camera_ids_actual = set(kapture_data.sensors.keys())
    self.assertEqual(camera_ids_expected, camera_ids_actual)
    # check camera ids consistent in trajectories
    camera_ids_trajectories = set(cam_id for _, cam_id, _ in kapture.flatten(kapture_data.trajectories))
    self.assertEqual(camera_ids_actual, camera_ids_trajectories)
    # check camera ids consistent in records_camera
    camera_ids_records = set(cam_id for _, cam_id, _ in kapture.flatten(kapture_data.records_camera))
    self.assertEqual(camera_ids_actual, camera_ids_records)
    # check camera parameters
    cam1 = kapture_data.sensors['cam_00001']
    self.assertIsInstance(cam1, kapture.Camera)
    self.assertEqual('camera', cam1.sensor_type)
    self.assertEqual(kapture.CameraType.OPENCV_FISHEYE, cam1.camera_type)
    params_expected = [848.0, 800.0, 284.468, 285.51, 424.355, 393.742, 0.0008, 0.031, -0.03, 0.005]
    self.assertAlmostEqual(params_expected, cam1.camera_params)
    # check records
    timestamp, cam_id, image = next(kapture.flatten(kapture_data.records_camera, is_sorted=True))
    self.assertEqual(1, timestamp)
    self.assertEqual('cam_00002', cam_id)
    self.assertEqual('rightraw/frame_000000001.jpg', image)
    # check trajectories
    timestamp, cam_id, pose = next(kapture.flatten(kapture_data.trajectories, is_sorted=True))
    self.assertEqual(1, timestamp)
    self.assertEqual('cam_00002', cam_id)
    pose_expected = kapture.PoseTransform(
        r=[0.9540331248716523, -0.03768128483784883, -0.2972570621910482, -0.0062565444214723875],
        t=[2.7109402281860904, 0.13236653865769618, -2.868626176500939])
    self.assertTrue(equal_poses(pose_expected, pose))
    # this sample has no keypoints, descriptors nor matches
    self.assertFalse(path.exists(path.join(self._kapture_dirpath, 'reconstruction')))
def rig_to_ply_stream(stream, rig: Dict[str, kapture.PoseTransform], axis_length: float = 1.) -> None:
    """
    Writes the rig to a stream.

    :param stream: an open stream to write to
    :param rig: rig to write
    :param axis_length: length of the axis
    """
    device_list = [(cam_id, pose_tr) for cam_id, pose_tr in rig.items()]
    # add the rig center in devices
    device_list = [(-1, kapture.PoseTransform())] + device_list
    # create 4 points per device: 1 for center, 3 for axis
    points_colored_list = []
    edges_list = []
    for cam_id, pose_tr in device_list:
        axis = get_axis_in_world(pose_tr, axis_length)
        edges_list += [(0, len(points_colored_list))]
        points_colored_list += [p + AXIS_COLORS[i] for i, p in enumerate(axis.tolist())]
    # write points into ply
    header_to_ply_stream(stream, nb_vertex=len(points_colored_list), nb_edges=len(edges_list))
    for p3d in points_colored_list:
        line = ['{:<25}'.format(i) for i in p3d[0:3]]
        line += ['{:03}'.format(int(i)) for i in p3d[3:6]]
        stream.write(' '.join(line) + '\n')
    for e in edges_list:
        line = ['{:2}'.format(i) for i in e]
        stream.write(' '.join(line) + '\n')
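# Minimal usage sketch (file path and camera ids are made up): dump a two-camera
# rig to a PLY file that can be inspected in a point-cloud viewer.
import kapture

rig_example = {
    'cam_left': kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[-0.1, 0.0, 0.0]),
    'cam_right': kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[+0.1, 0.0, 0.0]),
}
with open('/tmp/rig.ply', 'w') as ply_stream:
    rig_to_ply_stream(ply_stream, rig_example, axis_length=0.05)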
def read_robotcar_v2_train(robotcar_path: str) -> Dict[str, kapture.PoseTransform]:
    """
    Read the RobotCar v2 train data file.

    :param robotcar_path: path to the data file
    :return: train data
    """
    with open(path.join(robotcar_path, 'robotcar_v2_train.txt'), 'r') as f:
        lines = f.readlines()
    # remove empty lines
    lines = (l2 for l2 in lines if l2.strip())
    # trim trailing EOL
    lines = (l3.rstrip("\n\r") for l3 in lines)
    # split on spaces
    lines = (l4.split() for l4 in lines)
    lines = list(lines)
    train_data = {
        table[0].replace(".png", ".jpg"): kapture.PoseTransform(
            r=quaternion.from_rotation_matrix([[float(v) for v in table[1:4]],
                                               [float(v) for v in table[5:8]],
                                               [float(v) for v in table[9:12]]]),
            t=[float(v) for v in [table[4], table[8], table[12]]]).inverse()
        for table in lines
    }
    return train_data
def _import_trajectories(silda_dir_path, image_name_to_ids, hide_progress_bars) -> kapture.Trajectories:
    logger.info('Processing trajectories ...')
    trajectories = kapture.Trajectories()
    with open(path.join(silda_dir_path, 'silda-train-poses.txt')) as file:
        lines = file.readlines()
    lines = (line.rstrip().split() for line in lines)
    extrinsics = {line[0]: np.array(line[1:8], dtype=float) for line in lines}
    for silda_image_name, pose_params in tqdm(extrinsics.items(), disable=hide_progress_bars):
        # Silda poses are 7-dim vectors with the rotation quaternion,
        # and the translation vector. The order needs to be:
        # qw, qx, qy, qz, tx, ty, tz
        # The parameters should be described in terms of camera-to-world transformations.
        if silda_image_name not in image_name_to_ids:
            # if this image is not referenced, it is part of the corpus and must be ignored.
            continue
        pose = kapture.PoseTransform(pose_params[0:4], pose_params[4:7]).inverse()
        timestamp, cam_id = image_name_to_ids[silda_image_name]
        trajectories[timestamp, cam_id] = pose
    # if query, trajectories is empty, so just do not save it
    if len(trajectories) == 0:
        trajectories = None
    return trajectories
def average_pose_transform_weighted(poses: List[kapture.PoseTransform],
                                    weights: Union[List[float], np.ndarray]) -> kapture.PoseTransform:
    """
    Average a list of poses with the given weights.

    :param poses: list of poses to average
    :param weights: a (len(poses),) vector of weights
    :return: averaged PoseTransform
    """
    assert isinstance(poses, list)
    assert len(poses) > 0
    assert isinstance(weights, list)
    assert len(weights) == len(poses)
    weight_sum = np.sum(weights)
    # handle NoneType with try/except blocks
    try:
        translation = np.sum(tuple([weights[i] * pose.t for i, pose in enumerate(poses)]), axis=0) / weight_sum
    except TypeError:
        translation = None
    try:
        rotation = average_quaternion_weighted(
            np.vstack(tuple([quaternion.as_float_array(pose.r) for pose in poses])), weights)
    except TypeError:
        rotation = None
    return kapture.PoseTransform(r=rotation, t=translation)
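# Minimal usage sketch (illustrative values): a weighted average of two poses,
# where the second pose counts twice as much as the first.
import kapture

pose_x = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[0.0, 0.0, 0.0])
pose_y = kapture.PoseTransform(r=[1.0, 0.0, 0.0, 0.0], t=[3.0, 0.0, 0.0])
pose_mean = average_pose_transform_weighted([pose_x, pose_y], [1.0, 2.0])
# expected translation close to [2.0, 0.0, 0.0], the weighted mean of the translations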
def convert_testing_extrinsics(offset: int,
                               testing_extrinsics: Iterable[VirtualGalleryTestingExtrinsic],
                               images: kapture.RecordsCamera,
                               trajectories: kapture.Trajectories) -> None:
    """
    Import all testing extrinsics into the images and trajectories.

    :param offset: offset added to the generated timestamps
    :param testing_extrinsics: testing extrinsics to import
    :param images: image list to add to
    :param trajectories: trajectories to add to
    """
    # Map (light_id, occlusion_id, frame_id) to a unique timestamp
    testing_frames_tuples = ((extrinsic.light_id, extrinsic.occlusion_id, extrinsic.frame_id)
                             for extrinsic in testing_extrinsics)
    testing_frames_tuples = OrderedDict.fromkeys(testing_frames_tuples).keys()
    testing_frame_mapping = {v: n + offset for n, v in enumerate(testing_frames_tuples)}
    # Export images and trajectories
    logger.info("Converting testing images and trajectories...")
    for extrinsic in testing_extrinsics:
        rotation_matrix = [extrinsic.extrinsics[0:3], extrinsic.extrinsics[4:7], extrinsic.extrinsics[8:11]]
        rotation = quaternion.from_rotation_matrix(rotation_matrix)
        timestamp = testing_frame_mapping[(extrinsic.light_id, extrinsic.occlusion_id, extrinsic.frame_id)]
        camera_device_id = _get_testing_camera_name(extrinsic.light_id, extrinsic.occlusion_id, extrinsic.frame_id)
        translation_vector = [extrinsic.extrinsics[3], extrinsic.extrinsics[7], extrinsic.extrinsics[11]]
        images[(timestamp, camera_device_id)] = (f'testing/gallery_light{extrinsic.light_id}_occlusion'
                                                 f'{extrinsic.occlusion_id}/frames/rgb/'
                                                 f'camera_0/rgb_{extrinsic.frame_id:05}.jpg')
        trajectories[(timestamp, camera_device_id)] = kapture.PoseTransform(rotation, translation_vector)
def test_type_checking(self):
    records_camera = kapture.RecordsCamera()
    valid_ts, valid_id, valid_record = 0, 'cam0', 'cam0/image0.jpg'
    invalid_ts, invalid_id, invalid_record = '0', float(0), kapture.PoseTransform()
    self.assertRaises(TypeError, records_camera.__setitem__, (invalid_ts, valid_id), valid_record)
    self.assertRaises(TypeError, records_camera.__setitem__, (valid_ts, invalid_id), valid_record)
    self.assertRaises(TypeError, records_camera.__setitem__, (valid_ts, valid_id), invalid_record)
    self.assertRaises(TypeError, records_camera.__setitem__, (invalid_ts, invalid_id), invalid_record)
    self.assertRaises(TypeError, records_camera.__setitem__, invalid_ts, {valid_id: valid_record})
    self.assertRaises(TypeError, records_camera.__setitem__, valid_ts, {invalid_id: valid_record})
    self.assertRaises(TypeError, records_camera.__setitem__, valid_ts, {valid_id: invalid_record})
    self.assertRaises(TypeError, records_camera.__setitem__, invalid_ts, valid_record)
    self.assertRaises(TypeError, records_camera.__contains__, invalid_ts, valid_id)
    self.assertRaises(TypeError, records_camera.__contains__, valid_ts, invalid_id)
    self.assertRaises(TypeError, records_camera.__contains__, invalid_ts, invalid_id)
    self.assertRaises(TypeError, records_camera.__delitem__, invalid_ts)
    self.assertRaises(TypeError, records_camera.__delitem__, (valid_ts, invalid_id))
def test_type_checking(self):
    traj = kapture.Trajectories()
    valid_ts, valid_id, valid_pose = 0, 'cam0', kapture.PoseTransform()
    invalid_ts, invalid_id, invalid_pose = '0', float(0), 'pose'
    self.assertRaises(TypeError, traj.__setitem__, (invalid_ts, valid_id), valid_pose)
    self.assertRaises(TypeError, traj.__setitem__, (valid_ts, invalid_id), valid_pose)
    self.assertRaises(TypeError, traj.__setitem__, (valid_ts, valid_id), invalid_pose)
    self.assertRaises(TypeError, traj.__setitem__, (invalid_ts, invalid_id), invalid_pose)
    self.assertRaises(TypeError, traj.__setitem__, invalid_ts, {valid_id: valid_pose})
    self.assertRaises(TypeError, traj.__setitem__, valid_ts, {invalid_id: valid_pose})
    self.assertRaises(TypeError, traj.__setitem__, valid_ts, {valid_id: invalid_pose})
    self.assertRaises(TypeError, traj.__setitem__, invalid_ts, valid_pose)
    self.assertRaises(TypeError, traj.__contains__, invalid_ts, valid_id)
    self.assertRaises(TypeError, traj.__contains__, valid_ts, invalid_id)
    self.assertRaises(TypeError, traj.__contains__, invalid_ts, invalid_id)
    self.assertRaises(TypeError, traj.__delitem__, invalid_ts)
    self.assertRaises(TypeError, traj.__delitem__, (valid_ts, invalid_id))
def import_robotcar_rig(extrinsics_dir_path: str) -> kapture.Rigs:
    """
    Read extrinsics files and convert to kapture rigs.

    :param extrinsics_dir_path: path to directory with extrinsics
    :return: kapture.Rigs object
    """
    # From dataset documentation:
    # The extrinsic parameters of the three cameras on the car with respect to the car itself.
    # The extrinsics are provided as a 4x4 matrix
    # [R c]
    # [0 1]
    # where R is a rotation matrix that maps from camera to world coordinates and c is the position of the camera in
    # world coordinates. The entries of the matrix are separated by ,.
    rigs = kapture.Rigs()
    for root, dirs, files in os.walk(extrinsics_dir_path):
        for extrinsics_filepath in files:
            (camera_id, _) = extrinsics_filepath.split('_')
            with open(path.join(extrinsics_dir_path, extrinsics_filepath), 'r') as f:
                m = np.array([[float(num) for num in line.split(',')] for line in f])
            rotation_matrix = m[0:3, 0:3]
            position = m[0:3, 3:4]
            pose_transform = kapture.PoseTransform(t=position, r=quaternion.from_rotation_matrix(rotation_matrix))
            # kapture rig format is "rig to sensor"
            rigs['car', camera_id] = pose_transform.inverse()
    return rigs
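# Minimal usage sketch (paths are made up; rigs_to_file is assumed to come from
# kapture.io.csv): import the car rig from the RobotCar extrinsics directory and
# save it in the kapture rigs.txt format.
from kapture.io.csv import rigs_to_file

robotcar_rigs = import_robotcar_rig('/data/robotcar/extrinsics')
rigs_to_file('/data/robotcar_kapture/sensors/rigs.txt', robotcar_rigs)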
def _verify_data(self, kapture_data) -> None:
    cameras = kapture_data.cameras
    self.assertIsNotNone(cameras, "Cameras exist")
    self.assertEqual(1, len(cameras), "One camera")
    camera = next(iter(cameras.values()))  # just take the first camera defined
    self.assertEqual(camera.camera_type, kapture.CameraType.SIMPLE_RADIAL_FISHEYE, "Type fisheye")
    camera_params = camera.camera_params
    self.assertEqual(848, camera_params[0], "width")
    self.assertEqual(800, camera_params[1], "height")
    records_camera = kapture_data.records_camera
    self.assertEqual(5, len(records_camera), "Number of images")
    first_record = records_camera[0]
    img_path = next(iter(first_record.values()))
    self.assertEqual("images/frame_000000001.jpg", img_path, "Image path")
    trajectories = kapture_data.trajectories
    self.assertEqual(5, len(trajectories), "Trajectories points")
    k_pose6d = next(iter(trajectories[0].values()))  # kapture.PoseTransform
    ref_pose = kapture.PoseTransform(t=FIRST_TRAJECTORY_TRANSLATION, r=FIRST_TRAJECTORY_ROTATION)
    self.assertTrue(equal_poses(ref_pose, k_pose6d), "First trajectory pose")
    self.assertIsNone(kapture_data.keypoints, "No keypoints")
    self.assertIsNone(kapture_data.observations, "No observations")
    self.assertIsNone(kapture_data.points3d, "No 3D points")
def convert_results_to_kapture(query_path: str, results: str, outpath: str):
    """
    Convert a results file with lines "image_name qw qx qy qz tx ty tz" to kapture.
    """
    skip_heavy_useless = [kapture.Trajectories,
                          kapture.RecordsLidar, kapture.RecordsWifi,
                          kapture.Keypoints, kapture.Descriptors, kapture.GlobalFeatures,
                          kapture.Matches, kapture.Points3d, kapture.Observations]
    kapture_query = kapture_from_dir(query_path, skip_list=skip_heavy_useless)
    inverse_records_camera = {image_name: (timestamp, sensor_id)
                              for timestamp, sensor_id, image_name in kapture.flatten(kapture_query.records_camera)}
    trajectories = kapture.Trajectories()
    with open(results) as fid:
        lines = fid.readlines()
        lines = [line.rstrip().split() for line in lines if line != '\n']
    for line in lines:
        image_name = line[0]
        rotation = quaternion.quaternion(float(line[1]), float(line[2]), float(line[3]), float(line[4]))
        translation = [float(line[5]), float(line[6]), float(line[7])]
        timestamp, sensor_id = inverse_records_camera[image_name]
        trajectories[timestamp, sensor_id] = kapture.PoseTransform(rotation, translation)
    kapture_query.trajectories = trajectories
    kapture_to_dir(outpath, kapture_query)
def convert_training_extrinsics(offset: int,
                                training_extrinsics: Iterable[VirtualGalleryTrainingExtrinsic],
                                images: kapture.RecordsCamera,
                                trajectories: kapture.Trajectories,
                                as_rig: bool) -> None:
    """
    Import all training extrinsics into the images and trajectories.

    :param offset: offset added to the generated timestamps
    :param training_extrinsics: training extrinsics to import
    :param images: image list to add to
    :param trajectories: trajectories to add to
    :param as_rig: writes the position of the rig instead of individual cameras
    """
    # Map (light_id, loop_id, frame_id) to a unique timestamp
    training_frames_tuples = ((extrinsic.light_id, extrinsic.loop_id, extrinsic.frame_id)
                              for extrinsic in training_extrinsics)
    training_frames_tuples = OrderedDict.fromkeys(training_frames_tuples).keys()
    training_frame_mapping = {v: n + offset for n, v in enumerate(training_frames_tuples)}
    # Export images and trajectories
    logger.info("Converting training images and trajectories...")
    for extrinsic in training_extrinsics:
        rotation_matrix = [extrinsic.extrinsics[0:3], extrinsic.extrinsics[4:7], extrinsic.extrinsics[8:11]]
        rotation = quaternion.from_rotation_matrix(rotation_matrix)
        timestamp = training_frame_mapping[(extrinsic.light_id, extrinsic.loop_id, extrinsic.frame_id)]
        camera_device_id = _get_training_camera_name(extrinsic.camera_id)
        translation_vector = [extrinsic.extrinsics[3], extrinsic.extrinsics[7], extrinsic.extrinsics[11]]
        images[(timestamp, camera_device_id)] = (f'training/gallery_light{extrinsic.light_id}_loop'
                                                 f'{extrinsic.loop_id}/frames/rgb/camera_{extrinsic.camera_id}'
                                                 f'/rgb_{extrinsic.frame_id:05}.jpg')
        pose_cam = kapture.PoseTransform(rotation, translation_vector)
        if as_rig:
            pose_rig = kapture.PoseTransform.compose([
                training_rig_config[(_get_training_rig_name(), camera_device_id)].inverse(),
                pose_cam])
            if (timestamp, _get_training_rig_name()) in trajectories:
                assert equal_poses(pose_rig, trajectories[(timestamp, _get_training_rig_name())])
            else:
                trajectories[(timestamp, _get_training_rig_name())] = pose_rig
        else:
            trajectories[(timestamp, camera_device_id)] = pose_cam
def test_none(self):
    expect_r = None
    expect_t = None
    pose = kapture.PoseTransform(expect_r, expect_t)
    actual_r = pose.r_raw
    actual_t = pose.t_raw
    self.assertEqual(actual_r, None)
    self.assertEqual(actual_t, None)