def convert_rel_traj_to_abs_traj(traj):
    """ Converts a relative pose trajectory to an absolute-pose trajectory.

        The incoming trajectory is processed element-wise. The relative pose at each
        timestamp is composed onto the absolute pose accumulated up to the previous timestamp.

        Args:
            traj: A PoseTrajectory3D object with timestamps as indices containing, at a minimum,
                columns representing the xyz position and wxyz quaternion-rotation at each 
                timestamp, corresponding to the pose between previous and current timestamps.
            
        Returns:
            A PoseTrajectory3D object with xyz position and wxyz quaternion fields for the
            absolute pose trajectory corresponding to the relative one given in `traj`.
    """
    from evo.core import lie_algebra as lie

    # Start at the identity and chain each relative pose onto the last
    # accumulated absolute pose.
    new_poses = [lie.se3()]  # origin at identity

    for i in range(len(traj.timestamps)):
        abs_pose = np.dot(new_poses[-1], traj.poses_se3[i])
        new_poses.append(abs_pose)

    # Drop the identity origin so that each absolute pose is paired with the
    # timestamp of the relative pose that produced it.
    return trajectory.PoseTrajectory3D(timestamps=traj.timestamps,
                                       poses_se3=new_poses[1:])
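
As a quick, minimal usage sketch (assuming `np`, `trajectory`, and both converters defined in these examples are in scope; the toy numbers are made up), the two converters round-trip a small trajectory:

# Sketch only: toy 3-pose trajectory with identity rotations.
pos_xyz = np.array([[0., 0., 0.], [1., 0., 0.], [1., 1., 0.]])
quat_wxyz = np.array([[1., 0., 0., 0.]] * 3)
timestamps = np.array([0., 1., 2.])

traj_abs = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)
traj_rel = convert_abs_traj_to_rel_traj(traj_abs)        # two relative poses
traj_abs_again = convert_rel_traj_to_abs_traj(traj_rel)  # re-accumulated from identity

# Because the toy trajectory starts at the identity pose, the re-accumulated
# positions coincide with the originals from the second timestamp onward.
print(traj_abs_again.positions_xyz)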
Example no. 2
    def test_convert_abs_traj_to_rel_traj_0(self):
        """ Test translation-only relative trajectory """
        pos_xyz = [[0, 0, 0], [1, 0, 0]]
        quat_wxyz = [[0.7071068, 0.7071068, 0, 0],
                     [0.7071068, 0.7071068, 0, 0]]
        timestamps = [0, 1]

        traj_abs = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)
        traj_rel = convert_abs_traj_to_rel_traj(traj_abs, False)

        self.assertEqual(len(traj_rel.positions_xyz), 1)
        self.assertEqual(len(traj_rel.orientations_quat_wxyz), 1)
        self.assertEqual(len(traj_rel.timestamps), 1)
        self.assertEqual(traj_rel.timestamps[0], timestamps[1])

        self.assertTrue(
            np.allclose(traj_rel.positions_xyz[0], pos_xyz[1], atol=1e-6))
        self.assertTrue(
            np.allclose(traj_rel.orientations_quat_wxyz[0], [1, 0, 0, 0],
                        atol=1e-6))
Example no. 3
def path2trajectory(rpose, rtime):
    """ Attaches the timestamps in `rtime` to the pose data of `rpose` (a PosePath3D-like
        object), returning a PoseTrajectory3D. """
    rtraj = trajectory.PoseTrajectory3D(
        positions_xyz=rpose.positions_xyz,
        orientations_quat_wxyz=rpose.orientations_quat_wxyz,
        timestamps=rtime,
        poses_se3=rpose.poses_se3)
    return rtraj
Example no. 4
def df_to_trajectory(df):
    if not isinstance(df, pd.DataFrame):
        raise TypeError("pandas.DataFrame or derived required")
    positions_xyz = df.loc[:, ['x', 'y', 'z']].to_numpy()
    quaternions_wxyz = df.loc[:, ['qw', 'qx', 'qy', 'qz']].to_numpy()
    # NOTE: df must have timestamps as index
    stamps = np.divide(df.index, 1e9)  # n x 1 - nanoseconds to seconds
    return trajectory.PoseTrajectory3D(positions_xyz, quaternions_wxyz, stamps)
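
A minimal sketch of the DataFrame layout that `df_to_trajectory` expects (column names as used above; the index must hold timestamps in nanoseconds; the sample values are made up):

# Sketch only: assumes `pd`, `np`, and `df_to_trajectory` above are in scope.
sample_df = pd.DataFrame(
    {
        'x': [0.0, 0.1], 'y': [0.0, 0.0], 'z': [0.0, 0.0],
        'qw': [1.0, 1.0], 'qx': [0.0, 0.0], 'qy': [0.0, 0.0], 'qz': [0.0, 0.0],
    },
    index=[0, 100000000],  # timestamps in nanoseconds, as required
)
traj = df_to_trajectory(sample_df)
print(traj.timestamps)  # timestamps converted to seconds: 0.0 and 0.1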
Example no. 5
    def accumulate_error(self, seq, est):
        assert (seq in self.gt_traj)
        est_traj = trajectory.PoseTrajectory3D(
            poses_se3=est, timestamps=self.raw_timestamps[seq][:len(est)])
        ape_metric, ape_stat = calc_euroc_seq_errors(est_traj,
                                                     self.gt_traj[seq])
        self.errors.append(ape_stat)

        return ape_stat
Example no. 6
    def test_get_ape_rot_0(self):
        """ Test equivalent trajectories for zero ARE """
        pos_xyz = [[random.random() for _ in range(3)] for _ in range(15)]
        quat_wxyz = [[random.random() for _ in range(4)] for _ in range(15)]
        timestamps = [i for i in range(15)]

        traj_1 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)
        traj_2 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        ape_metric_rot = get_ape_rot((traj_1, traj_2))

        self.assertEqual(ape_metric_rot.pose_relation,
                         metrics.PoseRelation.rotation_angle_deg)
        self.assertEqual(ape_metric_rot.unit, metrics.Unit.degrees)
        self.assertTrue(
            np.allclose(ape_metric_rot.error,
                        [0. for i in range(len(ape_metric_rot.error))],
                        atol=1e-5))
Example no. 7
    def test_get_rpe_trans_0(self):
        """ Test equivalent trajectories for zero RTE """
        pos_xyz = [[random.random() for _ in range(3)] for _ in range(15)]
        quat_wxyz = [[random.random() for _ in range(4)] for _ in range(15)]
        timestamps = [i for i in range(15)]

        traj_1 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)
        traj_2 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        rpe_metric_trans = get_rpe_trans((traj_1, traj_2))

        self.assertEqual(rpe_metric_trans.pose_relation,
                         metrics.PoseRelation.translation_part)
        self.assertEqual(rpe_metric_trans.unit, metrics.Unit.meters)
        self.assertTrue(
            np.allclose(rpe_metric_trans.error,
                        [0. for i in range(len(rpe_metric_trans.error))],
                        atol=1e-5))
Example no. 8
    def test_convert_abs_traj_to_rel_traj_1(self):
        """ Test equivalent trajectories for zero relative ATE (not to-scale) """
        pos_xyz = np.array([[random.random() for _ in range(3)]
                            for _ in range(15)])
        quat_wxyz = np.array([[random.random() for _ in range(4)]
                              for _ in range(15)])
        timestamps = np.array([i for i in range(15)])

        traj_1 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)
        pos_xyz = [pos * 2 for pos in pos_xyz]
        traj_2 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        traj_1 = convert_abs_traj_to_rel_traj(traj_1, True)
        traj_2 = convert_abs_traj_to_rel_traj(traj_2, True)
        ape_metric_trans = get_ape_trans((traj_1, traj_2))

        self.assertTrue(
            np.allclose(ape_metric_trans.error,
                        [0. for i in range(len(ape_metric_trans.error))],
                        atol=1e-5))
Example no. 9
def read_tum(vins_mono_poses):
    # TUM-format rows are [timestamp, x, y, z, qx, qy, qz, qw]; evo expects
    # quaternions in wxyz order, so reorder the quaternion columns.
    wxyz = np.zeros_like(vins_mono_poses[:, 4:])
    wxyz[:, 0] = vins_mono_poses[:, 7]
    wxyz[:, 1:4] = vins_mono_poses[:, 4:7]

    xyz = vins_mono_poses[:, 1:4]
    ts = vins_mono_poses[:, 0]

    return trajectory.PoseTrajectory3D(positions_xyz=xyz,
                                       orientations_quat_wxyz=wxyz,
                                       timestamps=ts)
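
A minimal usage sketch (the file path is a placeholder): `read_tum` takes the raw array loaded from a TUM-format file, whose rows are `timestamp x y z qx qy qz qw`:

# Sketch only: assumes `np` and `read_tum` above are in scope.
raw_poses = np.loadtxt("poses.tum")  # placeholder path
traj = read_tum(raw_poses)
print(traj.num_poses, traj.timestamps[0])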
Example no. 10
    def test_get_rpe_trans_1(self):
        """ Test a 1-meter RTE on the first pose only """
        pos_xyz = [[random.random() for _ in range(3)] for _ in range(15)]
        quat_wxyz = [[random.random() for _ in range(4)] for _ in range(15)]
        timestamps = [i for i in range(15)]

        pos_xyz[0] = [0, 0, 0]
        traj_1 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        pos_xyz[0] = [1, 0, 0]
        traj_2 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        rpe_metric_trans = get_rpe_trans((traj_1, traj_2))

        self.assertAlmostEqual(rpe_metric_trans.error[0], 1.)
        self.assertTrue(
            np.allclose(rpe_metric_trans.error[1:],
                        [0. for i in range(len(rpe_metric_trans.error[1:]))],
                        atol=1e-5))
Example no. 11
    def test_get_ape_rot_1(self):
        """ Test a 90-deg ARE on the first pose only """
        pos_xyz = [[random.random() for _ in range(3)] for _ in range(15)]
        quat_wxyz = [[random.random() for _ in range(4)] for _ in range(15)]
        timestamps = [i for i in range(15)]

        quat_wxyz[0] = [1, 0, 0, 0]
        traj_1 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        quat_wxyz[0] = [0.7071068, 0.7071068, 0, 0]
        traj_2 = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)

        ape_metric_rot = get_ape_rot((traj_1, traj_2))

        self.assertAlmostEqual(ape_metric_rot.error[0], 90.)
        self.assertTrue(
            np.allclose(ape_metric_rot.error[1:],
                        [0. for i in range(len(ape_metric_rot.error[1:]))],
                        atol=1e-5))
Example no. 12
def convert_abs_traj_to_rel_traj(traj, up_to_scale=False):
    """ Converts an absolute-pose trajectory to a relative-pose trajectory.
    
        The incoming trajectory is processed element-wise. At each timestamp
        starting from the second (index 1), the relative pose 
        from the previous timestamp to the current one is calculated (in the previous-
        timestamp's coordinate frame). This relative pose is then appended to the 
        resulting trajectory.
        Each timestamp in the resulting trajectory is therefore paired with the pose that
        represents the relative transformation from the **previous** timestamp to that one.
        
        Args:
            traj: A PoseTrajectory3D object with timestamps as indices containing, at a minimum,
                columns representing the xyz position and wxyz quaternion-rotation at each
                timestamp, corresponding to the absolute pose at that time.
            up_to_scale: A boolean. If set to True, relative poses will have their translation
                part normalized.
        
        Returns:
            A PoseTrajectory3D object with xyz position and wxyz quaternion fields for the 
            relative pose trajectory corresponding to the absolute one given in `traj`.
    """
    from evo.core import lie_algebra as lie

    new_poses = []

    for i in range(1, len(traj.timestamps)):
        rel_pose = lie.relative_se3(traj.poses_se3[i - 1], traj.poses_se3[i])

        if up_to_scale:
            bim1_t_bi = rel_pose[:3, 3]
            norm = np.linalg.norm(bim1_t_bi)
            if norm > 1e-6:
                bim1_t_bi = bim1_t_bi / norm
                rel_pose[:3, 3] = bim1_t_bi

        new_poses.append(rel_pose)

    return trajectory.PoseTrajectory3D(timestamps=traj.timestamps[1:],
                                       poses_se3=new_poses)
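
A small sketch of what `up_to_scale=True` changes, using a toy two-pose trajectory (assuming `np` and `trajectory` are in scope): the 2-meter step becomes a unit step, while the rotation part is untouched.

# Sketch only: two absolute poses 2 m apart along x, identity rotations.
pos_xyz = np.array([[0., 0., 0.], [2., 0., 0.]])
quat_wxyz = np.array([[1., 0., 0., 0.], [1., 0., 0., 0.]])
timestamps = np.array([0., 1.])

traj_abs = trajectory.PoseTrajectory3D(pos_xyz, quat_wxyz, timestamps)
rel_metric = convert_abs_traj_to_rel_traj(traj_abs, up_to_scale=False)
rel_scaled = convert_abs_traj_to_rel_traj(traj_abs, up_to_scale=True)

print(rel_metric.positions_xyz[0])  # -> [2. 0. 0.]
print(rel_scaled.positions_xyz[0])  # -> [1. 0. 0.] (translation normalized)
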
def convert_rel_traj_from_body_to_cam(rel_traj, body_T_cam):
    """Converts a relative pose trajectory from body frame to camera frame

    Args:
        rel_traj: Relative trajectory, a PoseTrajectory3D object containing timestamps
            and relative poses at each timestamp. It has to have the poses_se3 field.

        body_T_cam: The SE(3) transformation from the camera frame to the body frame,
            also known as the camera extrinsics matrix.

    Returns:
        A PoseTrajectory3D object in camera frame
    """
    def assert_so3(R):
        assert (np.isclose(np.linalg.det(R), 1, atol=1e-06))
        assert (np.allclose(np.matmul(R, R.transpose()), np.eye(3),
                            atol=1e-06))

    assert_so3(body_T_cam[0:3, 0:3])

    new_poses = []
    for i in range(len(rel_traj.timestamps)):
        im1_body_T_body_i = rel_traj.poses_se3[i]
        assert_so3(im1_body_T_body_i[0:3, 0:3])

        im1_cam_T_cam_i = np.matmul(
            np.matmul(np.linalg.inv(body_T_cam), im1_body_T_body_i),
            body_T_cam)

        assert_so3(np.linalg.inv(body_T_cam)[0:3, 0:3])
        assert_so3(im1_cam_T_cam_i[0:3, 0:3])

        new_poses.append(im1_cam_T_cam_i)

    return trajectory.PoseTrajectory3D(timestamps=rel_traj.timestamps,
                                       poses_se3=new_poses)
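
A quick sanity-check sketch for the conjugation above (toy extrinsics chosen for illustration; `np` and `trajectory` assumed in scope): a 1 m step along body x shows up as a step along a camera axis instead, while the rotation part of each relative pose keeps its angle.

# Sketch only: camera rotated 90 deg about z w.r.t. the body, offset 0.1 m in x.
body_T_cam = np.eye(4)
body_T_cam[:3, :3] = np.array([[0., -1., 0.],
                               [1.,  0., 0.],
                               [0.,  0., 1.]])
body_T_cam[:3, 3] = [0.1, 0., 0.]

# One relative body-frame pose: 1 m forward along body x, no rotation.
rel_pose = np.eye(4)
rel_pose[:3, 3] = [1., 0., 0.]
rel_traj = trajectory.PoseTrajectory3D(timestamps=np.array([1.]),
                                       poses_se3=[rel_pose])

rel_traj_cam = convert_rel_traj_from_body_to_cam(rel_traj, body_T_cam)
print(rel_traj_cam.poses_se3[0][:3, 3])  # -> [ 0. -1.  0.]
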
traj_est_unassociated = pandas_bridge.df_to_trajectory(mono_df)

# Associate the trajectories
traj_ref_abs, traj_est_rel = sync.associate_trajectories(
    traj_ref_unassociated, traj_est_unassociated)

traj_ref_rel = convert_abs_traj_to_rel_traj(traj_ref_abs, up_to_scale=False)

# Transform the relative gt trajectory from body to left camera frame
traj_ref_cam_rel = convert_rel_traj_from_body_to_cam(traj_ref_rel,
                                                     body_T_leftCam)

# Remove the first timestamp; we don't have a relative pose at the first gt timestamp.
traj_est_rel = trajectory.PoseTrajectory3D(
    traj_est_rel.positions_xyz[1:],
    traj_est_rel.orientations_quat_wxyz[1:],
    traj_est_rel.timestamps[1:],
)

print("traj_ref_rel: ", traj_ref_rel)
print("traj_ref_cam_rel: ", traj_ref_cam_rel)
print("traj_est_rel: ", traj_est_rel)

# Frames of trajectories:
# traj_ref_rel: body frame relative poses
# traj_ref_cam_rel: left camera frame relative poses
# traj_est_rel: left camera frame relative poses

# Save this relative-pose ground truth file to disk as a csv for later use, if needed.
# gt_rel_filename = "/home/marcus/output_gt_rel_poses_mono.csv"
# gt_rel_df.to_csv(gt_rel_filename, sep=',', columns=['x', 'y', 'z', 'qw', 'qx', 'qy', 'qz'])
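
From here, one possible next step (a sketch only, assuming the `get_ape_trans` / `get_ape_rot` helpers from the earlier examples are importable in this script) is to compare the two camera-frame relative trajectories directly:

# Sketch only: pose-to-pose error between the relative ground truth and the
# relative estimate, both expressed in the left camera frame.
ape_tran = get_ape_trans((traj_ref_cam_rel, traj_est_rel))
ape_rot = get_ape_rot((traj_ref_cam_rel, traj_est_rel))
print("relative translation error stats:", ape_tran.get_all_statistics())
print("relative rotation error stats:", ape_rot.get_all_statistics())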
Example no. 15
gt_poses = np.loadtxt(os.path.join(base_dir, "EUROC_vins_mono", seq, "gt.tum"))
vanilla_poses = np.load(
    os.path.join(pfind(base_dir, "EUROC", seq + "_train"),
                 "saved_model.checkpoint.traj/est_poses", seq + ".npy"))
vision_only_poses = np.load(
    os.path.join(pfind(base_dir, "EUROC_vision_only", seq),
                 "saved_model.eval.traj/est_poses", seq + ".npy"))
vins_mono_poses = np.loadtxt(
    os.path.join(base_dir, "EUROC_vins_mono", seq, "vinsmono_output.tum"))
# imu_only_poses = np.load(os.path.join(base_dir, "KITTI_imu_only", seq, "est.npy"))
seq_data = SequenceData(seq)
raw_timestamps = np.array(seq_data.df.loc[:, "timestamp_raw"])
timestamps = timestamps_rel + raw_timestamps[0] / 1e9

gt_traj = read_tum(gt_poses)
vanilla_traj = trajectory.PoseTrajectory3D(poses_se3=vanilla_poses,
                                           timestamps=timestamps)
vision_only_traj = trajectory.PoseTrajectory3D(poses_se3=vision_only_poses,
                                               timestamps=timestamps)

vins_mono_traj = read_tum(vins_mono_poses)

gt_traj_synced_vanilla, vanilla_traj_synced = sync.associate_trajectories(
    gt_traj, vanilla_traj, max_diff=0.01)
gt_traj_synced_vision_only, vision_only_traj_synced = sync.associate_trajectories(
    gt_traj, vision_only_traj, max_diff=0.01)
gt_traj_synced_vins_mono, vins_mono_traj_synced = sync.associate_trajectories(
    gt_traj, vins_mono_traj, max_diff=0.01)

vanilla_traj_aligned = trajectory.align_trajectory(vanilla_traj_synced,
                                                   gt_traj_synced_vanilla,
                                                   correct_scale=False,