# Exemplo n.º 1
# 0
def make_pose_error(estimated_pose: tf.Transform, reference_pose: tf.Transform) -> PoseError:
    """
    Make a pose error object from an estimated pose and the reference (true) pose.

    The translational error is the element-wise difference between the two locations.
    Its "direction" is the angle between the error vector and the true location vector,
    which is NaN whenever either vector has zero length (no meaningful direction).
    The rotational error is the angle between the two orientations.

    :param estimated_pose: The estimated pose, as a tf.Transform
    :param reference_pose: The reference (ground truth) pose, as a tf.Transform
    :return: A PoseError holding the per-axis, length, direction, and rotation errors
    """
    trans_error = estimated_pose.location - reference_pose.location
    trans_error_length = np.linalg.norm(trans_error)

    trans_error_direction = np.nan  # No direction if the vectors are the same
    if trans_error_length > 0:
        # Get the unit vector in the direction of the true location
        reference_norm = np.linalg.norm(reference_pose.location)
        if reference_norm > 0:
            unit_reference = reference_pose.location / reference_norm
            # Find the angle between the trans error and the true location
            dot_product = np.dot(trans_error / trans_error_length, unit_reference)
            trans_error_direction = np.arccos(
                # Clip to the full arccos domain [-1, 1] to avoid domain errors.
                # (Clipping the lower bound at 0.0, as before, wrongly capped the
                # angle at pi/2 when the error opposes the reference direction;
                # get_error_from_motion uses the correct [-1, 1] range.)
                min(1.0, max(-1.0, dot_product))
            )
    # Different to the trans_direction, this is the angle between the estimated orientation and true orientation
    rot_error = tf.quat_diff(estimated_pose.rotation_quat(w_first=True), reference_pose.rotation_quat(w_first=True))
    return PoseError(
        x=trans_error[0],
        y=trans_error[1],
        z=trans_error[2],
        length=trans_error_length,
        direction=trans_error_direction,
        rot=rot_error
    )
def get_error_from_motion(motion: tf.Transform, gt_motion: tf.Transform, avg_motion: tf.Transform = None) \
        -> typing.Tuple[float, float, float, float, float, float, float, float, float, float, float, float]:
    """
    Given a motion, ground truth motion, and average estimated motion, extract 12 different error statistics
    :param motion: the estimated motion
    :param gt_motion: the ground-truth motion
    :param avg_motion: the average estimated motion, or None when no average is available
    :return: a 12-tuple: error x/y/z, error length, error direction, rotation error,
             followed by the same six statistics for the noise (all NaN when avg_motion is None)
    """
    def direction_angle(offset: np.ndarray, offset_length: float) -> float:
        # Angle between the given offset vector and the true motion direction.
        # By convention the angle is 0 when the offset has zero length.
        if offset_length > 0:
            cosine = np.dot(offset / offset_length,
                            gt_motion.location / np.linalg.norm(gt_motion.location))
            # Clamp to the arccos domain to avoid numerical domain errors
            return np.arccos(min(1.0, max(-1.0, cosine)))
        return 0

    # Error statistics: deviation from the ground-truth motion
    trans_error = motion.location - gt_motion.location
    trans_error_length = np.linalg.norm(trans_error)
    trans_error_direction = direction_angle(trans_error, trans_error_length)
    rot_error = tf.quat_diff(motion.rotation_quat(w_first=True),
                             gt_motion.rotation_quat(w_first=True))
    error_stats = (trans_error[0], trans_error[1], trans_error[2],
                   trans_error_length, trans_error_direction, rot_error)

    if avg_motion is None:
        # No average estimate, so the noise statistics are undefined
        return error_stats + (np.nan, np.nan, np.nan, np.nan, np.nan, np.nan)

    # Noise statistics: deviation from the average estimated motion
    trans_noise = motion.location - avg_motion.location
    trans_noise_length = np.linalg.norm(trans_noise)
    trans_noise_direction = direction_angle(trans_noise, trans_noise_length)
    rot_noise = tf.quat_diff(motion.rotation_quat(w_first=True),
                             avg_motion.rotation_quat(w_first=True))

    return error_stats + (trans_noise[0], trans_noise[1], trans_noise[2],
                          trans_noise_length, trans_noise_direction, rot_noise)
# Exemplo n.º 3
# 0
def fix_coordinates(trans: tf.Transform) -> tf.Transform:
    """
    Exchange the coordinates on a transform from camera frame to world frame.
    Used for the camera extrinsics.
    :param trans: the transform expressed in the camera coordinate frame
    :return: the equivalent transform in the world coordinate frame
    """
    # Unpack the location (x, y, z) and the w-first quaternion (qw, qx, qy, qz)
    # directly into make_camera_pose, which performs the coordinate exchange.
    return make_camera_pose(*trans.location, *trans.rotation_quat(w_first=True))
# Exemplo n.º 4
# 0
def align_point(pose: tf.Transform,
                shift: np.ndarray,
                rotation: np.ndarray,
                scale: float = 1.0) -> tf.Transform:
    """
    Transform an estimated point by a rigid alignment: rotate, scale, then shift
    the location, and compose the rotation onto the orientation.
    :param pose: the pose to align
    :param shift: translation applied after rotation and scaling
    :param rotation: w-first quaternion to rotate by
    :param scale: scale factor applied to the rotated location (default 1.0)
    :return: the aligned pose as a new tf.Transform
    """
    # Rotate the location into the aligned frame, then scale and translate it
    rotated_location = t3.quaternions.rotate_vector(pose.location, rotation)
    aligned_location = shift + scale * rotated_location
    # Compose the alignment rotation with the pose's own orientation
    aligned_rotation = t3.quaternions.qmult(rotation, pose.rotation_quat(w_first=True))
    return tf.Transform(location=aligned_location,
                        rotation=aligned_rotation,
                        w_first=True)