    def test_givenDataset_whenCallingItem_ShouldReturnTheTupleWithProperAttitude(
            self):
        dataset = MidAirImageSequenceDatasetEulerDifferences(
            "./Ressources/MidAir/Kite_test",
            new_size=(512, 512),
            img_mean=[1, 1, 1],
            img_std=[1, 1, 1],
            trajectories=["trajectory_3000"])
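        # Item 1's target tensor: the first three columns hold the relative Tait-Bryan rotations.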
        relative_attitude = dataset.__getitem__(1)[1][:, :3]

        with dataset.HDF5["./Ressources/MidAir/Kite_test/cloudy"] as hdf5:
            expected_attitude = hdf5["trajectory_3000"]["groundtruth"][
                "attitude"][4:8]
            initial_rotation = Quaternion(expected_attitude[0])

        relative_attitude = Geometry.tait_bryan_rotations_to_quaternions(
            relative_attitude)
        actual_attitudes = Geometry.assemble_delta_quaternion_rotations(
            relative_attitude)
        actual_attitudes = self._rotate_quaternions_to_world_frame(
            actual_attitudes)

        actual_attitudes = Geometry.reset_orientations_to_origin(
            initial_rotation, actual_attitudes)
        actual_attitudes = [att.elements for att in actual_attitudes]
        self.assertTrue(numpy.allclose(actual_attitudes, expected_attitude))

    def test_givenDataset_whenCallingItem_ShouldReturnTheTupleWithProperPosition(
            self):
        dataset = MidAirImageSequenceDatasetEulerDifferences(
            "./Ressources/MidAir/Kite_test",
            new_size=(512, 512),
            img_mean=[1, 1, 1],
            img_std=[1, 1, 1],
            trajectories=["trajectory_3000"])

        with dataset.HDF5["./Ressources/MidAir/Kite_test/cloudy"] as hdf5:
            expected_position = hdf5["trajectory_3000"]["groundtruth"][
                "position"][4:8]
            initial_rotation = Quaternion(
                hdf5["trajectory_3000"]["groundtruth"]["attitude"][4])
            initial_position = expected_position[0]

        # Reverse the position differencing to recover absolute positions
        actual_position_differences = dataset.__getitem__(1)[1][:, 3:].numpy()
        self.assertEqual(actual_position_differences.shape, (3, 3))

        actual_positions = Geometry.assemble_delta_translations(
            actual_position_differences)
        actual_positions = self._rotate_camera_frame_to_world_frame(
            torch.Tensor(actual_positions)).numpy()
        # Remap the axes to the original absolute frame
        actual_positions = Geometry.remap_position_axes(
            initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(
            initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (4, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))

    def test_givenDataset_whenCallingGetPositionDifferences_shouldReturnProperPositionDifferences(
            self):
        with self.dataset.HDF5["./Ressources/MidAir/Kite_test/cloudy"] as hdf5:
            expected_position = hdf5["trajectory_3000"]["groundtruth"][
                "position"][4:8]
            initial_rotation = Quaternion(
                hdf5["trajectory_3000"]["groundtruth"]["attitude"][4])
            initial_position = expected_position[0]

        # Reverse the position differencing to recover absolute positions
        actual_position_differences = self.second_segment.get_position_differences(
        ).numpy()
        self.assertEqual(actual_position_differences.shape, (3, 3))
        actual_positions_camera_frame = Geometry.assemble_delta_translations(
            actual_position_differences)

        # Rotate the positions from the camera frame back to the world frame
        actual_positions = self._rotate_camera_frame_to_world_frame(
            torch.Tensor(actual_positions_camera_frame)).numpy()

        for i, pos in enumerate(actual_positions_camera_frame):
            # Assert that the positions were switched to the KITTI camera coordinate system
            numpy.testing.assert_array_equal(actual_positions[i],
                                             pos[[2, 0, 1]])

        actual_positions = Geometry.remap_position_axes(
            initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(
            initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (4, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))

    def test_givenDataset_whenCallingGetAttitudes_shouldReturnProperAttitude(
            self):
        with self.dataset.HDF5["./Ressources/MidAir/Kite_test/cloudy"] as hdf5:
            expected_attitude = hdf5["trajectory_3000"]["groundtruth"][
                "attitude"][4:8]
            initial_attitude = expected_attitude[0]

        actual_attitude = self.second_segment.get_attitudes()
        self.assertEqual(len(actual_attitude), 4)
        numpy.testing.assert_array_almost_equal(
            actual_attitude[0].elements,
            numpy.asarray([1.0, 0.0, 0.0, 0.0]))
        actual_attitude_camera_frame = Geometry.quaternion_elements_to_quaternions(
            actual_attitude)
        actual_attitude = self._rotate_quaternions_to_world_frame(
            actual_attitude_camera_frame)

        for i, quat in enumerate(actual_attitude_camera_frame):
            imaginaries = quat.elements[1:]
            # Assert that the quaternions were switched to the KITTI camera coordinate system
            numpy.testing.assert_array_equal(actual_attitude[i].elements[1:],
                                             imaginaries[[2, 0, 1]])

        actual_attitude = Geometry.reset_orientations_to_origin(
            Quaternion(initial_attitude), actual_attitude)

        self.assertTrue(
            numpy.allclose(
                numpy.asarray([att.elements for att in actual_attitude]),
                expected_attitude))
Example #5
    def test_assemble_delta_tait_bryan_rotations(self):
        assembled = Geometry.assemble_delta_tait_bryan_rotations(
            self.dataset.__getitem__(1)[1].numpy()[:, :3])
        expected = numpy.asarray(
            Geometry.quaternions_to_tait_bryan_rotations(
                self.segment.get_attitudes()))
        numpy.testing.assert_almost_equal(assembled, expected)
Example #6
    def test_get_tait_bryan_orientation_differences(self):
        expected = self.dataset.__getitem__(1)[1].numpy()[:, :3]
        attitudes = numpy.asarray(
            Geometry.quaternions_to_tait_bryan_rotations(
                self.segment.get_attitudes()))
        differences = Geometry.get_tait_bryan_orientation_differences(
            attitudes)
        numpy.testing.assert_almost_equal(differences, expected)
Example #7
    def test_reset_euler_orientations_to_origin(self):
        assembled = Geometry.assemble_delta_tait_bryan_rotations(
            self.dataset.__getitem__(1)[1].numpy()[:, :3])
        initial_rotation = Geometry.matrix_to_tait_bryan_euler(
            self.dataframe5x5.iloc[1, 2][0, :9])
        expected = Geometry.rotation_matrices_to_euler(
            self.dataframe5x5.iloc[1, 2][:, :9])
        resetted = Geometry.reset_euler_orientations_to_origin(
            initial_rotation, assembled)
        numpy.testing.assert_almost_equal(resetted, expected)
Example #8
    def test_givenDataset_whenCallingGetPosition_shouldReturnProperPosition(self):
        expected_position = self.dataframe5x5.iloc[1, :].pose[:, 9:]
        initial_rotation = Geometry.matrix_to_quaternion(self.dataframe5x5.iloc[1, :].pose[0, :9])
        initial_position = expected_position[0]

        actual_position = self.segment.get_positions().numpy()
        self.assertEqual(actual_position.shape, (5, 3))
        numpy.testing.assert_array_almost_equal(actual_position[0], numpy.asarray([0.0, 0.0, 0.0]))
        # Remap the axes to the original absolute frame
        actual_position = Geometry.remap_position_axes(initial_rotation, actual_position)
        actual_position = Geometry.reset_positions_to_origin(initial_position, actual_position)
        self.assertTrue(numpy.allclose(actual_position, expected_position))
Example #9
    def _reset_attitude_origin(self, attitude: np.ndarray) -> list:
        """Reset the sequence's rotations relative to the first frame. Expects the attitude as rotation matrices and returns a list of Quaternions."""
        initial_orientation_quat = Geometry.matrix_to_quaternion(attitude[0])
        resetted_attitude = []

        for orientation in attitude:
            current_orientation_quat = Geometry.matrix_to_quaternion(
                orientation)
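            # Left-multiplying by the inverse of the initial orientation expresses each
            # orientation relative to the first frame, so the first entry becomes the identity.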
            resetted_orientation_quat = initial_orientation_quat.inverse * current_orientation_quat
            resetted_attitude.append(resetted_orientation_quat)

        return resetted_attitude
Example #10
    def test_quaternions_to_tait_bryan_rotations(self):
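        # Tait-Bryan angles of 10, 20 and 45 degrees, expressed in radians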
        y = 0.1745329
        x_prime = 0.3490659
        z_prime_prime = 0.7853982
        tait_bryan = numpy.asarray([y, x_prime, z_prime_prime])
        quat = Geometry.tait_bryan_rotation_to_quaternion(tait_bryan)

        series_of_rotations = numpy.asarray([quat, quat, quat])
        expected = numpy.asarray([tait_bryan, tait_bryan, tait_bryan])

        actual = numpy.asarray(
            Geometry.quaternions_to_tait_bryan_rotations(series_of_rotations))

        numpy.testing.assert_almost_equal(actual, expected)
Example #11
    def test_assemble_batch_delta_tait_bryan_rotations(self):
        segment = self.dataset.__getitem__(1)[1].numpy()[:, :3]
        batch = [segment, segment, segment]
        batch = numpy.asarray(batch)

        assembled_batch = Geometry.assemble_batch_delta_tait_bryan_rotations(
            batch)
        expected = numpy.asarray(
            Geometry.quaternions_to_tait_bryan_rotations(
                self.segment.get_attitudes()))

        expected_batch = [expected, expected, expected]
        expected_batch = numpy.asarray(expected_batch)

        numpy.testing.assert_almost_equal(assembled_batch, expected_batch)
Example #12
    def _log_matrix_poses(self, poses, poses_gt, dataset_name: str,
                          trajectory_name: str):
        """
        Logs the poses in text format, with the rotation expressed as a matrix.
        Each 4x4 transformation

            T00 T01 T02 T03
            T10 T11 T12 T13
            T20 T21 T22 T23
            0   0   0   1

        is written as one line containing its first three rows, flattened row by row:

            T00 T01 T02 T03 T10 T11 T12 T13 T20 T21 T22 T23
        """
        pose_output = ""
        pose_gt_output = ""
        matrices = Geometry.poses_to_transformations_matrix(
            poses[:, 3:], poses[:, :3])
        matrices_gt = Geometry.poses_to_transformations_matrix(
            poses_gt[:, 3:], poses_gt[:, :3])

        for i, _ in enumerate(matrices):
            pose_matrix = matrices[i]
            pose_matrix_gt = matrices_gt[i]

            pose_output = pose_output + f"{pose_matrix[0][0]} {pose_matrix[0][1]} {pose_matrix[0][2]} {pose_matrix[0][3]} " \
                                        f"{pose_matrix[1][0]} {pose_matrix[1][1]} {pose_matrix[1][2]} {pose_matrix[1][3]} " \
                                        f"{pose_matrix[2][0]} {pose_matrix[2][1]} {pose_matrix[2][2]} {pose_matrix[2][3]}"

            pose_gt_output = pose_gt_output + f"{pose_matrix_gt[0][0]} {pose_matrix_gt[0][1]} {pose_matrix_gt[0][2]} {pose_matrix_gt[0][3]} " \
                                        f"{pose_matrix_gt[1][0]} {pose_matrix_gt[1][1]} {pose_matrix_gt[1][2]} {pose_matrix_gt[1][3]} " \
                                        f"{pose_matrix_gt[2][0]} {pose_matrix_gt[2][1]} {pose_matrix_gt[2][2]} {pose_matrix_gt[2][3]}"
            if i < len(poses) - 1:
                pose_output = pose_output + "\n"
                pose_gt_output = pose_gt_output + "\n"

        metadata = dict()
        metadata["title"] = "pose_output_matrix"
        metadata["dataset"] = dataset_name
        metadata["trajectory"] = trajectory_name
        metadata["model"] = self.model_name
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_output,
                                                    name=filename,
                                                    metadata=metadata)

        metadata["title"] = "pose_gt_output_matrix"
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_gt_output,
                                                    name=filename,
                                                    metadata=metadata)
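
For reference, a minimal sketch in plain NumPy (not part of the class above) of how one 4x4 transformation collapses into the 12-value line format described in the docstring:

import numpy

# Flatten the top three rows of a 4x4 transformation row by row,
# matching the "T00 T01 ... T23" line format logged above.
transformation = numpy.eye(4)  # identity pose, used here only as an example
line = " ".join(str(value) for value in transformation[:3, :].flatten())
# line == "1.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 1.0 0.0"
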
Example #13
    def test_givenDataset_whenCallingGetPositionDifferences_shouldReturnProperPositionDifferences(self):
        expected_position = self.dataframe5x5.iloc[1, :].pose[:, 9:]
        initial_rotation = Geometry.matrix_to_quaternion(self.dataframe5x5.iloc[1, :].pose[0, :9])
        initial_position = expected_position[0]

        # Reverse the position differencing to recover absolute positions
        actual_position_differences = self.segment.get_position_differences().numpy()
        self.assertEqual(actual_position_differences.shape, (4, 3))
        actual_positions = Geometry.assemble_delta_translations(actual_position_differences)

        # Remap the axes to the original absolute frame
        actual_positions = Geometry.remap_position_axes(initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (5, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))
Example #14
    def _get_segment_pose(self, segment: Segment) -> torch.Tensor:
        position = segment.get_position_differences()
        attitude = segment.get_attitude_differences()
        # The output attitude is intrinsic Tait-Bryan angles following the y-x'-z'' convention
        attitude = torch.Tensor(numpy.around(Geometry.quaternions_to_tait_bryan_rotations(attitude), 7))

        return torch.cat([attitude, position], 1)
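
A minimal sketch, under the assumption that Geometry.quaternions_to_tait_bryan_rotations boils down to a scipy conversion, of how a single scalar-first pyquaternion Quaternion maps to the intrinsic y-x'-z'' angles mentioned above:

from pyquaternion import Quaternion
from scipy.spatial.transform import Rotation

def quaternion_to_tait_bryan_sketch(quat: Quaternion):
    # pyquaternion stores (w, x, y, z); scipy expects (x, y, z, w)
    return Rotation.from_quat(quat.elements[[1, 2, 3, 0]]).as_euler("YXZ")
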
Example #15
    def test_givenDataset_whenCallingGetAttitudes_shouldReturnProperAttitude(self):

        expected_attitude = self.dataframe5x5.iloc[1, :].pose[:, :9]
        initial_attitude = Geometry.matrix_to_quaternion(self.dataframe5x5.iloc[1, :].pose[0, :9])

        actual_attitude = self.segment.get_attitudes()
        self.assertEqual(len(actual_attitude), 5)
        numpy.testing.assert_array_almost_equal(actual_attitude[0].elements,
                                                numpy.asarray([1.0, 0.0, 0.0, 0.0]))

        actual_attitude = Geometry.reset_orientations_to_origin(initial_attitude, actual_attitude)

        self.assertTrue(numpy.allclose(numpy.asarray(
                                           [Geometry.quaternion_to_matrix(att) for att in actual_attitude]
                                       ),
                                           expected_attitude
                                       ))
Example #16
    def test_givenDataset_whenCallingItem_ShouldReturnTheTupleWithProperAttitude(self):
        dataset = KITTIImageSequenceDatasetEulerDifferences("./datainfo/test_seq5x5.pickle", new_size=(512, 512),
                                                            img_mean=[1, 1, 1], img_std=[1, 1, 1])
        relative_attitude = dataset.__getitem__(0)[1][:, :3]

        expected_attitude = self.dataframe5x5.iloc[0, :].pose[:, :9]
        initial_attitude = Geometry.matrix_to_quaternion(self.dataframe5x5.iloc[0, :].pose[0, :9])

        actual_attitudes = [Quaternion()]
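        # Accumulate the relative rotations: convert each intrinsic YXZ Euler delta to a
        # scalar-first quaternion and compose it with the attitude accumulated so far.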
        for i, rotation in enumerate(relative_attitude):
            quat = Rotation.from_euler("YXZ", rotation).as_quat()[[3, 0, 1, 2]]
            quat = Quaternion(quat)
            quat = quat * actual_attitudes[i]
            actual_attitudes.append(quat)

        actual_attitudes = Geometry.reset_orientations_to_origin(Quaternion(initial_attitude), actual_attitudes)
        actual_attitudes = numpy.asarray([Geometry.quaternion_to_matrix(att) for att in actual_attitudes])
        self.assertTrue(numpy.allclose(actual_attitudes, expected_attitude, atol=1.e-7))
Example #17
    def get_absolute_pose_for_item(self, item: int):
        segment, _ = super().__getitem__(item)
        position = segment.get_positions()
        attitude = segment.get_attitudes()
        # The output attitude is intrinsic Tait-Bryan angles following the y-x'-z'' convention
        attitude = torch.Tensor(numpy.around(Geometry.quaternions_to_tait_bryan_rotations(attitude), 7))

        numpy.testing.assert_almost_equal(attitude[0], numpy.asarray([0.0, 0.0, 0.0]))

        return torch.cat([attitude, position], 1)
Example #18
    def test_tait_bryan_rotation_to_matrix(self):
        y = 0.1745329
        x_prime = 0.3490659
        z_prime_prime = 0.7853982
        tait_bryan = numpy.asarray([y, x_prime, z_prime_prime])

        matrix = Geometry.tait_bryan_rotation_to_matrix(tait_bryan)
        self.assertEqual(matrix.shape, (3, 3))
        numpy.testing.assert_almost_equal(
            tait_bryan,
            Rotation.from_matrix(matrix).as_euler("YXZ"))
Example #19
    def test_assemble_batch_delta_translations(self):
        segment = self.dataset.__getitem__(1)[1].numpy()[:, 3:]
        batch = [segment, segment, segment]
        batch = numpy.asarray(batch)

        assembled_batch = Geometry.assemble_batch_delta_translations(batch)
        expected = numpy.asarray(self.segment.get_positions())
        expected_batch = [expected, expected, expected]
        expected_batch = numpy.asarray(expected_batch)

        numpy.testing.assert_almost_equal(assembled_batch, expected_batch)
Example #20
    def _log_quaternion_poses(self, poses, poses_gt, dataset_name: str,
                              trajectory_name: str):
        """
        Logs the poses in text format, with the rotation expressed as a quaternion;
        one line per pose:

            timestamp tx ty tz qx qy qz qw
        """
        pose_output = ""
        pose_gt_output = ""

        for i, pose in enumerate(poses):
            # .elements[[1, 2, 3, 0]] reorders the quaternion elements
            # from scalar-first (w, x, y, z) to scalar-last (x, y, z, w)
            rotation_quat = Geometry.tait_bryan_rotation_to_quaternion(
                pose[:3]).elements[[1, 2, 3, 0]]
            rotation_quat_gt = Geometry.tait_bryan_rotation_to_quaternion(
                poses_gt[i][:3]).elements[[1, 2, 3, 0]]

            pose_output = pose_output + f"{i} {pose[3]} {pose[4]} {pose[5]} " \
                                        f"{rotation_quat[0]} {rotation_quat[1]} {rotation_quat[2]} {rotation_quat[3]}"
            pose_gt_output = pose_gt_output + f"{i} {poses_gt[i][3]} {poses_gt[i][4]} {poses_gt[i][5]} " \
                                        f"{rotation_quat_gt[0]} {rotation_quat_gt[1]} {rotation_quat_gt[2]} " \
                                        f"{rotation_quat_gt[3]}"
            if i < len(poses) - 1:
                pose_output = pose_output + "\n"
                pose_gt_output = pose_gt_output + "\n"

        metadata = dict()
        metadata["title"] = "pose_output_quaternion"
        metadata["dataset"] = dataset_name
        metadata["trajectory"] = trajectory_name
        metadata["model"] = self.model_name
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_output,
                                                    name=filename,
                                                    metadata=metadata)

        metadata["title"] = "pose_gt_output_quaternion"
        filename = f'{metadata["title"]}_{dataset_name}_{trajectory_name}_{metadata["model"]}.txt'
        CometLogger.get_experiment().log_asset_data(pose_gt_output,
                                                    name=filename,
                                                    metadata=metadata)
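
A minimal sketch of how one line of the log above is assembled, using scipy directly instead of the project's Geometry helper and assuming a pose row laid out as [three y-x'-z'' Tait-Bryan angles, tx, ty, tz]:

import numpy
from scipy.spatial.transform import Rotation

pose = numpy.asarray([0.0, 0.0, 0.0, 1.0, 0.0, 0.0])
quat_xyzw = Rotation.from_euler("YXZ", pose[:3]).as_quat()  # scipy is already scalar-last
line = f"0 {pose[3]} {pose[4]} {pose[5]} " \
       f"{quat_xyzw[0]} {quat_xyzw[1]} {quat_xyzw[2]} {quat_xyzw[3]}"
# line == "0 1.0 0.0 0.0 0.0 0.0 0.0 1.0"
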
Example #21
    def test_givenDataset_whenCallingItem_ShouldReturnTheTupleWithProperPosition(self):
        dataset = KITTIImageSequenceDatasetEulerDifferences("./datainfo/test_seq5x5.pickle", new_size=(512, 512),
                                                            img_mean=[1, 1, 1], img_std=[1, 1, 1])

        expected_position = self.dataframe5x5.iloc[0, :].pose[:, 9:]
        initial_rotation = Geometry.matrix_to_quaternion(self.dataframe5x5.iloc[0, :].pose[0, :9])
        initial_position = expected_position[0]

        # Reverse the position differencing to recover absolute positions
        actual_position_differences = dataset.__getitem__(0)[1][:, 3:].numpy()
        self.assertEqual(actual_position_differences.shape, (4, 3))
        actual_positions = numpy.zeros((5, 3))
        for i, position_difference in enumerate(actual_position_differences):
            actual_positions[i + 1] = actual_positions[i] + position_difference

        # Remap the axes to the original absolute frame
        actual_positions = Geometry.remap_position_axes(initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (5, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))
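
The accumulation loop above is just a cumulative sum with a prepended origin row; a minimal sketch of what Geometry.assemble_delta_translations presumably does:

import numpy

def assemble_delta_translations_sketch(differences: numpy.ndarray) -> numpy.ndarray:
    # Prepend a zero origin row, then accumulate the per-step differences into absolute positions.
    origin = numpy.zeros((1, differences.shape[1]))
    return numpy.concatenate((origin, numpy.cumsum(differences, axis=0)), axis=0)
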
Example #22
    def _relative_target_pose_to_absolute_pose(self, previous_iteration_pose,
                                               pose):
        """
        Transforms relative pose data into absolute poses.
        @param previous_iteration_pose: absolute poses of the previous sliding window, or None for the first window
        @param pose: target poses of the current window, one row per frame laid out as [3 rotations, 3 translations]
        @return: absolute poses as a numpy array with the same [rotations, translations] layout
        """
        if self._is_input_data_relative:
            translations = Geometry.assemble_delta_translations(pose[:, 3:])
            rotations = Geometry.assemble_delta_tait_bryan_rotations(
                pose[:, :3])
        else:
            translations = pose[:, 3:]
            rotations = pose[:, :3]

        if previous_iteration_pose is None:
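            # No previous window: anchor the trajectory at its own first pose.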
            translations = Geometry.remap_position_axes(
                Geometry.tait_bryan_rotation_to_quaternion(rotations[0]),
                translations)
            translations = Geometry.reset_positions_to_origin(
                translations[0], translations)
            rotations = Geometry.reset_euler_orientations_to_origin(
                rotations[0], rotations)
        else:
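            # Anchor at the overlapping pose carried over from the previous sliding window.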
            translations = Geometry.remap_position_axes(
                Geometry.tait_bryan_rotation_to_quaternion(
                    previous_iteration_pose[
                        -self._sliding_window_overlap, :3]), translations)
            translations = Geometry.reset_positions_to_origin(
                previous_iteration_pose[-self._sliding_window_overlap, 3:],
                translations)
            rotations = Geometry.reset_euler_orientations_to_origin(
                previous_iteration_pose[-self._sliding_window_overlap, :3],
                rotations)
        rotations = numpy.around(rotations, 7)
        return numpy.concatenate((rotations, translations), axis=1)
Example #23
    def _relative_predicted_pose_to_absolute_pose(self,
                                                  previous_iteration_pose,
                                                  pose):
        """
        Transforms relative pose data into absolute poses.
        @param previous_iteration_pose: absolute poses of the previous sliding window, or None for the first window
        @param pose: predicted poses of the current window, one row per frame laid out as [3 rotations, 3 translations]
        @return: absolute poses as a numpy array with the same [rotations, translations] layout
        """
        if previous_iteration_pose is None and self._is_input_data_relative:
            translations = Geometry.assemble_delta_translations(pose[:, 3:])
            rotations = Geometry.assemble_delta_tait_bryan_rotations(
                pose[:, :3])
            return numpy.concatenate((rotations, translations), axis=1)
        elif previous_iteration_pose is not None and not self._is_input_data_relative:
            translations = Geometry.get_position_differences(pose[:, 3:])
            rotations = Geometry.get_tait_bryan_orientation_differences(
                pose[:, :3])
        else:
            translations = pose[:, 3:]
            rotations = pose[:, :3]

        # only keep the non overlapping predictions
        relative_pose_overlap = self._sliding_window_overlap - 1
        translations = translations[relative_pose_overlap:]
        rotations = rotations[relative_pose_overlap:]

        translations = Geometry.remap_position_axes(
            Geometry.tait_bryan_rotation_to_quaternion(
                previous_iteration_pose[-1, :3]), translations)

        # assemble with the previous iteration poses
        temp_translation = numpy.concatenate(
            (previous_iteration_pose[-1, 3:].reshape((1, 3)), translations),
            axis=0)
        positions = Geometry.assemble_delta_translations(temp_translation)[2:]

        temp_rotations = numpy.concatenate(
            (previous_iteration_pose[-1, :3].reshape((1, 3)), rotations),
            axis=0)
        orientations = Geometry.assemble_delta_tait_bryan_rotations(
            temp_rotations)[2:]
        orientations = numpy.around(orientations, 7)
        return numpy.concatenate((orientations, positions), axis=1)
Example #24
    def test_poses_to_transformations_matrix(self):
        y = 0.1745329
        x_prime = 0.3490659
        z_prime_prime = 0.7853982
        tait_bryans = numpy.asarray([[y, x_prime, z_prime_prime],
                                     [y, x_prime, z_prime_prime],
                                     [y, x_prime, z_prime_prime]])

        x = 1
        y = 2
        z = 3

        locations = numpy.asarray([[x, y, z], [x, y, z], [x, y, z]])
        transformations = Geometry.poses_to_transformations_matrix(
            locations, tait_bryans)
        self.assertEqual(transformations.shape, (3, 4, 4))
        for transformation in transformations:
            numpy.testing.assert_equal(transformation[3, :], [0, 0, 0, 1])

        numpy.testing.assert_almost_equal(
            tait_bryans,
            Rotation.from_matrix(transformations[:, :3, :3]).as_euler("YXZ"))
        numpy.testing.assert_almost_equal(locations, transformations[:, :3, 3])
Example #25
    def _get_raw_attitude_as_quat(self) -> List[Quaternion]:
        return Geometry.tait_bryan_rotations_to_quaternions(
            self._get_raw_attitude())
Example #26
    def test_get_position_differences(self):
        expected = self.dataset.__getitem__(1)[1].numpy()[:, 3:]
        differences = Geometry.get_position_differences(
            numpy.asarray(self.segment.get_positions()))
        numpy.testing.assert_almost_equal(differences, expected)
Example #27
    def _reset_attitude_origin(self, raw_attitude: List[Quaternion]):
        return Geometry.reset_orientations_to_origin(
            raw_attitude[0], raw_attitude)
Example #28
    def test_assemble_delta_translations(self):
        assembled = Geometry.assemble_delta_translations(
            self.dataset.__getitem__(1)[1].numpy()[:, 3:])
        expected = numpy.asarray(self.segment.get_positions())
        numpy.testing.assert_almost_equal(assembled, expected)
Example #29
    def _get_raw_attitude_as_quat(self) -> List[Quaternion]:
        return Geometry.rotation_matrices_to_quaternions(
            self._get_raw_attitude())