    def test_givenDataset_whenCallingItem_shouldReturnTheTupleWithProperPosition(
            self):
        dataset = MidAirImageSequenceDatasetEulerDifferences(
            "./Ressources/MidAir/Kite_test",
            new_size=(512, 512),
            img_mean=[1, 1, 1],
            img_std=[1, 1, 1],
            trajectories=["trajectory_3000"])

        with dataset.HDF5["./Ressources/MidAir/Kite_test/cloudy"] as hdf5:
            expected_position = hdf5["trajectory_3000"]["groundtruth"][
                "position"][4:8]
            initial_rotation = Quaternion(
                hdf5["trajectory_3000"]["groundtruth"]["attitude"][4])
            initial_position = expected_position[0]

        # Reverse the position differences back into absolute positions
        actual_position_differences = dataset[1][1][:, 3:].numpy()
        self.assertEqual(actual_position_differences.shape, (3, 3))

        actual_positions = Geometry.assemble_delta_translations(
            actual_position_differences)
        actual_positions = self._rotate_camera_frame_to_world_frame(
            torch.Tensor(actual_positions)).numpy()
        # Remap axes to the original absolute frame
        actual_positions = Geometry.remap_position_axes(
            initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(
            initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (4, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))

    def test_givenDataset_whenCallingGetPositionDifferences_shouldReturnProperPositionDifferences(
            self):
        with self.dataset.HDF5["./Ressources/MidAir/Kite_test/cloudy"] as hdf5:
            expected_position = hdf5["trajectory_3000"]["groundtruth"][
                "position"][4:8]
            initial_rotation = Quaternion(
                hdf5["trajectory_3000"]["groundtruth"]["attitude"][4])
            initial_position = expected_position[0]

        # Reverse the position differences back into absolute positions
        actual_position_differences = (
            self.second_segment.get_position_differences().numpy())
        self.assertEqual(actual_position_differences.shape, (3, 3))
        actual_positions_camera_frame = Geometry.assemble_delta_translations(
            actual_position_differences)

        # Remap axes to the original absolute frame
        actual_positions = self._rotate_camera_frame_to_world_frame(
            torch.Tensor(actual_positions_camera_frame)).numpy()

        for i, pos in enumerate(actual_positions_camera_frame):
            # Assert that the positions were switched to the KITTI camera
            # coordinate system
            numpy.testing.assert_array_equal(actual_positions[i],
                                             pos[[2, 0, 1]])

        actual_positions = Geometry.remap_position_axes(
            initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(
            initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (4, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))
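The permutation assertion in the loop above fully determines what the test-class helper `_rotate_camera_frame_to_world_frame` has to compute. As a standalone sketch (an assumption, since the helper's body is not part of these examples), it reduces to a single axis reordering:

import torch


def rotate_camera_frame_to_world_frame(positions: torch.Tensor) -> torch.Tensor:
    # Hypothetical standalone version of the test helper: reorder each
    # camera-frame row as (z, x, y), matching pos[[2, 0, 1]] in the
    # assertion above.
    return positions[:, [2, 0, 1]]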
Example #3
    def _relative_predicted_pose_to_absolute_pose(self,
                                                  previous_iteration_pose,
                                                  pose):
        """
        Transforms relative pose data to absolute pose
        @param previous_iteration_pose:
        @param pose:
        @return:
        """
        if previous_iteration_pose is None and self._is_input_data_relative:
            translations = Geometry.assemble_delta_translations(pose[:, 3:])
            rotations = Geometry.assemble_delta_tait_bryan_rotations(
                pose[:, :3])
            return numpy.concatenate((rotations, translations), axis=1)
        elif previous_iteration_pose is not None and not self._is_input_data_relative:
            translations = Geometry.get_position_differences(pose[:, 3:])
            rotations = Geometry.get_tait_bryan_orientation_differences(
                pose[:, :3])
        else:
            translations = pose[:, 3:]
            rotations = pose[:, :3]

        # Only keep the non-overlapping predictions
        relative_pose_overlap = self._sliding_window_overlap - 1
        translations = translations[relative_pose_overlap:]
        rotations = rotations[relative_pose_overlap:]

        translations = Geometry.remap_position_axes(
            Geometry.tait_bryan_rotation_to_quaternion(
                previous_iteration_pose[-1, :3]), translations)

        # assemble with the previous iteration poses
        temp_translation = numpy.concatenate(
            (previous_iteration_pose[-1, 3:].reshape((1, 3)), translations),
            axis=0)
        positions = Geometry.assemble_delta_translations(temp_translation)[2:]

        temp_rotations = numpy.concatenate(
            (previous_iteration_pose[-1, :3].reshape((1, 3)), rotations),
            axis=0)
        orientations = Geometry.assemble_delta_tait_bryan_rotations(
            temp_rotations)[2:]
        orientations = numpy.around(orientations, 7)
        return numpy.concatenate((orientations, positions), axis=1)
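The trimming step above is the subtle part of this method: with a sliding-window overlap of `self._sliding_window_overlap` poses, a window shares `overlap - 1` relative steps with its predecessor, and those duplicates are sliced away before reassembly. A small numeric illustration, with an overlap of 3 chosen purely for the example:

import numpy

overlap = 3  # stand-in for self._sliding_window_overlap
relative_steps = numpy.arange(15).reshape(5, 3)  # 5 relative poses
kept = relative_steps[overlap - 1:]  # drop the 2 duplicated steps
assert kept.shape == (3, 3)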
Example #4
    def test_givenDataset_whenCallingGetPositionDifferences_shouldReturnProperPositionDifferences(
            self):
        expected_position = self.dataframe5x5.iloc[1, :].pose[:, 9:]
        initial_rotation = Geometry.matrix_to_quaternion(
            self.dataframe5x5.iloc[1, :].pose[0, :9])
        initial_position = expected_position[0]

        # Reverse the position differences back into absolute positions
        actual_position_differences = (
            self.segment.get_position_differences().numpy())
        self.assertEqual(actual_position_differences.shape, (4, 3))
        actual_positions = Geometry.assemble_delta_translations(
            actual_position_differences)

        # Remap axes to the original absolute frame
        actual_positions = Geometry.remap_position_axes(
            initial_rotation, actual_positions)
        actual_positions = Geometry.reset_positions_to_origin(
            initial_position, actual_positions)

        self.assertEqual(actual_positions.shape, (5, 3))
        self.assertTrue(numpy.allclose(actual_positions, expected_position))
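The tests above all rely on the same shape contract for `Geometry.assemble_delta_translations`: an (N, 3) array of per-step translation deltas comes back as (N + 1, 3) absolute positions starting at the origin. A minimal sketch that satisfies this contract, assuming the helper is a plain cumulative sum (its real body is not shown in these examples):

import numpy


def assemble_delta_translations(deltas):
    # Hypothetical sketch: prepend the origin, then cumulatively sum the
    # per-step deltas, so (N, 3) deltas yield (N + 1, 3) positions.
    deltas = numpy.asarray(deltas)
    positions = numpy.zeros((deltas.shape[0] + 1, deltas.shape[1]))
    positions[1:] = numpy.cumsum(deltas, axis=0)
    return positions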
Example #5
    def _relative_target_pose_to_absolute_pose(self, previous_iteration_pose,
                                               pose):
        """
        Transforms relative pose data to absolute pose
        @param previous_iteration_pose:
        @param pose:
        @return:
        """
        if self._is_input_data_relative:
            translations = Geometry.assemble_delta_translations(pose[:, 3:])
            rotations = Geometry.assemble_delta_tait_bryan_rotations(
                pose[:, :3])
        else:
            translations = pose[:, 3:]
            rotations = pose[:, :3]

        if previous_iteration_pose is None:
            translations = Geometry.remap_position_axes(
                Geometry.tait_bryan_rotation_to_quaternion(rotations[0]),
                translations)
            translations = Geometry.reset_positions_to_origin(
                translations[0], translations)
            rotations = Geometry.reset_euler_orientations_to_origin(
                rotations[0], rotations)
        else:
            translations = Geometry.remap_position_axes(
                Geometry.tait_bryan_rotation_to_quaternion(
                    previous_iteration_pose[
                        -self._sliding_window_overlap, :3]), translations)
            translations = Geometry.reset_positions_to_origin(
                previous_iteration_pose[-self._sliding_window_overlap, 3:],
                translations)
            rotations = Geometry.reset_euler_orientations_to_origin(
                previous_iteration_pose[-self._sliding_window_overlap, :3],
                rotations)
        rotations = numpy.around(rotations, 7)
        return numpy.concatenate((rotations, translations), axis=1)
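Both branches finish by re-anchoring the reassembled trajectory at a known reference, and the tests in the earlier examples use `Geometry.reset_positions_to_origin` the same way. That usage suggests it is a plain translation of the whole trajectory; a sketch under that assumption:

import numpy


def reset_positions_to_origin(initial_position, positions):
    # Hypothetical sketch: shift the trajectory so its first point
    # lands exactly on initial_position.
    positions = numpy.asarray(positions)
    return positions - positions[0] + numpy.asarray(initial_position)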
Example #6
    def test_assemble_delta_translations(self):
        assembled = Geometry.assemble_delta_translations(
            self.dataset[1][1].numpy()[:, 3:])
        expected = numpy.asarray(self.segment.get_positions())
        numpy.testing.assert_almost_equal(assembled, expected)
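This last example states the round-trip property directly: assembling the segment's position differences must reproduce its absolute positions. Read together with the shape contract sketched earlier, `get_position_differences` could be as small as a consecutive difference; this is a sketch of that reading, not the segment class's actual implementation:

import numpy


def get_position_differences(positions):
    # Hypothetical inverse of assemble_delta_translations: deltas between
    # consecutive positions, so (N, 3) positions yield (N - 1, 3) deltas.
    return numpy.diff(numpy.asarray(positions), axis=0)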