def _relative_predicted_pose_to_absolute_pose(self, previous_iteration_pose, pose):
    """
    Transforms relative pose data to absolute pose.

    Pose rows are laid out as [rotation (Tait-Bryan, cols :3) | translation (cols 3:)];
    the returned array uses the same layout.

    @param previous_iteration_pose: poses from the previous sliding-window iteration,
        or None on the first iteration; only its last row is used as the anchor
    @param pose: predicted pose array for the current window (N x 6)
    @return: absolute pose array (orientations, positions) concatenated on axis 1
    """
    if previous_iteration_pose is None and self._is_input_data_relative:
        # First iteration with relative input: integrate the deltas directly
        # and return — there is no previous anchor pose to stitch onto.
        translations = Geometry.assemble_delta_translations(pose[:, 3:])
        rotations = Geometry.assemble_delta_tait_bryan_rotations(
            pose[:, :3])
        return numpy.concatenate((rotations, translations), axis=1)
    elif previous_iteration_pose is not None and not self._is_input_data_relative:
        # Absolute input on a later iteration: convert to per-step deltas so
        # the stitching below can re-integrate from the previous anchor.
        translations = Geometry.get_position_differences(pose[:, 3:])
        rotations = Geometry.get_tait_bryan_orientation_differences(
            pose[:, :3])
    else:
        # Input is already relative (and a previous anchor exists): use as-is.
        # NOTE(review): this branch is also reached when previous_iteration_pose
        # is None with non-relative input, which would raise a TypeError at the
        # previous_iteration_pose[-1, :3] indexing below — presumably callers
        # never hit that combination; confirm.
        translations = pose[:, 3:]
        rotations = pose[:, :3]
    # only keep the non overlapping predictions
    relative_pose_overlap = self._sliding_window_overlap - 1
    translations = translations[relative_pose_overlap:]
    rotations = rotations[relative_pose_overlap:]
    # Rotate the relative translations into the frame of the last previous
    # pose so they can be accumulated from that anchor.
    translations = Geometry.remap_position_axes(
        Geometry.tait_bryan_rotation_to_quaternion(
            previous_iteration_pose[-1, :3]),
        translations)
    # assemble with the previous iteration poses
    temp_translation = numpy.concatenate(
        (previous_iteration_pose[-1, 3:].reshape((1, 3)), translations),
        axis=0)
    # [2:] drops the prepended anchor row plus the first integrated step
    # (assumes assemble_delta_translations returns one extra leading row —
    # TODO confirm against Geometry implementation).
    positions = Geometry.assemble_delta_translations(temp_translation)[2:]
    temp_rotations = numpy.concatenate(
        (previous_iteration_pose[-1, :3].reshape((1, 3)), rotations),
        axis=0)
    orientations = Geometry.assemble_delta_tait_bryan_rotations(
        temp_rotations)[2:]
    # Round to 7 decimals to suppress accumulated floating-point noise.
    orientations = numpy.around(orientations, 7)
    return numpy.concatenate((orientations, positions), axis=1)
def test_get_position_differences(self):
    """get_position_differences should reproduce the relative translations
    stored in the dataset sample.

    Columns 3: of a pose row hold the translation components (columns :3
    are the rotations), so the expected deltas come from that slice of the
    dataset's target tensor.
    """
    # Use idiomatic indexing instead of calling __getitem__ directly.
    expected = self.dataset[1][1].numpy()[:, 3:]
    differences = Geometry.get_position_differences(
        numpy.asarray(self.segment.get_positions()))
    numpy.testing.assert_almost_equal(differences, expected)