Example 1
    def _extract_egocentric_observation(self, rich_observation):
        """Extract egocentric map and path from rich observation
        :param rich_observation RichObservation: observation, generated by a parent class
        :return (array(W, H)[uint8], array(N, 3)[float]): egocentric obstacle data and a path
        """
        costmap = rich_observation.costmap
        robot_pose = self._env.get_robot().get_pose()

        ego_costmap = extract_egocentric_costmap(
            costmap,
            robot_pose,
            resulting_origin=(self._egomap_x_bounds[0],
                              self._egomap_y_bounds[0]),
            resulting_size=(self._egomap_x_bounds[1] -
                            self._egomap_x_bounds[0],
                            self._egomap_y_bounds[1] -
                            self._egomap_y_bounds[0]))

        ego_path = from_global_to_egocentric(rich_observation.path, robot_pose)
        obs = np.expand_dims(ego_costmap.get_data(), -1)
        # scale the last waypoint by the map extent, then reduce it to a unit direction
        normalized_goal = ego_path[-1, :2] / ego_costmap.world_size()
        normalized_goal = normalized_goal / np.linalg.norm(normalized_goal)

        robot_egocentric_state = rich_observation.robot_state.egocentric_state_numpy_array()
        goal_n_state = np.hstack([normalized_goal, robot_egocentric_state])

        return OrderedDict(
            (('environment', obs), ('goal', np.expand_dims(goal_n_state, -1))))
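
For orientation, `from_global_to_egocentric` maps world-frame waypoints into the robot frame. A minimal sketch of that transform, assuming the conventional inverse rigid-body transform of (x, y, theta) poses; the `_sketch` function name is hypothetical and the library's actual implementation is not shown here:

import numpy as np

def from_global_to_egocentric_sketch(path, robot_pose):
    """Transform an (N, 3) path of (x, y, theta) world poses into the robot frame."""
    rx, ry, rtheta = robot_pose
    c, s = np.cos(rtheta), np.sin(rtheta)
    dx = path[:, 0] - rx
    dy = path[:, 1] - ry
    ego = np.empty_like(path)
    ego[:, 0] = c * dx + s * dy      # x: forward in the robot frame
    ego[:, 1] = -s * dx + c * dy     # y: to the robot's left
    ego[:, 2] = path[:, 2] - rtheta  # heading relative to the robot
    return ego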
Example 2
def test_costmap_2d_extract_egocentric_costmap_with_nonzero_origin():
    costmap2d = CostMap2D(np.zeros((100, 100), dtype=np.float64), 0.05,
                          np.array([1.0, 2.0]))
    costmap2d.get_data()[10, 20] = CostMap2D.LETHAL_OBSTACLE  # mark near world (2.0, 2.5)

    # rotate, shift and cut -> extract ego costmap centered on the robot
    cut_map = extract_egocentric_costmap(costmap2d, (3.5, 3.5, -np.pi / 4),
                                         resulting_origin=np.array(
                                             [-2, -2], dtype=np.float64),
                                         resulting_size=(4, 4))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-2, -2))
    np.testing.assert_array_equal(cut_map.get_data().shape, (80, 80))
    np.testing.assert_array_almost_equal(cut_map.world_bounds(),
                                         (-2, 2, -2, 2))
    assert_mark_at(cut_map, 33, 5)
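
These tests call an `assert_mark_at` helper that is not part of the excerpt. A plausible minimal sketch, assuming it checks that the strongest cell sits at pixel column `x`, row `y`, and (by default) that the mark is unique; the real helper may differ:

import numpy as np

def assert_mark_at(costmap, x, y, check_single=True):
    """Hypothetical reconstruction of the test helper used above."""
    data = costmap.get_data()
    ys, xs = np.where(data == data.max())  # locations of the strongest cells
    assert (y, x) in list(zip(ys, xs))
    if check_single:
        assert len(ys) == 1  # the mark must be the only occupied cell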
Example 3
def test_costmap_2d_extract_egocentric_binary_costmap():
    costmap = CostMap2D.create_empty((5, 5), 0.05)
    costmap.get_data()[30:32, 30:32] = CostMap2D.LETHAL_OBSTACLE

    cut_map = extract_egocentric_costmap(costmap, (1.5, 1.5, -np.pi / 4),
                                         resulting_origin=np.array(
                                             [-2.0, -2.0], dtype=np.float64),
                                         resulting_size=(4.0, 4.0))

    rotated_data = cut_map.get_data()
    non_free_indices = np.where(rotated_data != 0)
    np.testing.assert_array_equal(non_free_indices[0], [40, 41, 41, 41, 42])
    np.testing.assert_array_equal(non_free_indices[1], [40, 39, 40, 41, 40])

    assert (
        rotated_data[non_free_indices[0],
                     non_free_indices[1]] == CostMap2D.LETHAL_OBSTACLE).all()
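
Note the index convention in these assertions: `np.where` on a 2D array returns `(row_indices, col_indices)`, so `non_free_indices[0]` holds y (row) coordinates and `non_free_indices[1]` holds x (column) coordinates. A tiny self-contained check:

import numpy as np

grid = np.zeros((5, 5), dtype=np.uint8)
grid[3, 1] = 255                 # row 3 (y), column 1 (x)
rows, cols = np.where(grid != 0)
assert rows.tolist() == [3]      # first array: row indices
assert cols.tolist() == [1]      # second array: column indices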
Example 4
    def observation(self, observation):
        """Extract egocentric map and path from rich observation
        :param observation Observation: observation, generated by the non-wrapped class
        :return (array(W, H)[uint8], array(N, 3)[float]): egocentric obstacle data and a path
        """
        costmap = observation.costmap
        robot_pose = observation.pose

        ego_costmap = extract_egocentric_costmap(
            costmap,
            robot_pose,
            resulting_origin=(self._egomap_x_bounds[0],
                              self._egomap_y_bounds[0]),
            resulting_size=np.array([
                self._egomap_x_bounds[1] - self._egomap_x_bounds[0],
                self._egomap_y_bounds[1] - self._egomap_y_bounds[0]
            ]))

        ego_path = from_global_to_egocentric(observation.path, robot_pose)
        obs = np.expand_dims(ego_costmap.get_data(), -1)
        if len(ego_path) == 0:
            # no path left to follow: emit a zero vector sized for the
            # goal pose (3 values) plus the robot state
            size_of_goal_n_state = 3 + observation.robot_state.to_numpy_array().shape[0]
            return OrderedDict((
                ('env', obs),
                ('goal_n_state',
                 np.expand_dims(
                     np.zeros(size_of_goal_n_state, dtype=np.float32), -1))))
        else:
            # normalize the next waypoint by the egomap extent and clip it to [-1, 1]
            normalized_goal = ego_path[0, :2] / ego_costmap.world_size()
            normalized_goal = np.clip(normalized_goal, (-1., -1.), (1., 1.))
            goal_pose = np.hstack([normalized_goal, ego_path[0, 2]])
            # observation.robot_state holds wheel_angle, measured_v, measured_w,
            # steering_motor_command
            goal_n_state = np.hstack(
                [goal_pose, observation.robot_state.to_numpy_array()])
            return OrderedDict((
                ('env', obs),
                ('goal_n_state',
                 np.expand_dims(goal_n_state, -1).astype(np.float32)),
            ))
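
The signature `observation(self, observation)` matches gym's `ObservationWrapper` hook, so this method rewrites every observation the wrapped environment emits. The goal normalization in the `else` branch is plain arithmetic; a runnable sketch with made-up numbers (a waypoint 1 m ahead and 0.5 m to the left on a 4 m x 4 m egomap):

import numpy as np

ego_path = np.array([[1.0, 0.5, 0.3]])   # hypothetical next waypoint (x, y, theta)
world_size = np.array([4.0, 4.0])        # egomap extent in meters

normalized_goal = ego_path[0, :2] / world_size           # -> [0.25, 0.125]
normalized_goal = np.clip(normalized_goal, (-1., -1.), (1., 1.))
goal_pose = np.hstack([normalized_goal, ego_path[0, 2]])
print(goal_pose)                                         # [0.25 0.125 0.3]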
Example 5
def test_costmap_2d_extract_egocentric_costmap():
    costmap2d = CostMap2D(np.zeros((100, 100), dtype=np.float64), 0.05,
                          np.array([0.0, 0.0]))
    costmap2d.get_data()[10, 20] = CostMap2D.LETHAL_OBSTACLE

    np.testing.assert_array_equal(costmap2d.world_bounds(), (0, 5, 0, 5))
    assert_mark_at(costmap2d, 20, 10)

    # dummy cut: an identity pose leaves the map unchanged
    cut_map = extract_egocentric_costmap(costmap2d, (0., 0., 0.))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (0.0, 0.0))
    np.testing.assert_array_equal(cut_map.get_data().shape, (100, 100))
    np.testing.assert_array_equal(cut_map.world_bounds(), (0.0, 5, 0.0, 5))
    assert_mark_at(cut_map, 20, 10)

    # shift
    cut_map = extract_egocentric_costmap(costmap2d, (0.2, 0.2, 0.0))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-0.2, -0.2))
    np.testing.assert_array_equal(cut_map.get_data().shape, (100, 100))
    np.testing.assert_array_equal(cut_map.world_bounds(),
                                  (-0.2, 4.8, -0.2, 4.8))

    # the egocentric view doesn't shift the underlying data when there is no rotation
    assert_mark_at(cut_map, 20, 10)

    # rotate so that the mark ends up almost directly in front of the robot
    cut_map = extract_egocentric_costmap(costmap2d,
                                         (0.0, 0.0, np.pi / 6 - 0.05))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-0., -0.))
    np.testing.assert_array_equal(cut_map.get_data().shape, (100, 100))
    np.testing.assert_array_equal(cut_map.world_bounds(), (0.0, 5, 0.0, 5))

    assert_mark_at(cut_map, 22, 0, check_single=False)

    # rotate as if robot is in the center of the costmap
    cut_map = extract_egocentric_costmap(costmap2d, (2.5, 2.5, -np.pi / 2.))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-2.5, -2.5))
    np.testing.assert_array_equal(cut_map.get_data().shape, (100, 100))
    np.testing.assert_array_equal(cut_map.world_bounds(),
                                  (-2.5, 2.5, -2.5, 2.5))
    assert_mark_at(cut_map, 90, 20)

    # rotate as if robot is in the center of the costmap with explicit parameters
    cut_map = extract_egocentric_costmap(costmap2d, (2.5, 2.5, -np.pi / 2),
                                         resulting_origin=np.array(
                                             [-2.5, -2.5], dtype=np.float64),
                                         resulting_size=np.array((5., 5.)))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-2.5, -2.5))
    np.testing.assert_array_equal(cut_map.get_data().shape, (100, 100))
    np.testing.assert_array_equal(cut_map.world_bounds(),
                                  (-2.5, 2.5, -2.5, 2.5))
    assert_mark_at(cut_map, 90, 20)

    # rotate as if robot is in the center of the costmap with smaller size
    cut_map = extract_egocentric_costmap(costmap2d, (2.5, 2.5, -np.pi / 2),
                                         resulting_origin=np.array(
                                             [-2.5, -2.5], dtype=np.float64),
                                         resulting_size=(4.6, 4.9))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-2.5, -2.5))
    np.testing.assert_array_equal(cut_map.get_data().shape, (98, 92))
    np.testing.assert_array_almost_equal(cut_map.world_bounds(),
                                         (-2.5, 2.1, -2.5, 2.4))
    assert_mark_at(cut_map, 90, 20)

    # do not rotate but shift
    cut_map = extract_egocentric_costmap(costmap2d, (2.5, 2.5, 0.0),
                                         resulting_origin=np.array(
                                             [-5, -4], dtype=np.float64),
                                         resulting_size=(5, 5))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-5, -4))
    np.testing.assert_array_equal(cut_map.get_data().shape, (100, 100))
    np.testing.assert_array_almost_equal(cut_map.world_bounds(),
                                         (-5, 0, -4, 1))
    assert_mark_at(cut_map, 70, 40)

    # do not rotate but shift and cut
    cut_map = extract_egocentric_costmap(costmap2d, (2.5, 2.5, 0.0),
                                         resulting_origin=np.array(
                                             [-5, -4], dtype=np.float64),
                                         resulting_size=(4, 4))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-5, -4))
    np.testing.assert_array_equal(cut_map.get_data().shape, (80, 80))
    np.testing.assert_array_almost_equal(cut_map.world_bounds(),
                                         (-5, -1, -4, 0))
    assert_mark_at(cut_map, 70, 40)

    # rotate, shift and cut -> extract ego costmap centered on the robot
    cut_map = extract_egocentric_costmap(costmap2d, (1.5, 1.5, -np.pi / 4),
                                         resulting_origin=np.array(
                                             [-2, -2], dtype=np.float64),
                                         resulting_size=(4, 4))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-2, -2))
    np.testing.assert_array_equal(cut_map.get_data().shape, (80, 80))
    np.testing.assert_array_almost_equal(cut_map.world_bounds(),
                                         (-2, 2, -2, 2))
    assert_mark_at(cut_map, 47, 19)

    # rotate, shift and expand -> extract ego costmap centered on the robot, going off the limits of original map
    cut_map = extract_egocentric_costmap(costmap2d, (1., 1.5, -np.pi / 4),
                                         resulting_origin=np.array(
                                             [-3, -3], dtype=np.float64),
                                         resulting_size=(7, 6))
    assert cut_map.get_resolution() == costmap2d.get_resolution()
    np.testing.assert_array_equal(cut_map.get_origin(), (-3, -3))
    np.testing.assert_array_equal(cut_map.get_data().shape, (120, 140))
    np.testing.assert_array_almost_equal(cut_map.world_bounds(),
                                         (-3, 4, -3, 3))
    assert_mark_at(cut_map, 74, 46)

    # smoke test: multi-channel (image-like) costmap data is also accepted
    costmap_with_image = CostMap2D(np.zeros((100, 100, 3), dtype=np.float64),
                                   0.05, np.array([0., 0.]))
    extract_egocentric_costmap(costmap_with_image, [0., 0., np.pi / 4])
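
The expected array shapes in these assertions follow directly from the requested world size and the map resolution: rows track the y extent and columns the x extent. A quick sanity check of the cropped and expanded cases above, under that assumption:

resolution = 0.05  # meters per pixel

def expected_shape(size_xy, resolution):
    # rows follow the y extent, columns the x extent
    return (round(size_xy[1] / resolution), round(size_xy[0] / resolution))

assert expected_shape((4.6, 4.9), resolution) == (98, 92)    # smaller-size crop
assert expected_shape((7.0, 6.0), resolution) == (120, 140)  # expanded egomap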