Example #1
def test_incremental_rotation(dev_str, call):
    if call in [helpers.np_call, helpers.jnp_call, helpers.mx_call]:
        # convolutions not yet implemented in numpy or jax
        # mxnet is unable to stack or expand zero-dimensional tensors
        pytest.skip()
    batch_size = 1
    num_timesteps = 1
    num_cams = 1
    num_feature_channels = 3
    image_dims = [3, 3]
    esm = ESM(omni_image_dims=[10, 20], smooth_mean=False)
    empty_memory = esm.empty_memory(batch_size, num_timesteps)
    empty_obs = _get_dummy_obs(batch_size, num_timesteps, num_cams, image_dims, num_feature_channels, empty=True)
    rel_rot_vec_pose = ivy.array([[[0., 0., 0., 0., 0.1, 0.]]])
    empty_obs['control_mean'] = rel_rot_vec_pose
    empty_obs['agent_rel_mat'] = ivy_mech.rot_vec_pose_to_mat_pose(rel_rot_vec_pose)

    first_obs = _get_dummy_obs(batch_size, num_timesteps, num_cams, image_dims, num_feature_channels, ones=True)
    memory_1 = esm(first_obs, empty_memory, batch_size=batch_size, num_timesteps=num_timesteps, num_cams=num_cams,
                   image_dims=image_dims)
    memory_2 = esm(empty_obs, memory_1, batch_size=batch_size, num_timesteps=num_timesteps, num_cams=num_cams,
                   image_dims=image_dims)
    memory_3 = esm(empty_obs, memory_2, batch_size=batch_size, num_timesteps=num_timesteps, num_cams=num_cams,
                   image_dims=image_dims)

    assert not np.allclose(memory_1.mean, memory_3.mean)
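
The 6-element relative pose used above packs a translation with a rotation vector. A minimal standalone sketch of that conversion (assuming ivy and ivy_mech are installed and a backend is already set, as in the other examples):

import ivy
import ivy_mech

# batch x time x 6: zero translation, 0.1 rad rotation about the y axis
rel_rot_vec_pose = ivy.array([[[0., 0., 0., 0., 0.1, 0.]]])
# batch x time x 3 x 4 matrix pose [R|t], analogous to the 'agent_rel_mat' entry above
rel_agent_mat = ivy_mech.rot_vec_pose_to_mat_pose(rel_rot_vec_pose)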
Example #2
 def __init__(self):
     rot_vec_pose = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
     self.inv_ext_mat = ivy_mech.rot_vec_pose_to_mat_pose(rot_vec_pose)
     self.rel_body_points = np.array([[0., 0., 0.],
                                      [-0.2, -0.2, 0.],
                                      [-0.2, 0.2, 0.],
                                      [0.2, -0.2, 0.],
                                      [0.2, 0.2, 0.]])
     self.sampled_body = np.array([[0.1, 0.2, 0.3],
                                   [0.04361792, -0.0751835, 0.26690764],
                                   [-0.12924806, 0.22732089, 0.46339797],
                                   [0.32924806, 0.17267911, 0.13660203],
                                   [0.15638208, 0.4751835, 0.33309236]])
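
The hard-coded sampled_body values above are the rel_body_points transformed by inv_ext_mat (note the first body point, the origin, maps to the pose position [0.1, 0.2, 0.3]). A hedged sketch of reproducing them with the RigidMobile class used in the later examples; the import path and the exact batch handling of sample_body are assumptions:

import ivy
import ivy_mech
from ivy_robot.rigid_mobile import RigidMobile  # assumed import path

rot_vec_pose = ivy.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
inv_ext_mat = ivy_mech.rot_vec_pose_to_mat_pose(rot_vec_pose)  # 3 x 4
rel_body_points = ivy.array([[0., 0., 0.], [-0.2, -0.2, 0.], [-0.2, 0.2, 0.],
                             [0.2, -0.2, 0.], [0.2, 0.2, 0.]])
drone = RigidMobile(rel_body_points)
# sample_body takes a batch of matrix poses, as in Examples 3 and 7
sampled_body = drone.sample_body(ivy.expand_dims(inv_ext_mat, 0))[0]
# expected to match the hard-coded values above, up to numerical error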
Example #3
def compute_cost_and_sdfs(learnable_anchor_vals, anchor_points,
                          start_anchor_val, end_anchor_val, query_points, sim):
    anchor_vals = ivy.concatenate(
        (ivy.expand_dims(start_anchor_val, 0), learnable_anchor_vals,
         ivy.expand_dims(end_anchor_val, 0)), 0)
    poses = ivy_robot.sample_spline_path(anchor_points, anchor_vals,
                                         query_points)
    inv_ext_mat_query_vals = ivy_mech.rot_vec_pose_to_mat_pose(poses)
    body_positions = ivy.transpose(
        sim.ivy_drone.sample_body(inv_ext_mat_query_vals), (1, 0, 2))
    length_cost = compute_length(body_positions)
    sdf_vals = sim.sdf(ivy.reshape(body_positions, (-1, 3)))
    coll_cost = -ivy.reduce_mean(sdf_vals)
    total_cost = length_cost + coll_cost * 10
    return total_cost[0], poses, body_positions, ivy.reshape(
        sdf_vals, (-1, 100, 1))
Example #4
 def execute_motion(self, poses):
     print('\nSpline path optimized.\n')
     if self._interactive:
         input('\nPress enter in the terminal to execute motion.\n')
     print('\nExecuting motion...\n')
     if self.with_pyrep:
         for i in range(100):
             pose = poses[i]
             inv_ext_mat = ivy_mech.rot_vec_pose_to_mat_pose(pose)
             self._drone.set_matrix(
                 ivy.to_numpy(inv_ext_mat).reshape((-1, )).tolist())
             time.sleep(0.05)
     elif self._interactive:
         this_dir = os.path.dirname(os.path.realpath(__file__))
         for i in range(11):
             plt.ion()
             plt.imshow(
                 mpimg.imread(
                     os.path.join(this_dir, 'dsp_no_sim',
                                  'motion_{}.png'.format(i))))
             plt.show()
             plt.pause(0.1)
             plt.ioff()
Example #5
    def __init__(self, interactive, try_use_sim):
        super().__init__(interactive, try_use_sim)

        # ivy robot
        rel_body_points = ivy.array([[0., 0., 0.], [-0.15, 0., -0.15],
                                     [-0.15, 0., 0.15], [0.15, 0., -0.15],
                                     [0.15, 0., 0.15]])
        self.ivy_drone = RigidMobile(rel_body_points)

        # initialize scene
        if self.with_pyrep:
            self._spherical_vision_sensor.remove()
            for i in range(6):
                self._vision_sensors[i].remove()
                self._vision_sensor_bodies[i].remove()
                [ray.remove() for ray in self._vision_sensor_rays[i]]
            drone_start_pos = np.array([-1.15, -1.028, 0.6])
            target_pos = np.array([1.025, 1.125, 0.6])
            self._drone.set_position(drone_start_pos)
            self._drone.set_orientation(
                np.array([-90, 50, -180]) * np.pi / 180)
            self._target.set_position(target_pos)
            self._target.set_orientation(
                np.array([-90, 50, -180]) * np.pi / 180)
            self._default_camera.set_position(
                np.array([-3.2835, -0.88753, 1.3773]))
            self._default_camera.set_orientation(
                np.array([-151.07, 70.079, -120.45]) * np.pi / 180)

            input(
                '\nScene initialized.\n\n'
                'The simulator visualizer can be translated and rotated by clicking either the left mouse button or the wheel, '
                'and then dragging the mouse.\n'
                'Scrolling the mouse wheel zooms the view in and out.\n\n'
                'You can click on any object either in the scene or the left hand panel, '
                'then select the box icon with four arrows in the top panel of the simulator, '
                'and then drag the object around dynamically.\n'
                'Starting to drag and then holding ctrl allows you to also drag the object up and down.\n'
                'Clicking the top icon with a box and two rotating arrows similarly allows rotation of the object.\n\n'
                'Once you have arranged the scene as desired, press enter in the terminal to continue with the demo...\n'
            )

            # primitive scene
            self.setup_primitive_scene()

            # public objects
            drone_starting_inv_ext_mat = ivy.array(
                np.reshape(self._drone.get_matrix(), (3, 4)), 'float32')
            drone_start_rot_vec_pose = ivy_mech.mat_pose_to_rot_vec_pose(
                drone_starting_inv_ext_mat)
            self.drone_start_pose = drone_start_rot_vec_pose
            target_inv_ext_mat = ivy.array(
                np.reshape(self._target.get_matrix(), (3, 4)), 'float32')
            target_rot_vec_pose = ivy_mech.mat_pose_to_rot_vec_pose(
                target_inv_ext_mat)
            self.drone_target_pose = target_rot_vec_pose

            # spline path
            drone_start_to_target_poses = ivy.transpose(
                ivy.linspace(self.drone_start_pose, self.drone_target_pose,
                             100), (1, 0))
            drone_start_to_target_inv_ext_mats = ivy_mech.rot_vec_pose_to_mat_pose(
                drone_start_to_target_poses)
            drone_start_to_target_positions =\
                ivy.transpose(self.ivy_drone.sample_body(drone_start_to_target_inv_ext_mats), (1, 0, 2))
            initial_sdf_vals = ivy.reshape(
                self.sdf(
                    ivy.reshape(
                        ivy.cast(drone_start_to_target_positions, 'float32'),
                        (-1, 3))), (-1, 100, 1))
            self.update_path_visualization(drone_start_to_target_positions,
                                           initial_sdf_vals, None)

            # wait for user input
            self._user_prompt(
                '\nInitialized scene with a drone and a target position to reach.'
                '\nPress enter in the terminal to use method ivy_robot.interpolate_spline_points '
                'to plan a spline path which reaches the target whilst avoiding the objects in the scene...\n'
            )

        else:

            # primitive scene
            self.setup_primitive_scene_no_sim()

            # public objects
            self.drone_start_pose = ivy.array(
                [-1.1500, -1.0280, 0.6000, 0.7937, 1.7021, 1.7021])
            self.drone_target_pose = ivy.array(
                [1.0250, 1.1250, 0.6000, 0.7937, 1.7021, 1.7021])

            # message
            print(
                '\nInitialized dummy scene with a drone and a target position to reach.'
                '\nClose the visualization window to use method ivy_robot.interpolate_spline_points '
                'to plan a spline path which reaches the target whilst avoiding the objects in the scene...\n'
            )

            # plot scene before rotation
            if interactive:
                plt.imshow(
                    mpimg.imread(
                        os.path.join(
                            os.path.dirname(os.path.realpath(__file__)),
                            'dsp_no_sim', 'path_0.png')))
                plt.show()

        print('\nOptimizing spline path...\n')
Example #6
def main(f=None):

    # Framework Setup #
    # ----------------#

    # choose random framework

    f = choose_random_framework() if f is None else f
    set_framework(f)

    # Orientation #
    # ------------#

    # rotation representations

    # 3
    rot_vec = ivy.array([0., 1., 0.])

    # 3 x 3
    rot_mat = ivy_mech.rot_vec_to_rot_mat(rot_vec)

    # 3
    euler_angles = ivy_mech.rot_mat_to_euler(rot_mat, 'zyx')

    # 4
    quat = ivy_mech.euler_to_quaternion(euler_angles)

    # 4
    axis_and_angle = ivy_mech.quaternion_to_axis_angle(quat)

    # 3
    rot_vec_again = axis_and_angle[..., :-1] * axis_and_angle[..., -1:]

    # Pose #
    # -----#

    # pose representations

    # 3
    position = ivy.ones_like(rot_vec)

    # 6
    rot_vec_pose = ivy.concatenate((position, rot_vec), 0)

    # 3 x 4
    mat_pose = ivy_mech.rot_vec_pose_to_mat_pose(rot_vec_pose)

    # 6
    euler_pose = ivy_mech.mat_pose_to_euler_pose(mat_pose)

    # 7
    quat_pose = ivy_mech.euler_pose_to_quaternion_pose(euler_pose)

    # 6
    rot_vec_pose_again = ivy_mech.quaternion_pose_to_rot_vec_pose(quat_pose)

    # Position #
    # ---------#

    # conversions of positional representation

    # 3
    cartesian_coord = ivy.random_uniform(0., 1., (3, ))

    # 3
    polar_coord = ivy_mech.cartesian_to_polar_coords(cartesian_coord)

    # 3
    cartesian_coord_again = ivy_mech.polar_to_cartesian_coords(polar_coord)

    # cartesian co-ordinate frame-of-reference transformations

    # 3 x 4
    trans_mat = ivy.random_uniform(0., 1., (3, 4))

    # 4
    cartesian_coord_homo = ivy_mech.make_coordinates_homogeneous(
        cartesian_coord)

    # 3
    trans_cartesian_coord = ivy.matmul(
        trans_mat, ivy.expand_dims(cartesian_coord_homo, -1))[:, 0]

    # 4
    trans_cartesian_coord_homo = ivy_mech.make_coordinates_homogeneous(
        trans_cartesian_coord)

    # 4 x 4
    trans_mat_homo = ivy_mech.make_transformation_homogeneous(trans_mat)

    # 3 x 4
    inv_trans_mat = ivy.inv(trans_mat_homo)[0:3]

    # 3
    cartesian_coord_again = ivy.matmul(
        inv_trans_mat, ivy.expand_dims(trans_cartesian_coord_homo, -1))[:, 0]

    # message
    print('End of Run Through Demo!')
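
A short hedged check of the pose conversions above (assuming a backend has already been chosen via set_framework): converting the rotation-vector pose through matrix, Euler and quaternion poses and back should recover the original for this pose, up to numerical error.

import numpy as np
import ivy
import ivy_mech

rot_vec_pose = ivy.array([1., 1., 1., 0., 1., 0.])              # 6: position + rotation vector
mat_pose = ivy_mech.rot_vec_pose_to_mat_pose(rot_vec_pose)      # 3 x 4
euler_pose = ivy_mech.mat_pose_to_euler_pose(mat_pose)          # 6
quat_pose = ivy_mech.euler_pose_to_quaternion_pose(euler_pose)  # 7
rot_vec_pose_again = ivy_mech.quaternion_pose_to_rot_vec_pose(quat_pose)  # 6
print(np.allclose(ivy.to_numpy(rot_vec_pose), ivy.to_numpy(rot_vec_pose_again), atol=1e-5))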
Example #7
def main(interactive=True, f=None):

    global INTERACTIVE
    INTERACTIVE = interactive

    # Framework Setup #
    # ----------------#

    # choose random framework

    set_framework(choose_random_framework() if f is None else f)

    # Spline Interpolation #
    # ---------------------#

    # config
    num_free_anchors = 3
    num_samples = 100
    constant_rot_vec = ivy.array([[0., 0., 0.]])
    constant_z = ivy.array([[0.]])

    # xy positions

    # 1 x 2
    start_xy = ivy.array([[0., 0.]])
    target_xy = ivy.array([[1., 1.]])

    # 1 x 2
    anchor1_xy = ivy.array([[0.6, 0.2]])
    anchor2_xy = ivy.array([[0.5, 0.5]])
    anchor3_xy = ivy.array([[0.4, 0.8]])

    # as 6DOF poses

    # 1 x 6
    start_pose = ivy.concatenate((start_xy, constant_z, constant_rot_vec), -1)
    anchor1_pose = ivy.concatenate((anchor1_xy, constant_z, constant_rot_vec),
                                   -1)
    anchor2_pose = ivy.concatenate((anchor2_xy, constant_z, constant_rot_vec),
                                   -1)
    anchor3_pose = ivy.concatenate((anchor3_xy, constant_z, constant_rot_vec),
                                   -1)
    target_pose = ivy.concatenate((target_xy, constant_z, constant_rot_vec),
                                  -1)

    num_anchors = num_free_anchors + 2

    # num_anchors x 6
    anchor_poses = ivy.concatenate(
        (start_pose, anchor1_pose, anchor2_pose, anchor3_pose, target_pose), 0)

    # uniform sampling for spline

    # num_anchors x 1
    anchor_points = ivy.expand_dims(ivy.linspace(0., 1., num_anchors), -1)

    # num_samples x 1
    query_points = ivy.expand_dims(ivy.linspace(0., 1., num_samples), -1)

    # interpolated spline poses

    # num_samples x 6
    interpolated_poses = ivy_robot.sample_spline_path(anchor_points,
                                                      anchor_poses,
                                                      query_points)

    # xy motion

    # num_anchors x 2
    anchor_xy_positions = anchor_poses[..., 0:2]

    # num_samples x 2
    interpolated_xy_positions = interpolated_poses[..., 0:2]

    # show xy path
    show_2d_spline_path(anchor_xy_positions, interpolated_xy_positions,
                        [-0.095, 0.055], [0.638, 0.171], [0.544, 0.486],
                        [0.445, 0.766], [0.9, 0.9], 'x', 'y',
                        'Interpolated Drone Pose Spline in XY Plane',
                        'start point', 'target point')

    # Rigid Mobile #
    # -------------#

    # drone relative body points
    rel_body_points = ivy.array([[0., 0., 0.], [-0.1, -0.1, 0.],
                                 [-0.1, 0.1, 0.], [0.1, -0.1, 0.],
                                 [0.1, 0.1, 0.]])

    # create drone as ivy rigid mobile robot
    drone = RigidMobile(rel_body_points)

    # rotation vectors

    # 1 x 3
    start_rot_vec = ivy.array([[0., 0., 0.]])
    target_rot_vec = ivy.array([[0., 0., np.pi]])

    # 1 x 3
    anchor1_rot_vec = ivy.array([[0., 0., np.pi / 4]])
    anchor2_rot_vec = ivy.array([[0., 0., 2 * np.pi / 4]])
    anchor3_rot_vec = ivy.array([[0., 0., 3 * np.pi / 4]])

    # as 6DOF poses

    # 1 x 6
    start_pose = ivy.concatenate((start_xy, constant_z, start_rot_vec), -1)
    anchor1_pose = ivy.concatenate((anchor1_xy, constant_z, anchor1_rot_vec),
                                   -1)
    anchor2_pose = ivy.concatenate((anchor2_xy, constant_z, anchor2_rot_vec),
                                   -1)
    anchor3_pose = ivy.concatenate((anchor3_xy, constant_z, anchor3_rot_vec),
                                   -1)
    target_pose = ivy.concatenate((target_xy, constant_z, target_rot_vec), -1)

    # num_anchors x 6
    anchor_poses = ivy.concatenate(
        (start_pose, anchor1_pose, anchor2_pose, anchor3_pose, target_pose), 0)

    # interpolated spline poses

    # num_samples x 6
    interpolated_poses = ivy_robot.sample_spline_path(anchor_points,
                                                      anchor_poses,
                                                      query_points)

    # as matrices

    # num_anchors x 3 x 4
    anchor_matrices = ivy_mech.rot_vec_pose_to_mat_pose(anchor_poses)

    # num_samples x 3 x 4
    interpolated_matrices = ivy_mech.rot_vec_pose_to_mat_pose(
        interpolated_poses)

    # sample drone body

    # num_anchors x num_body_points x 3
    anchor_body_points = drone.sample_body(anchor_matrices)

    # num_samples x num_body_points x 3
    interpolated_body_points = drone.sample_body(interpolated_matrices)

    # show
    show_full_spline_path(anchor_body_points, interpolated_body_points,
                          [-0.168, 0.19], [0.88, 0.73], 'x', 'y',
                          'Sampled Drone Body Positions in XY Plane',
                          'start points', 'target points', False)

    # Manipulator #
    # ------------#

    class SimpleManipulator(Manipulator):

        # noinspection PyShadowingNames
        def __init__(self, base_inv_ext_mat=None):
            a_s = ivy.array([0.5, 0.5])
            d_s = ivy.array([0., 0.])
            alpha_s = ivy.array([0., 0.])
            dh_joint_scales = ivy.ones((2, ))
            dh_joint_offsets = ivy.array([-np.pi / 2, 0.])
            super().__init__(a_s, d_s, alpha_s, dh_joint_scales,
                             dh_joint_offsets, base_inv_ext_mat)

    # create manipulator as ivy manipulator
    manipulator = SimpleManipulator()

    # joint angles

    # 1 x 2
    start_joint_angles = ivy.array([[0., 0.]])
    target_joint_angles = ivy.array([[-np.pi / 4, -np.pi / 4]])

    # 1 x 2
    anchor1_joint_angles = -ivy.array([[0.2, 0.6]]) * np.pi / 4
    anchor2_joint_angles = -ivy.array([[0.5, 0.5]]) * np.pi / 4
    anchor3_joint_angles = -ivy.array([[0.8, 0.4]]) * np.pi / 4

    # num_anchors x 2
    anchor_joint_angles = ivy.concatenate(
        (start_joint_angles, anchor1_joint_angles, anchor2_joint_angles,
         anchor3_joint_angles, target_joint_angles), 0)

    # interpolated joint angles

    # num_samples x 2
    interpolated_joint_angles = ivy_robot.sample_spline_path(
        anchor_points, anchor_joint_angles, query_points)

    # sample links

    # num_anchors x num_link_points x 3
    anchor_link_points = manipulator.sample_links(anchor_joint_angles,
                                                  samples_per_metre=5)

    # num_samples x num_link_points x 3
    interpolated_link_points = manipulator.sample_links(
        interpolated_joint_angles, samples_per_metre=5)

    # show
    show_2d_spline_path(anchor_joint_angles, interpolated_joint_angles,
                        [-0.222, -0.015], [-0.147, -0.52], [-0.38, -0.366],
                        [-0.64, -0.263], [-0.752, -0.79], r'$\theta_1$',
                        r'$\theta_2$',
                        'Interpolated Manipulator Joint Angles Spline',
                        'start angles', 'target angles')
    show_full_spline_path(anchor_link_points, interpolated_link_points,
                          [0.026, 0.8], [0.542, 0.26], 'x', 'y',
                          'Sampled Manipulator Links in XY Plane',
                          'start config', 'target config', True)