def __init__(self, right_arm):
        # Make sure there is a clean shutdown
        rospy.on_shutdown(self.clean_shutdown)

        # control parameters
        self._rate = 500.0  # Hz

        self._right_arm = right_arm

        self._right_joint_names = self._right_arm.joint_names()

        self.set_neutral()

        #  end-effector initial pose
        eef_pose = self._right_arm.endpoint_pose()
        self.start_eef_pos = np.array([eef_pose['position'].x,
                                       eef_pose['position'].y,
                                       eef_pose['position'].z,
                                       ])
        self.start_eef_quat = np.array([eef_pose['orientation'].x,
                                        eef_pose['orientation'].y,
                                        eef_pose['orientation'].z,
                                        eef_pose['orientation'].w])

        self.base_rot_in_eef = T.quat2mat(self.start_eef_quat).T

        self.start_eef_pos = self.calibrate_initial_position()

        # Eef velocity control
        self.controller = SawyerEEFVelocityController()

        # Sin-cos control
        self.period_factor = 0.15
        self.amplitude_factor = 0.1
Example 2

    def get_control(self, action, safety_max=0.1):
        '''Convert a planar end-effector velocity action into joint velocity commands,
                appending a vertical velocity term proportional to the drift from the starting height.'''
        current_joint_angles = [self._right_arm.joint_angles()['right_j{}'.format(i)] for i in range(7)]

        # Proportional vertical term: how far the end effector has drifted from its starting height
        z_vel = self.get_right_hand_pos()[2] - self.start_eef_pos[2]
        action = np.concatenate([action, [2 * z_vel]])

        # get x,y,z velocity in base frame
        action_in_base = self.base_rot_in_eef.dot(action)
        action_in_base = np.clip(action_in_base, -safety_max, safety_max)

        # Correct for any change in orientation using a proportional controller
        current_right_hand_quat = self.get_right_hand_quat()
        reference_right_hand_quat = self.start_eef_quat
        orn_diff = T.quat_multiply(reference_right_hand_quat, T.quat_inverse(current_right_hand_quat))

        orn_diff_mat = T.quat2mat(orn_diff)
        orn_diff_twice = orn_diff_mat.dot(orn_diff_mat)

        # Construct pose matrix
        pose_matrix = np.zeros((4, 4))
        pose_matrix[:3, :3] = orn_diff_twice
        pose_matrix[:3, 3] = action_in_base
        pose_matrix[3, 3] = 1

        # pose_matrix = T.pose2mat((action_in_base, orn_diff))
        joint_velocities = self.controller.compute_joint_velocities_for_endpoint_velocity(pose_matrix,
                                                                                          current_joint_angles)
        j_v = {}
        for i in range(7):
            key = 'right_j{}'.format(i)
            j_v[key] = joint_velocities[i]
        return j_v
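
The orientation hold above composes the reference quaternion with the inverse of the current one, which gives the rotation that takes the hand back to its starting orientation; composing that error rotation with itself doubles the correction angle, so the block behaves like a proportional controller with a gain of roughly 2. A standalone sketch of the same computation using scipy (an assumption for illustration; the code above uses its own transform-utility module T):

import numpy as np
from scipy.spatial.transform import Rotation as R

def orientation_correction(reference_quat, current_quat):
    # Quaternions are (x, y, z, w), matching intera's endpoint_pose() ordering.
    rot_err = R.from_quat(reference_quat) * R.from_quat(current_quat).inv()
    # Apply the error rotation twice, mirroring orn_diff_mat.dot(orn_diff_mat) above.
    return (rot_err * rot_err).as_matrix()

# Example: the hand has rolled 0.1 rad about x relative to the reference.
reference = np.array([0.0, 0.0, 0.0, 1.0])
current = R.from_euler('x', 0.1).as_quat()
print(orientation_correction(reference, current))   # ~ a rotation of -0.2 rad about x
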
Example 3

    def get_control(self, action, max_action=0.1, safety_max=0.15):
        '''Convert the action to joint velocity commands.
                Use inverse kinematics to go from end-effector Cartesian velocities to joint velocities.'''
        action = max_action * action
        current_joint_angles = [self._right_arm.joint_angles()['right_j{}'.format(i)] for i in range(7)]

        # get x,y,z velocity in base frame
        action_in_base = self.base_rot_in_eef.dot(action)
        action_in_base = np.clip(action_in_base, -safety_max, safety_max)

        # Correct for any change in orientation using a proportional controller
        current_right_hand_quat = self.get_right_hand_quat()
        reference_right_hand_quat = self.start_eef_quat
        orn_diff = T.quat_multiply(reference_right_hand_quat, T.quat_inverse(current_right_hand_quat))

        orn_diff_mat = T.quat2mat(orn_diff)
        orn_diff_twice = orn_diff_mat.dot(orn_diff_mat)

        # Construct pose matrix
        pose_matrix = np.zeros((4, 4))
        pose_matrix[:3, :3] = orn_diff_twice
        pose_matrix[:3, 3] = action_in_base
        pose_matrix[3, 3] = 1

        # pose_matrix = T.pose2mat((action_in_base, orn_diff))
        joint_velocities = self.controller.compute_joint_velocities_for_endpoint_velocity(pose_matrix,
                                                                                          current_joint_angles)
        j_v = {}
        for i in range(7):
            key = 'right_j{}'.format(i)
            j_v[key] = joint_velocities[i]
        return j_v
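
The joint-velocity dictionary returned by get_control is meant to be streamed straight to intera's set_joint_velocities, as the calibration script at the bottom of this page does. A minimal sketch of such a loop (the class name Control and the 3-element action are assumptions; match the action size to whichever get_control variant your controller exposes):

import numpy as np
import rospy

rospy.init_node('velocity_control_demo', anonymous=True)
controller = Control()                    # assumed: one of the controller classes on this page
rate = rospy.Rate(10)                     # same command rate as the calibration script below

action = np.array([0.05, 0.0, 0.0])       # small, constant end-effector velocity request
for _ in range(50):
    if rospy.is_shutdown():
        break
    joint_vels = controller.get_control(action)    # {'right_j0': ..., ..., 'right_j6': ...}
    controller._right_arm.set_joint_velocities(joint_vels)
    rate.sleep()
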
Example 4

    def __init__(self):
        rospy.on_shutdown(self.clean_shutdown)

        self._rate = 500.0
        self._right_arm = intera_interface.limb.Limb("right")
        self._right_joint_names = self._right_arm.joint_names()

        print("Getting robot state... ")
        self._rs = intera_interface.RobotEnable(CHECK_VERSION)
        self._init_state = self._rs.state().enabled
        print("Enabling robot... ")
        self._rs.enable()

        self.controller = SawyerEEFVelocityController()

        self._right_arm.move_to_neutral(speed=0.1)

        # Getting the base rotation in eef for coordinate frame transformations
        eef_pose_in_base = self._right_arm.endpoint_pose()
        self.eef_quat_in_base = np.array([
            eef_pose_in_base['orientation'].x,
            eef_pose_in_base['orientation'].y,
            eef_pose_in_base['orientation'].z,
            eef_pose_in_base['orientation'].w,
        ])

        self.eef_rot_in_base = T.quat2mat(self.eef_quat_in_base)
        self.base_rot_in_eef = self.eef_rot_in_base.T
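
The base_rot_in_eef bookkeeping above uses the fact that the transpose of a rotation matrix is its inverse: transposing the end-effector rotation expressed in the base frame yields the matrix that maps base-frame vectors into the end-effector frame. A small numpy illustration of the idea (no robot required):

import numpy as np

theta = np.pi / 4
eef_rot_in_base = np.array([[np.cos(theta), -np.sin(theta), 0.],
                            [np.sin(theta),  np.cos(theta), 0.],
                            [0.,             0.,            1.]])
base_rot_in_eef = eef_rot_in_base.T       # transpose == inverse for rotation matrices

v_in_eef = np.array([1., 0., 0.])          # e.g. a velocity along the end-effector x-axis
v_in_base = eef_rot_in_base.dot(v_in_eef)  # the same vector expressed in the base frame
assert np.allclose(base_rot_in_eef.dot(v_in_base), v_in_eef)
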
Example 5

    def grab_data(self):
        '''Returns the object observations in the end-effector goal frame (apart from the z-angle, which is measured around the table normal, i.e. the goal-frame z-axis).'''

        # Grab raw data
        object_pose_in_cam, time_cam = self.grab_data_raw()

        # Compute velocities
        object_vel_in_cam, lost = self.compute_velocities(
            object_pose_in_cam[0], time_cam)

        if lost:
            # If tracking lost the object, use the previous velocity to
            # extrapolate the current position.
            object_pose_in_cam = list(object_pose_in_cam)
            object_pose_in_cam[0] = (object_pose_in_cam[0]
                                     + object_vel_in_cam / self.publication_rate)

        # Convert data to the correct frame
        object_posemat_in_eefg = T.pose_in_A_to_pose_in_B(
            T.pose2mat(object_pose_in_cam), self.cam_posemat_in_eefg)
        object_pos_in_eefg = object_posemat_in_eefg[:3, 3]
        object_vel_in_eefg = self.cam_posemat_in_eefg[:3, :3].dot(
            object_vel_in_cam)

        object_rot_in_camera = T.quat2mat(object_pose_in_cam[1])
        object_rot_in_goal = self.cam_rot_in_goal.dot(object_rot_in_camera)
        object_euler_in_goal = T.mat2euler(object_rot_in_goal)
        z_angle = np.array([object_euler_in_goal[2]])

        return object_pos_in_eefg, object_vel_in_eefg, z_angle, lost
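
The frame conversion in grab_data boils down to composing homogeneous transforms: a pose known in the camera frame is re-expressed in the end-effector goal frame by left-multiplying it with the camera's pose in that frame, which is what the T.pose_in_A_to_pose_in_B call is assumed to do. A minimal numpy sketch with an illustrative helper (not the actual T module):

import numpy as np

def pose_to_mat(pos, rot):
    # Pack a position vector and a 3x3 rotation into a 4x4 homogeneous transform.
    mat = np.eye(4)
    mat[:3, :3] = rot
    mat[:3, 3] = pos
    return mat

# Object pose expressed in the camera frame, camera pose expressed in the goal frame.
object_posemat_in_cam = pose_to_mat(np.array([0.1, 0.0, 0.5]), np.eye(3))
cam_posemat_in_goal = pose_to_mat(np.array([0.0, 0.2, 1.0]), np.eye(3))

# Re-express the object pose in the goal frame by composing the two transforms.
object_posemat_in_goal = cam_posemat_in_goal.dot(object_posemat_in_cam)
object_pos_in_goal = object_posemat_in_goal[:3, 3]   # -> [0.1, 0.2, 1.5]
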
Example 6

    def __init__(self, right_arm, reset_goal_pos=False, predefined_goal=None):
        '''
        Creates observations for the Reaching task
        :param right_arm: The intera_interface limb object used to control the robot.
        :param reset_goal_pos: Whether to reset any pre-existing goal position
        :param predefined_goal: Use one of the hard-coded goal positions to avoid having to place the end effector at the goal by hand.
        '''
        self._right_arm = right_arm
        self._joint_names = self._right_arm.joint_names()

        self.previous_joint_vels_array = np.zeros((7, ))

        self._right_arm.move_to_neutral(speed=0.1)

        # Getting the base rotation in eef for coordinate frame transformations
        eef_pose_in_base = self._right_arm.endpoint_pose()
        self.eef_quat_in_base = np.array([
            eef_pose_in_base['orientation'].x,
            eef_pose_in_base['orientation'].y,
            eef_pose_in_base['orientation'].z,
            eef_pose_in_base['orientation'].w,
        ])

        self.eef_rot_in_base = T.quat2mat(self.eef_quat_in_base)
        self.base_rot_in_eef = self.eef_rot_in_base.T

        self.controller = SawyerEEFVelocityController()

        # Predefined, hard-coded goal positions for fast switching between goals.
        #  These vectors correspond to the different positions of eefg in the base frame.
        self.predefined_goals = {
            'goal_1':
            np.array([
                0.44889998342216186, 0.15767033384085796, -0.07223470331644867
            ]),
            'goal_2':
            np.array(
                [0.4064366403627939, 0.2151227875993398,
                 -0.07152062349980176]),
            'goal_3':
            np.array(
                [0.5353834307301527, 0.0993579385905306, -0.072644653035373])
        }

        if (predefined_goal is None):
            self.goal_pos_in_base = self.record_goal_pos(reset_goal_pos)
        elif predefined_goal in self.predefined_goals:
            self.set_predefined_goal(predefined_goal)
        else:
            raise NotImplementedError()
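
A possible way to instantiate this observation helper against one of the hard-coded goals (ReachingObservations is a placeholder name; substitute the actual class this __init__ belongs to):

import intera_interface
import rospy

rospy.init_node('reaching_observations_demo', anonymous=True)
right_arm = intera_interface.limb.Limb("right")

# Reuse a stored goal instead of placing the end effector at the goal by hand.
observations = ReachingObservations(right_arm, predefined_goal='goal_1')
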
Example 7

    def _finish_initialisation(self):
        '''
        Find the goal position at initialisation and keep it constant until the observations are reset.
        '''
        rospy.loginfo('Initialising observations ...')

        while True:
            # If a goal position is already in the ROS parameters, there is nothing to be done
            if (rospy.has_param('{}/pushing/goal_position'.format(
                    self.camera))):
                break

            # Stop waiting once enough individual goal positions have been recorded
            if len(self.goal_positions_in_cam) > self.calibration_samples:
                break

        self.calibration_lock.acquire()
        try:
            # Get the mean position and orientation of the goal in the camera frame
            goal_positions_in_cam = np.stack(self.goal_positions_in_cam,
                                             axis=0)
            goal_position_in_cam = np.mean(goal_positions_in_cam, axis=0)

            goal_orientations_in_cam = np.stack(self.goal_orientations_in_cam,
                                                axis=0)
            goal_orientation_in_cam = np.mean(goal_orientations_in_cam, axis=0)

            rospy.set_param('{}/pushing/goal_position'.format(self.camera),
                            goal_position_in_cam.tolist())
            rospy.set_param('{}/pushing/goal_orientation'.format(self.camera),
                            goal_orientation_in_cam.tolist())

            goal_pose_in_cam = (goal_position_in_cam, goal_orientation_in_cam)
            goal_rot_in_cam = T.quat2mat(goal_orientation_in_cam)

            self.cam_rot_in_goal = goal_rot_in_cam.T
            goal_posemat_in_cam = T.pose2mat(goal_pose_in_cam)
            cam_posemat_in_goal = T.pose_inv(goal_posemat_in_cam)
            self.cam_posemat_in_eefg = T.pose_in_A_to_pose_in_B(
                cam_posemat_in_goal, self.goal_posemat_in_eefg)

            rospy.loginfo('Done')
            self.operational = True

        finally:
            self.calibration_lock.release()
Example 8

    def __init__(self):
        # Make sure there is a clean shutdown
        rospy.on_shutdown(self.clean_shutdown)
        # Setting up Sawyer
        self._right_arm = intera_interface.limb.Limb("right")
        self._right_joint_names = self._right_arm.joint_names()

        print("Getting robot state... ")
        self._rs = intera_interface.RobotEnable(CHECK_VERSION)
        self._init_state = self._rs.state().enabled
        print("Enabling robot... ")
        self._rs.enable()

        self.set_neutral()

        #  end-effector initial pose
        self.eef_pose = self._right_arm.endpoint_pose()
        self.eef_quat = np.array([
            self.eef_pose['orientation'].x,
            self.eef_pose['orientation'].y,
            self.eef_pose['orientation'].z,
            self.eef_pose['orientation'].w,
        ])

        self.base_rot_in_eef = T.quat2mat(self.eef_quat).T

        # control parameters
        self._rate = 500.0  # Hz

        # Eef velocity control
        self.controller = SawyerEEFVelocityController()

        # Policy parameters
        self.policy_type = 'joint'
        self.line_policy = np.array([0., 0., 0.])
        self.period_factor = 0.15
        self.amplitude_factor = 0.1
Example 9

    def __init__(self):
        # Make sure there is a clean shutdown
        rospy.on_shutdown(self.clean_shutdown)

        # control parameters
        self._rate = 500.0  # Hz

        # Setting up the observation subscriber
        self.characterisation_observation = None
        self.observation = None

        self.characterisation_observation_subscriber = rospy.Subscriber('characterisation_observations',
                                                                        pushing_characterisation_observations,
                                                                        self.characterisation_observation_callback,
                                                                        queue_size=1)
        self.observation_subscriber = rospy.Subscriber('observations', numpy_msg(Floats64),
                                                       self.observation_callback,
                                                       queue_size=1)


        # Busy-wait until the first messages from both subscribers have arrived
        while self.characterisation_observation is None or self.observation is None:
            pass


        # Setting up Sawyer
        self._right_arm = intera_interface.limb.Limb("right")
        self._right_joint_names = self._right_arm.joint_names()

        print("Getting robot state... ")
        self._rs = intera_interface.RobotEnable(CHECK_VERSION)
        self._init_state = self._rs.state().enabled
        print("Enabling robot... ")
        self._rs.enable()

        self.set_neutral()

        #  end-effector initial pose
        eef_pose = self._right_arm.endpoint_pose()
        self.start_eef_pos = np.array([eef_pose['position'].x,
                                       eef_pose['position'].y,
                                       eef_pose['position'].z,
                                       ])
        self.start_eef_quat = np.array([eef_pose['orientation'].x,
                                        eef_pose['orientation'].y,
                                        eef_pose['orientation'].z,
                                        eef_pose['orientation'].w])

        self.base_rot_in_eef = T.quat2mat(self.start_eef_quat).T

        if (not rospy.has_param('pushing/start_position')):
            self.start_eef_pos, self.start_eef_quat = self.calibrate_initial_position()
        else:
            self.start_eef_pos = np.array(rospy.get_param('pushing/start_position'))
            self.start_eef_quat = np.array(rospy.get_param('pushing/start_orientation'))

        # Eef velocity control
        self.controller = SawyerEEFVelocityController()

        # Sin-cos control
        self.period_factor = 0.15
        self.amplitude_factor = 0.1
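
period_factor and amplitude_factor parameterise the sinusoidal ("sin-cos") exploration commands. get_policy itself is not part of this excerpt, so the following is only a guess at what a joint-space sine policy driven by those two numbers might look like:

import numpy as np

def sine_joint_policy(t, period_factor=0.15, amplitude_factor=0.1, num_joints=7):
    # Hypothetical sine joint-velocity policy driven by the elapsed time t (seconds).
    phases = np.arange(num_joints) * np.pi / num_joints   # spread the joints out in phase
    return amplitude_factor * np.sin(2.0 * np.pi * period_factor * t + phases)

# e.g. a velocity command for right_j0 .. right_j6 two seconds into a rollout
print(sine_joint_policy(2.0))
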
Example 10

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-p",
        "--log_path",
        type=str,
        default='../logs/internal_dynamics_calibration/trajectories.pckl',
        help="Path where the trajectories produced are stored")
    args = parser.parse_args(rospy.myargv()[1:])
    log_path = args.log_path

    rospy.init_node('calibrate_internal_dynamics', anonymous=True)

    # Controller
    controller = Control()

    done = False
    print('Ready. Press any key to start.')
    while not done and not rospy.is_shutdown():
        c = intera_external_devices.getch()
        if c:
            done = True

    # Set control rate
    rate = rospy.Rate(10)

    trajectories_gathered = []

    # Roll out each of the predefined policies
    for policy_id, policy_params in enumerate(_POLICIES):
        if (rospy.is_shutdown()):
            break

        trajectories_gathered.append([])

        controller.set_neutral()
        time.sleep(2)

        controller.set_policy_type(policy_params['policy_type'])
        controller.set_period_amplitude(policy_params['period'],
                                        policy_params['amplitude'])
        controller.set_line_policy(policy_params['line_policy'])

        steps = policy_params['steps']
        start = rospy.Time.now()
        for i in range(steps):
            if (rospy.is_shutdown()):
                break
            elapsed = rospy.Time.now() - start

            # Get observation
            eef_pose_in_base = controller._right_arm.endpoint_pose()
            eef_pos_in_base = np.array([
                eef_pose_in_base['position'].x,
                eef_pose_in_base['position'].y,
                eef_pose_in_base['position'].z,
            ])

            eef_quat_in_base = np.array([
                eef_pose_in_base['orientation'].x,
                eef_pose_in_base['orientation'].y,
                eef_pose_in_base['orientation'].z,
                eef_pose_in_base['orientation'].w,
            ])

            eef_rot_in_base = T.quat2mat(eef_quat_in_base)

            eef_vel_in_eef = controller._right_arm.endpoint_velocity()
            eef_vel_in_eef = np.array([
                eef_vel_in_eef['linear'].x,
                eef_vel_in_eef['linear'].y,
                eef_vel_in_eef['linear'].z,
            ])
            eef_vel_in_base = eef_rot_in_base.dot(eef_vel_in_eef)

            obs_joint_pos = controller._right_arm.joint_angles()
            obs_joint_pos = np.array([
                obs_joint_pos['right_j0'], obs_joint_pos['right_j1'],
                obs_joint_pos['right_j2'], obs_joint_pos['right_j3'],
                obs_joint_pos['right_j4'], obs_joint_pos['right_j5'],
                obs_joint_pos['right_j6']
            ])
            obs_joint_vels = controller._right_arm.joint_velocities()
            obs_joint_vels = np.array([
                obs_joint_vels['right_j0'],
                obs_joint_vels['right_j1'],
                obs_joint_vels['right_j2'],
                obs_joint_vels['right_j3'],
                obs_joint_vels['right_j4'],
                obs_joint_vels['right_j5'],
                obs_joint_vels['right_j6'],
            ])

            # Find action
            action = controller.get_policy(elapsed.to_sec(),
                                           theshold_time=steps * (5. / 60.))

            # Get control
            if (controller.policy_type == 'joint'):
                joint_vels = dict([('right_j{}'.format(idx), action[idx])
                                   for idx in range(7)])
            else:
                joint_vels = controller.get_control(action)
            # Send control to the robot
            controller._right_arm.set_joint_velocities(joint_vels)

            # Record
            trajectories_gathered[-1].append([
                eef_pos_in_base, eef_vel_in_base, obs_joint_pos, obs_joint_vels
            ])

            rate.sleep()

    # Pickle the results
    log_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            log_path)
    with open(os.path.abspath(log_path), 'wb') as f:
        pickle.dump(trajectories_gathered, f)

    rospy.signal_shutdown('Done')
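
The pickled file can be read back for analysis: each entry of the outer list is one policy rollout and each inner entry is one control step. A small loading sketch (using the script's default log path):

import pickle

with open('../logs/internal_dynamics_calibration/trajectories.pckl', 'rb') as f:
    trajectories = pickle.load(f)

# trajectories[policy_id][step] == [eef_pos_in_base, eef_vel_in_base, obs_joint_pos, obs_joint_vels]
print(len(trajectories), 'rollouts,', len(trajectories[0]), 'steps in the first one')
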