Code example #1
    def get_grasp_plan(self, object_name, marker_num=None):
        """ The grasp plan for the AR tracker is simply based on a fixed offset for each object
        Params
        ---
        object_name: The name of the object to be grasped. e.g. cup
        marker_num: the associated marker for the object. If you would like
        to override the parameter put in the dictionary
        Returns
        ---
        A tuple containing pregrasp pose and grasp pose
        """
        if object_name not in self.object_dict and not marker_num:
            raise ValueError(
                "no marker assigned for object {}".format(object_name))
        if marker_num:
            self.object_dict[object_name][0] = marker_num
        ar_frame = "/ar_marker_{}".format(self.object_dict[object_name][0])
        world_frame = "/world"

        self.listener.waitForTransform(ar_frame, world_frame, rospy.Time(0),
                                       rospy.Duration(4.0))
        try:
            (trans,
             rot) = self.listener.lookupTransform(world_frame, ar_frame,
                                                  rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logerr(
                "Could not get tf frame for AR tracker {}".format(ar_frame))
            raise

        if object_name == "target":
            hard_code_pose = Pose.load("grasp_hardcode")
        else:
            # opposite_grasp is the non-flipping orientation
            hard_code_pose = Pose.load("opposite_grasp")
        rospy.loginfo("Hardcoded grasp z: {}".format(hard_code_pose.position.z))

        rospy.loginfo("AR tracker translation: {}, rotation: {}".format(
            trans, rot))
        rospy.loginfo("Hardcoded pose {}".format(hard_code_pose))

        cup_position = np.asarray(
            [trans[0], trans[1], trans[2] + self.object_dict[object_name][1]])
        #cup_position = np.asarray([trans[0], trans[1], (hard_code_pose.position.z+self.object_dict[object_name][1])])

        grasp_pose = Pose(cup_position, hard_code_pose.orientation)
        rospy.loginfo("found tracker positon {}".format(grasp_pose))

        pre_grasp_pose = self._offset_hand(grasp_pose, offset_dist=0.1)
        return pre_grasp_pose, grasp_pose
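
A minimal usage sketch for the grasp-plan method above. It assumes the method belongs to the ARTrackPlanner class exercised in a test further down, that object_dict already maps "cup" to a marker number and z-offset, and that a ROS master and the AR tracker are running; the node name and marker number are hypothetical.

import rospy

if __name__ == "__main__":
    rospy.init_node("ar_grasp_demo")    # hypothetical node name
    planner = ARTrackPlanner()          # class from the tests later in this section
    # marker_num=2 overrides whatever marker is stored for "cup" in object_dict
    pre_grasp_pose, grasp_pose = planner.get_grasp_plan("cup", marker_num=2)
    rospy.loginfo("pre-grasp: {} grasp: {}".format(pre_grasp_pose, grasp_pose))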
Code example #2
def test_trajopt(pose_input):
    moveit_commander.roscpp_initialize(sys.argv)

    jaco = JacoInterface()

    start_pose = jaco.arm_group.get_current_pose()

    if isinstance(pose_input, basestring):
        pose = Pose.load(pose_input)
    elif isinstance(pose_input, Pose):
        pose = pose_input
    else:
        raise ValueError(
            "pose_input must be a pose name string or a Pose instance")
    p = geometry_msgs.msg.PoseStamped()
    p.header.frame_id = "world"
    p.pose = pose.ros_message

    traj = jaco.plan(start_pose, p)

    jaco.execute(traj)

    markers = jaco.planner.get_body_markers()
    for m in markers:
        jaco.marker_array_pub.publish(m)

    moveit_commander.roscpp_shutdown()
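
The test above never calls rospy.init_node, which the publishers and TF listeners inside JacoInterface need; a minimal entry point might look like the sketch below ("home" is a hypothetical saved pose name resolvable by Pose.load).

import rospy

if __name__ == "__main__":
    rospy.init_node("trajopt_test", anonymous=True)
    # "home" is a hypothetical saved pose name
    test_trajopt("home")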
Code example #3
    def plan_config(self, config):
        """ find a plan to the given robot configuration goal (joint goal)
        params
        ---
        config: The config of the robot (7,) numpy array
        returns
        --
        nothing
        """
        self.group.clear_pose_targets()
        # self.group.forget_joint_values()
        # self.group.get_current_joint_values
        # self.group._g.set_joint_value_target_from_joint_state_message()

        # header = std_msgs.msg.Header()
        # robot_state = moveit_msgs.msg.RobotState()
        # robot_state.joint_state.name = ['j2s7s300_joint_{}'.format(i) for i in range(1,8)]
        # robot_state.joint_state.position = self.group.get_current_joint_values()
        # rospy.loginfo("joint state message {}".format(robot_state.joint_state))
        # self.group._g.set_joint_value_target_from_joint_state_message(robot_state.joint_state)

        try:
            self.group.set_joint_value_target(config)
        except MoveItCommanderException:
            rospy.loginfo("MoveIt exception caught while setting joint target")

        # bla = self.group.get_joints()
        # rospy.loginfo("joints: {}".format(bla))

        # print("joints,", str(bla))
        # rospy.loginfo("active joints {}".format(self.group.get_variable_count()))

        # joint_target = {}
        # for i in range(1,8):
        #     joint_target["j2s7s300_joint_{}".format(i)] = 0
        # print("joint_target",joint_target)

        # self.group.set_joint_value_target(joint_target)

        # rospy.loginfo("Planning to {}".format(pose))
        self.group.set_start_state_to_current_state()
        rospy.sleep(1)
        self.solved_plan = self.group.plan(
        )  #automatically displays trajectory
        if self.solved_plan is None:
            rospy.logerr("Plan not solved")
            return None

        cur_joints = list(
            self.solved_plan.joint_trajectory.points[-1].positions)
        posefk = self.fk(cur_joints).pose_stamped[0].pose
        final_pose_res = Pose(
            [posefk.position.x, posefk.position.y, posefk.position.z], [
                posefk.orientation.x, posefk.orientation.y,
                posefk.orientation.z, posefk.orientation.w
            ])
        rospy.loginfo("passed to function end!")
        return final_pose_res
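
plan_config relies on a self.fk helper that is not shown here. Below is a sketch of one way it could be provided, using MoveIt's standard /compute_fk service (moveit_msgs/GetPositionFK); the end-effector link name and the "world" frame are assumptions based on the j2s7s300 naming used elsewhere in this code.

import rospy
import std_msgs.msg
import moveit_msgs.msg
from moveit_msgs.srv import GetPositionFK

def make_fk_proxy(eef_link="j2s7s300_end_effector"):
    """Return a callable fk(joint_values) backed by MoveIt's /compute_fk service."""
    rospy.wait_for_service("compute_fk")
    compute_fk = rospy.ServiceProxy("compute_fk", GetPositionFK)

    def fk(joint_values):
        header = std_msgs.msg.Header()
        header.frame_id = "world"
        robot_state = moveit_msgs.msg.RobotState()
        robot_state.joint_state.name = [
            "j2s7s300_joint_{}".format(i) for i in range(1, 8)
        ]
        robot_state.joint_state.position = joint_values
        # The response's pose_stamped[0] is what plan_config() reads above.
        return compute_fk(header, [eef_link], robot_state)

    return fk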
Code example #4
    def current_pose(self):
        """ Return the current end-effector pose as a Pose object """
        cur_pose = self.group.get_current_pose(self.eef_name)
        position = [
            cur_pose.pose.position.x, cur_pose.pose.position.y,
            cur_pose.pose.position.z
        ]
        orientation = [
            cur_pose.pose.orientation.x, cur_pose.pose.orientation.y,
            cur_pose.pose.orientation.z, cur_pose.pose.orientation.w
        ]
        pose = Pose(position, orientation)
        return pose
Code example #5
    def plan_ik(self, pose_input):
        """ find a plan to the position
        Params
        ---
        pose_input: The pose to plan to. `string` or `Pose`
        Returns
        ---
        1 if plan succeeded 0 otherwise
        """
        if isinstance(pose_input, basestring):
            pose = Pose.load(pose_input)
        elif isinstance(pose_input, Pose):
            pose = pose_input
        else:
            raise ValueError(
                "pose_input must be a pose name string or a Pose instance")

        # self.group.clear_pose_targets()

        # self.group.set_pose_target(pose.ros_message)

        # build a stamped goal pose in the world frame and query IK
        p = geometry_msgs.msg.PoseStamped()
        p.header.frame_id = "world"
        p.pose = pose.ros_message
        ik_config = self.ik(p)
        rospy.loginfo("IK Config: {}".format(ik_config))
        joints = list(ik_config.solution.joint_state.position[0:7])

        # current_joints = self.group.get_current_joint_values()
        # rospy.loginfo("current joints: {}".format(current_joints))
        # rospy.loginfo("set joing arguments: {}".format(joints))

        # header = std_msgs.msg.Header()
        # robot_state = moveit_msgs.msg.RobotState()
        # robot_state.joint_state.name = ['j2s7s300_joint_{}'.format(i) for i in range(1,8)]
        # robot_state.joint_state.position = joints
        # self.group.set_joint_value_target(robot_state.joint_state)

        rospy.loginfo("ik joints:{}".format(joints))
        final_pose_res = self.plan_config(joints)

        # rospy.loginfo("Planning to {}".format(pose))
        # self.group.set_start_state_to_current_state()
        # rospy.sleep(1)
        # self.solved_plan = self.group.plan() #automatically displays trajectory
        # if self.solved_plan is None:
        #     rospy.logerr("Plan not solved")
        #     return None

        # cur_joints = list(self.solved_plan.joint_trajectory.points[-1].positions)
        # posefk = self.fk(cur_joints).pose_stamped[0].pose
        # final_pose_res = Pose([posefk.position.x, posefk.position.y, posefk.position.z],[posefk.orientation.x, posefk.orientation.y, posefk.orientation.z, posefk.orientation.w])
        return pose, final_pose_res
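
plan_ik calls a self.ik helper that is not shown. A sketch of a possible implementation using MoveIt's standard /compute_ik service (moveit_msgs/GetPositionIK) follows; the planning group name "arm" is an assumption. The response's solution.joint_state.position is what plan_ik slices above.

import rospy
from moveit_msgs.msg import PositionIKRequest
from moveit_msgs.srv import GetPositionIK

def make_ik_proxy(group_name="arm"):
    """Return a callable ik(pose_stamped) backed by MoveIt's /compute_ik service."""
    rospy.wait_for_service("compute_ik")
    compute_ik = rospy.ServiceProxy("compute_ik", GetPositionIK)

    def ik(pose_stamped):
        request = PositionIKRequest()
        request.group_name = group_name      # assumed planning group name
        request.pose_stamped = pose_stamped
        request.timeout = rospy.Duration(1.0)
        return compute_ik(ik_request=request)

    return ik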
Code example #6
    def plan(self, pose_input):
        """ find a plan to the position using trajopt
        Params
        ---
        pose_input: The pose to plan to. `string` or `Pose`
        Returns
        ---
        1 if plan succeeded 0 otherwise
        """
        if isinstance(pose_input, basestring):
            pose = Pose.load(pose_input)
        elif isinstance(pose_input, Pose):
            pose = pose_input
        else:
            raise ValueError(
                "pose_input must be a pose name string or a Pose instance")

        start_pose = self.jaco.arm_group.get_current_pose()
        p = geometry_msgs.msg.PoseStamped()
        p.header.frame_id = "world"
        p.pose = pose.ros_message

        self.solved_plan = self.jaco.plan(start_pose, p)

        # markers = jaco.planner.get_body_markers()
        # for m in markers:
        #     jaco.marker_array_pub.publish(m)
        if self.solved_plan is None:
            rospy.logerr("Plan not solved")
            return None

        cur_joints = list(
            self.solved_plan.joint_trajectory.points[-1].positions)
        posefk = self.fk(cur_joints).pose_stamped[0].pose
        final_pose_res = Pose(
            [posefk.position.x, posefk.position.y, posefk.position.z], [
                posefk.orientation.x, posefk.orientation.y,
                posefk.orientation.z, posefk.orientation.w
            ])
        return pose, final_pose_res
Code example #7
def offset_hand(pose_input, unit_vector=[1, 0, 0], offset_dist=0.1):
    """ Find the pre grasp pose by offsetting the hand backwards from its current position
    grasp_pose: the pose of the grasp location
    offset_dist: the amount to offset off the object
    Returns
    ---
    pre_grasp_pose: the offsetted pose of the object
    """
    # rotate the approach direction (hand frame) into the world frame,
    # then back the hand off along it
    quat = pose_input.orientation[0:4]
    direction = spacial_location.qv_mult(quat, unit_vector)
    pre_grasp_position = pose_input.position - direction * offset_dist

    pre_grasp_pose = Pose(pre_grasp_position, pose_input.orientation)

    pose_input.show_position_marker(ident=1, label="grasp pose")
    pre_grasp_pose.show_position_marker(ident=2, label="pregrasp pose")
    # import pdb; pdb.set_trace()

    return pre_grasp_pose
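
spacial_location.qv_mult is project-local and not shown; it is assumed to rotate a vector by a quaternion. A self-contained sketch of that operation using tf.transformations (quaternions in ROS (x, y, z, w) order):

import numpy as np
import tf.transformations as tft

def qv_mult(quat, vec):
    """Rotate a 3-vector by a quaternion (x, y, z, w): q * (v, 0) * q_conjugate."""
    v = list(vec) + [0.0]   # promote the vector to a pure quaternion
    rotated = tft.quaternion_multiply(
        tft.quaternion_multiply(quat, v), tft.quaternion_conjugate(quat))
    return np.asarray(rotated[:3])

# With the identity orientation [0, 0, 0, 1] and unit_vector=[1, 0, 0],
# direction is [1, 0, 0], so the pre-grasp sits offset_dist behind the grasp
# along -x in the world frame.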
Code example #8
    def plan(self, pose_input):
        """ find a plan to the position
        Params
        ---
        pose_input: The pose to plan to. `string` or `Pose`
        Returns
        ---
        1 if plan succeeded 0 otherwise
        """
        if isinstance(pose_input, basestring):
            pose = Pose.load(pose_input)
        elif isinstance(pose_input, Pose):
            pose = pose_input
        else:
            raise ValueError(
                "pose_input must be a pose name string or a Pose instance")

        self.group.clear_pose_targets()

        self.group.set_pose_target(pose.ros_message)
        rospy.loginfo("Planning to {}".format(pose))
        self.group.set_start_state_to_current_state()
        rospy.sleep(1)
        self.solved_plan = self.group.plan(
        )  #automatically displays trajectory
        if self.solved_plan is None:
            rospy.logerr("Plan not solved")
            return None

        print("Points len ", len(self.solved_plan.joint_trajectory.points))
        cur_joints = list(
            self.solved_plan.joint_trajectory.points[-1].positions)
        posefk = self.fk(cur_joints).pose_stamped[0].pose
        final_pose_res = Pose(
            [posefk.position.x, posefk.position.y, posefk.position.z], [
                posefk.orientation.x, posefk.orientation.y,
                posefk.orientation.z, posefk.orientation.w
            ])
        return pose, final_pose_res
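
The (goal pose, FK pose of the last trajectory point) pair returned above lends itself to a quick sanity check that the plan actually ends at the requested position. A sketch, assuming a planner instance of the class above and that Pose exposes a numpy-compatible position as used throughout these examples; the saved pose name and tolerance are hypothetical.

import numpy as np
import rospy

goal_pose, reached_pose = planner.plan("pregrasp")   # "pregrasp" is a hypothetical saved pose
position_error = np.linalg.norm(
    np.asarray(goal_pose.position) - np.asarray(reached_pose.position))
if position_error > 0.01:   # 1 cm tolerance, arbitrary
    rospy.logwarn("plan end point is {:.3f} m from the goal".format(position_error))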
Code example #9
    def _offset_hand(self, grasp_pose, offset_dist=0.1):
        """ Find the pre grasp pose by offsetting the hand backwards from its current position
        grasp_pose: the pose of the grasp location
        offset_dist: the amount to offset off the object
        Returns
        ---
        pre_grasp_pose: the offsetted pose of the object
        """
        # use the unit z vector because that's the direction out of the hand
        unit_z_vector = np.asarray([0, 0, 1])
        direction = spacial_location.qv_mult(grasp_pose.orientation[0:4],
                                             unit_z_vector)
        rospy.loginfo("grasp_pose.position: {}".format(grasp_pose.position))

        grasp_pose.show_position_marker(ident=1, label="grasp pose")

        pre_grasp_position = grasp_pose.position - direction * offset_dist

        pre_grasp_pose = Pose(pre_grasp_position, grasp_pose.orientation)
        pre_grasp_pose.show_position_marker(ident=2, label="pregrasp pose")

        return pre_grasp_pose
Code example #10
def test_offset_hand():
    rospy.sleep(5)
    grasp_pose = Pose([0.5, 0.5, 0.5], [0, 0, 0, 1])
    planner = ARTrackPlanner()
    planner._offset_hand(grasp_pose)
Code example #11
class GQCNNPlanner(GraspPlanner):
    """ pass an image to the GQCNN
    """
    def __init__(self, camera_intrinsics, config):
        # wait for Grasp Planning Service and create Service Proxy
        rospy.loginfo("Waiting for GQCNN to spin up")
        rospy.wait_for_service('plan_gqcnn_grasp')
        self.plan_grasp = rospy.ServiceProxy('plan_gqcnn_grasp',
                                             GQCNNGraspPlanner)
        rospy.loginfo("GQCNN service Initialized")

        self.config = config
        # get camera intrinsics
        self.camera_intrinsics = camera_intrinsics
        self.listener = tf.TransformListener()

    def _bbox_to_msg(self, bbox):
        """
        Params
        ---
        bbox: numpy array [minX, minY, maxX, maxY] in pixels around the image 
        Returns
        ---
        a bondingBox message type
        """
        boundingBox = BoundingBox()
        boundingBox.minX = bbox[0]
        boundingBox.minY = bbox[1]
        boundingBox.maxX = bbox[2]
        boundingBox.maxY = bbox[3]
        return boundingBox

    def _get_new_pose(self, pose_stamped, target_frame="/world"):
        """ Transform a pose from its frame to the desired frame
        Params
        ----
        pose_stamped: A ros stamped pose with its reference frame specified
        target_frame: The desired frame of the robot
        Returns
        ----
        pose_world_stamped: The pose in the world frame
        """
        self.listener.waitForTransform(target_frame,
                                       self.camera_intrinsics.frame,
                                       rospy.Time(0), rospy.Duration(4.0))
        try:
            pose_world_stamped = self.listener.transformPose(
                target_frame, pose_stamped)
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logerr(
                "tf was not able to find a transformation between the world frame and the grasp point"
            )
            return False

        return pose_world_stamped

    def _get_transformation(self, from_frame, to_frame="world"):
        target_frame = to_frame
        source_frame = from_frame
        self.listener.waitForTransform(target_frame, source_frame,
                                       rospy.Time(0), rospy.Duration(4.0))
        try:
            pos, quat_out = self.listener.lookupTransform(
                target_frame, source_frame, rospy.Time(0))
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            rospy.logerr(
                "tf was not able to find a transformation from {} to {}".
                format(from_frame, to_frame))
            return False
        #import pdb; pdb.set_trace()
        quat = [
            quat_out[3], quat_out[0], quat_out[1], quat_out[2]
        ]  # reorder from ROS (x, y, z, w) to the (w, x, y, z) convention RigidTransform expects
        return RigidTransform(quat, pos, from_frame, to_frame)

    def get_grasp_plan(self, bounding_box, color_image, depth_image):
        """ Finds the highest-quality grasp for the object inside the bounding box
        Params
        ---
        bounding_box: numpy array [minX, minY, maxX, maxY] in pixels
        color_image: the color image of the scene
        depth_image: the registered depth image of the scene
        Returns
        ---
        A tuple containing the pre-grasp pose and the grasp pose, both in the world frame
        """
        #grab frames for depth image and color image
        rospy.loginfo("grabbing frames")

        boundingBox = self._bbox_to_msg(bounding_box)

        pyplot.figure()
        pyplot.subplot(2, 2, 1)
        pyplot.title("color image")
        pyplot.imshow(color_image.data)

        pyplot.subplot(2, 2, 2)
        pyplot.title("depth image")
        pyplot.imshow(depth_image.data)

        # inpaint to remove holes
        inpainted_color_image = color_image.inpaint(
            self.config["inpaint_rescale_factor"])
        inpainted_depth_image = depth_image.inpaint(
            self.config["inpaint_rescale_factor"])

        pyplot.subplot(2, 2, 3)
        pyplot.title("inpaint color")
        pyplot.imshow(inpainted_color_image.data)

        pyplot.subplot(2, 2, 4)
        pyplot.title("inpaint depth")
        pyplot.imshow(inpainted_depth_image.data)
        pyplot.show()

        try:
            rospy.loginfo("Sending grasp plan request to gqcnn server")
            planned_grasp_data = self.plan_grasp(inpainted_color_image.rosmsg,
                                                 inpainted_depth_image.rosmsg,
                                                 self.camera_intrinsics.rosmsg,
                                                 boundingBox)

            planned_grasp_pose_msg = planned_grasp_data.grasp.pose
            grasp_success_prob = planned_grasp_data.grasp.grasp_success_prob
            grasp = planned_grasp_data.grasp

            rospy.loginfo("Grasp service request response: {}".format(
                planned_grasp_data))

        except rospy.ServiceException, e:
            rospy.logerr("Service call failed: \n %s" % e)
            raise

        rotation_quaternion = np.asarray([
            grasp.pose.orientation.w, grasp.pose.orientation.x,
            grasp.pose.orientation.y, grasp.pose.orientation.z
        ])
        translation = np.asarray([
            grasp.pose.position.x, grasp.pose.position.y, grasp.pose.position.z
        ])
        T_grasp_camera = RigidTransform(rotation_quaternion, translation,
                                        "grasp", self.camera_intrinsics.frame)

        T_world_camera = self._get_transformation(
            self.camera_intrinsics.frame
        )  #TODO this should be a rigidbody.getfromtransform call

        rotation = np.array([[0, 0, 1], [0, 1, 0], [-1, 0, 0]])
        translation = np.array([0, 0, 0])
        T_grasp_gripper = RigidTransform(rotation,
                                         translation,
                                         from_frame="gripper",
                                         to_frame="grasp")
        T_grasp_world = T_world_camera * T_grasp_camera * T_grasp_gripper

        # import pdb; pdb.set_trace()
        t_quat = [
            T_grasp_world.quaternion[1], T_grasp_world.quaternion[2],
            T_grasp_world.quaternion[3], T_grasp_world.quaternion[0]
        ]
        grasp_pose_world = Pose(T_grasp_world.position, t_quat, frame="/world")

        print("Get pre grasp pose")
        pre_grasp_pose = offset_hand(grasp_pose_world, unit_vector=[0, 0, 1])

        return pre_grasp_pose, grasp_pose_world
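
A usage sketch for the planner above, assuming the ColorImage, DepthImage and CameraIntrinsics types from the Berkeley AUTOLAB perception package (the .inpaint/.rosmsg/.frame attributes used above match that API); the intrinsics file, config value, bounding box and placeholder images are all hypothetical and would come from the real sensor in practice.

import numpy as np
import rospy
from perception import CameraIntrinsics, ColorImage, DepthImage  # assumed package

if __name__ == "__main__":
    rospy.init_node("gqcnn_grasp_demo")
    camera_intrinsics = CameraIntrinsics.load("primesense.intr")  # hypothetical intrinsics file
    config = {"inpaint_rescale_factor": 0.5}                      # assumed config key/value
    planner = GQCNNPlanner(camera_intrinsics, config)

    # Placeholder frames; in practice these come from the RGB-D sensor driver.
    color_image = ColorImage(np.zeros((480, 640, 3), dtype=np.uint8),
                             frame=camera_intrinsics.frame)
    depth_image = DepthImage(np.zeros((480, 640), dtype=np.float32),
                             frame=camera_intrinsics.frame)
    bbox = np.array([200, 150, 440, 330])  # [minX, minY, maxX, maxY] in pixels
    pre_grasp, grasp = planner.get_grasp_plan(bbox, color_image, depth_image)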
Code example #12
def test_offset_hand():
    rospy.sleep(5)
    quat = tf.transformations.random_quaternion()
    print quat
    grasp_pose = Pose([0.5, 0.5, 0.5], quat)
    offset_hand(grasp_pose)