Example No. 1
def drop_pose_transform(binId, rel_pose, BoxBody, place_pose, viz_pub, listener, br):
    #~define gripper home orientation
    base_pose = [0., 0., 0., 0., 1., 0., 0.]  #~goarc hand pose
    matrix_base_pose = tfm.quaternion_matrix(base_pose[3:7])
    hand_orient_norm = matrix_base_pose[0:3, 0:3]
    #~initialize arguments for placing functions
    finger_opening = gripper.getGripperopening()
    safety_margin = .035
    #~ get 3d bin and finger points
    finger_pts_3d = collision_detection.collisionHelper.getFingerPoints(
        finger_opening, [0, 0, 0], hand_orient_norm, False)
    bin_pts_3d = collision_detection.collisionHelper.getBinPoints(
        binId=binId, listener=listener, br=br)
    #~convert to 2d
    bin_pts = bin_pts_3d[:, 0:2]
    finger_pts = finger_pts_3d[:, 0:2]
    #~perform coordinate transformation from object to gripper
    (drop_pose,final_object_pose) = pose_transform_precise_placing(rel_pose, BoxBody, place_pose, base_pose, bin_pts_3d, finger_pts, safety_margin, False, viz_pub)
    return drop_pose
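
A minimal caller sketch for the helper above. The node setup, marker topic, and pose values are hypothetical stand-ins (not from the original module), assuming the same tf and visualization_msgs interfaces used elsewhere in these examples.

# Hypothetical usage sketch; assumes a running ROS master and the imports below.
import rospy
import tf
from visualization_msgs.msg import MarkerArray

rospy.init_node('placing_demo')
listener = tf.TransformListener()
br = tf.TransformBroadcaster()
viz_pub = rospy.Publisher('/placing_markers', MarkerArray, queue_size=10)

rel_pose = [0., 0., 0.10, 0., 0., 0., 1.]       # object pose relative to link_6 (illustrative)
place_pose = [1.0, -0.4, 0.10, 0., 1., 0., 0.]  # desired bounding-box pose (illustrative)
BoxBody = [[0.05, 0.05, 0.02], [-0.05, 0.05, 0.02],
           [-0.05, -0.05, 0.02], [0.05, -0.05, 0.02]]  # bounding-box points in the link_6 frame

drop_pose = drop_pose_transform(binId=0, rel_pose=rel_pose, BoxBody=BoxBody,
                                place_pose=place_pose, viz_pub=viz_pub,
                                listener=listener, br=br)
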
Example No. 2
    def run_grasping(self, container=None):
        self.getBestGraspingPoint(container)

        #~Publish grasp proposal information
        grasp_proposal_msgs = Float32MultiArray()
        grasp_proposal_msgs.data = self.grasp_point
        if self.grasp_point is not None:
            self.grasp_proposal_pub.publish(grasp_proposal_msgs)
            comments_msgs = String()
            comments_msgs.data = self.experiment_description
            experiment_type_msgs = String()
            experiment_type_msgs.data = self.experiment_type
            self.experiment_comments_pub.publish(comments_msgs)
            self.experiment_type_pub.publish(experiment_type_msgs)
        else:
            print('There is no grasp proposal')
            self.execution_possible = False
            return

        #~Visualize grasp proposals
        markers_msg = MarkerArray()
        m0 = createDeleteAllMarker('pick_proposals')
        markers_msg.markers.append(m0)
        for i in range(10):
            self.proposal_viz_array_pub.publish(markers_msg)
        ik.visualize_helper.visualize_grasping_proposals(
            self.proposal_viz_array_pub, self.all_grasp_proposals,
            self.listener, self.br)
        ik.visualize_helper.visualize_grasping_proposals(
            self.proposal_viz_array_pub, np.asarray([self.grasp_point]),
            self.listener, self.br, True)

        # Execute the grasp; stop when the gripper is closed
        self.back_img_list = self.capture_images()

        #self.grasp_point=[0.92737001180648804, -0.391, -0.12441360205411911, 0.0, 0.0, -1.0, 0.067681394517421722, 0.059999998658895493, 0.0, 1.0, 0.0, 0.74888890981674194]

        self.grasping_output = grasp(objInput=self.grasp_point,
                                     listener=self.listener,
                                     br=self.br,
                                     isExecute=self.isExecute,
                                     binId=container,
                                     withPause=self.withPause,
                                     viz_pub=self.proposal_viz_array_pub,
                                     recorder=self.gdr)
        num_it = 0
        print('gripper open: ', gripper.getGripperopening())
        is_in_wrong_pose = (gripper.getGripperopening() < 0.04)
        while is_in_wrong_pose:
            '''
            self.retrieve_output = retrieve(listener=self.listener, br=self.br,
                                       isExecute=self.isExecute, binId=container,
                                       withPause=self.withPause, viz_pub=self.proposal_viz_array_pub,
                                       recorder=self.gdr, ws_object=self.weightSensor, isShake=False)
            self.grasping_output = grasp(objInput=self.grasp_point, listener=self.listener, br=self.br,
                                    isExecute=self.isExecute, binId=container, withPause=self.withPause,
                                    viz_pub=self.proposal_viz_array_pub, recorder=self.gdr, open_hand=False)
            '''
            # Motion heuristic
            initial_dz = 0.05
            dz = .022  #should be related to object length
            ik.helper.move_cart(dz=initial_dz)
            rospy.sleep(0.5)
            ik.helper.move_cart(dz=-initial_dz)
            rospy.sleep(0.5)
            gripper.move(90)
            ik.helper.move_cart(dz=dz)
            rospy.sleep(0.5)
            #should be done in the direction of the gripper plane
            dx = 0.038  #.02*num_it
            ik.helper.move_cart_hand(self.listener,
                                     dx=dx,
                                     dy=0,
                                     dz=0,
                                     speedName='fastest')
            rospy.sleep(0.5)
            ik.helper.move_cart(dz=-dz)
            rospy.sleep(0.5)
            ik.helper.move_cart_hand(self.listener,
                                     dx=-dx,
                                     dy=0,
                                     dz=0,
                                     speedName='fastest')
            rospy.sleep(0.5)
            gripper.close()
            rospy.sleep(0.5)
            print('gripper_open', gripper.getGripperopening())

            is_in_wrong_pose = (gripper.getGripperopening() < 0.04)
            if is_in_wrong_pose:
                gripper.move(90)
                rospy.sleep(0.5)
                ik.helper.move_cart(dz=dz)
                rospy.sleep(0.5)
                ik.helper.move_cart_hand(self.listener,
                                         dx=-dx,
                                         dy=0,
                                         dz=0,
                                         speedName='fastest')
                rospy.sleep(0.5)
                ik.helper.move_cart(dz=-dz)
                rospy.sleep(0.5)
                ik.helper.move_cart_hand(self.listener,
                                         dx=dx,
                                         dy=0,
                                         dz=0,
                                         speedName='fastest')
                rospy.sleep(0.5)

                gripper.close()
                rospy.sleep(0.5)
                self.gdr.save_data_recorded = False
                num_it += 1
                is_in_wrong_pose = (
                    gripper.getGripperopening() < 0.04
                )  #and (gripper.getGripperopening() > 0.015)

        self.retrieve_output = retrieve(listener=self.listener,
                                        br=self.br,
                                        isExecute=self.isExecute,
                                        binId=container,
                                        withPause=self.withPause,
                                        viz_pub=self.proposal_viz_array_pub,
                                        recorder=self.gdr,
                                        ws_object=self.weightSensor,
                                        isShake=False)
        self.execution_possible = self.retrieve_output['execution_possible']
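
The regrasp loop above repeats the same lift/shift/close pattern twice. One way to keep it readable is to factor that wiggle into a small helper; this is only a refactoring sketch and assumes the same ik.helper, gripper, and rospy interfaces already used in the example.

# Sketch only: wiggle the hand along its grasp plane and try to close again.
def wiggle_and_regrasp(listener, dz=0.022, dx=0.038, pause=0.5):
    gripper.move(90)                      # open to a fixed width
    rospy.sleep(pause)
    ik.helper.move_cart(dz=dz)            # lift slightly
    rospy.sleep(pause)
    ik.helper.move_cart_hand(listener, dx=dx, dy=0, dz=0, speedName='fastest')
    rospy.sleep(pause)
    ik.helper.move_cart(dz=-dz)           # back down onto the object
    rospy.sleep(pause)
    ik.helper.move_cart_hand(listener, dx=-dx, dy=0, dz=0, speedName='fastest')
    rospy.sleep(pause)
    gripper.close()
    rospy.sleep(pause)
    return gripper.getGripperopening() >= 0.04  # True if the regrasp looks good
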
Example No. 3
def grasp(objInput,
          listener,
          br,
          isExecute=True,
          objId='expo_eraser',
          binId=0,
          flag=0,
          withPause=False,
          rel_pose=None,
          BoxBody=None,
          place_pose=None,
          viz_pub=None,
          is_drop=True,
          update_command=None,
          recorder=None):

    #########################################################
    # fhogan and nikhilcd
    # Description:
    #~2017 picking primitive
    #
    #~Usage
    #~ grasp(objInput, listener, br, isExecute = True, objId = 'cheezit_big_original', flag = 1, withPause = False, rel_pose=None, BoxBody=None, place_pose=None, viz_pub=None):
    #
    #~Parameters:
    #
    #~Inputs:
    # 1) objInput: grasp proposal from vision or RViz (list of 12, or 7, values)
    # 2) listener: tf.TransformListener used to look up transforms
    # 3) br: tf.TransformBroadcaster used to broadcast frames
    # 4) isExecute: execute the robot motion if possible or not? (Bool)
    # 5) objId: name of object (str)
    # 6) binId: bin in which we need to place the object (ignored when flag is 0 or 1)
    # 7) flag: Type of action directed by planning (0:run picking, 1:go to home (camera), 2: drop the object at target location)
    # 8) withPause: Pause between robot motions (Bool)
    # 9) rel_pose: object pose relative to link_6
    # 10) BoxBody: Bounding box points resolved in link 6 frame
    # 11) place_pose: desired position of bounding box
    # 12) viz_pub: ROS publisher for visualization markers
    #
    #~Output:
    # Dictionary of following keys:
    # 1) grasp_possible: Does the object fit in the hand? (True/False)
    # 2) plan_possible: Has the plan succeeded? (True/False)
    # 3) execution_possible: Is there anything in the grasp? (True/False)
    # 4) gripper_opening: gripper opening after grasp attempt (in meters)
    # 5) graspPose: pose of the gripper at grasp (7X1 - position and quaternion)
    # 6) gelsight_data: [] , dummy for now
    #########################################################

    #rospy.logdebug(msg, *args)
    #rospy.loginfo(msg, *args)
    #rospy.logwarn(msg, *args)
    #rospy.logerr(msg, *args)
    #rospy.logfatal(msg, *args)

    ###############################
    ## Print the input variables ##
    ###############################
    rospy.logdebug('[Picking] Starting primitive with flag: %d' % flag)
    rospy.logdebug('[Picking] objInput %s', objInput)
    rospy.logdebug('[Picking] objId %s', objId)
    rospy.logdebug('[Picking] flag %s', flag)
    rospy.logdebug('[Picking] withPause %s', withPause)
    rospy.logdebug('[Picking] rel_pose %s', rel_pose)
    rospy.logdebug('[Picking] BoxBody %s', BoxBody)
    rospy.logdebug('[Picking] place_pose %s', place_pose)

    ########################
    ## Initialize values ##
    ########################
    q_initial = ik.helper.get_joints()
    bin_pose = ik.helper.get_params_yaml('bin' + str(binId) + '_pose')
    spatula_tip_to_tcp_dist = rospy.get_param(
        "/gripper/spatula_tip_to_tcp_dist")
    delta_vision_pose = ik.helper.get_params_yaml('vision_pose_picking')
    vision_pos = np.array(bin_pose[0:3]) + np.array(delta_vision_pose[0:3])
    plans = []
    plans_grasping = []
    plans_grasping2 = []
    plans_guarded = []
    plans_placing = []
    graspPose = []
    plan_possible = False
    execution_possible = False
    gripper_opening = 0.0
    grasp_possible = True
    gelsight_data = []
    collision = False
    final_object_pose = None
    rospy.set_param('is_record', False)
    rospy.set_param('is_contact', False)
    weightSensor = ws_prob.WeightSensor()

    liftoff_pub = rospy.Publisher('/liftoff_time',
                                  std_msgs.msg.String,
                                  queue_size=10,
                                  latch=False)

    def compose_output():
        return {
            'collision': collision,
            'grasp_possible': grasp_possible,
            'plan_possible': plan_possible,
            'execution_possible': execution_possible,
            'gripper_opening': gripper_opening,
            'graspPose': graspPose,
            'gelsight_data': gelsight_data,
            'final_object_pose': final_object_pose
        }

    #########################
    ## Constant parameters ##
    #########################
    UpdistFromBinFast = 0.15  # Margin above object during "move to a location"
    UpdistFromBinGuarded = 0.05  # Margin above object during "move to a location"
    gripperOriHome = [0, 1, 0, 0]
    l1 = 0.0
    l2 = 0.0
    l3 = spatula_tip_to_tcp_dist
    tip_hand_transform = [l1, l2, l3, 0, 0, 0]
    vision_pos[2] = 0.17786528

    ###############################
    ## Picking primitive flag: 0 ##
    ###############################
    if flag == 0:
        #        ik.visualize_helper.visualize_grasping_proposals(viz_pub, np.asarray([objInput]),  listener, br, False)
        #~Define reference frames
        world_X, world_Y, world_Z, tote_X, tote_Y, tote_Z, tote_pose_pos = ik.helper.reference_frames(
            listener=listener, br=br)

        #~get grasp pose and gripper opening from vision
        if len(objInput) == 12:
            graspPos, hand_X, hand_Y, hand_Z, grasp_width = ik.helper.get_picking_params_from_12(
                objInput)
        elif len(objInput) == 7:
            graspPos, hand_X, hand_Y, hand_Z, grasp_width = ik.helper.get_picking_params_from_7(
                objInput, objId, listener, br)

        #~build gripper orientation matrix 3x3
        hand_orient_norm = np.vstack([hand_X, hand_Y, hand_Z])
        hand_orient_norm = hand_orient_norm.transpose()

        #~Grasp relocation script
        hand_orient_quat = ik.helper.mat2quat(hand_orient_norm)
        euler = tf.transformations.euler_from_quaternion(hand_orient_quat)
        theta = euler[2] - np.pi
        colFreePose = collisionFree(graspPos,
                                    binId=binId,
                                    listener=listener,
                                    br=br,
                                    finger_opening=grasp_width,
                                    safety_margin=0.00,
                                    theta=theta)
        graspPos[0:2] = colFreePose[0:2]

        #################################
        ## GENERATE PLANS OF PRIMITIVE ##
        #################################
        #~ Start from home position
        #        if isExecute:
        #            q_initial = collision_free_placing(binId, listener, isSuction = False, with_object = False, hand_orientation=[0,1,0,0], projected_target_position = bin_pose[0:3], isReturn = True)
        #            scorpion.back()

        #~0. Move to a location above the bin, Rotate gripper to grasp orientation
        pregrasp_targetPosition = graspPos
        pregrasp_targetPosition[2] = bin_pose[2] + UpdistFromBinFast
        plan, qf, plan_possible = generatePlan(q_initial,
                                               pregrasp_targetPosition,
                                               hand_orient_quat,
                                               tip_hand_transform,
                                               'superSaiyan',
                                               plan_name='go_safe_bin')
        if plan_possible:
            plans.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~1. Move to to surface of bin
        pregrasp_targetPosition[2] = bin_pose[2] + UpdistFromBinGuarded
        plan, qf, plan_possible = generatePlan(q_initial,
                                               pregrasp_targetPosition,
                                               hand_orient_quat,
                                               tip_hand_transform,
                                               'superSaiyan',
                                               plan_name='go_top_bin')
        if plan_possible:
            plans.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~2. Open gripper
        grasp_plan = EvalPlan('helper.moveGripper(%f, 200)' % grasp_width)
        plans.append(grasp_plan)

        #~ 3. Open spatula
        grasp_plan = EvalPlan('spatula.open()')
        plans.append(grasp_plan)

        grasp_targetPosition = deepcopy(graspPos)
        #        rospy.loginfo('[Picking] Grasp Target %s', grasp_targetPosition)

        #~4. sleep
        sleep_plan = EvalPlan('rospy.sleep(0.2)')
        plans.append(sleep_plan)

        #~5. perform guarded move down
        grasp_targetPosition[2] = bin_pose[2] - rospy.get_param(
            '/tote/height') + 0.000  #~frank hack for safety
        if binId == 0:
            grasp_targetPosition[2] = grasp_targetPosition[2] + 0.005

        plan, qf, plan_possible = generatePlan(q_initial,
                                               grasp_targetPosition,
                                               hand_orient_quat,
                                               tip_hand_transform,
                                               'faster',
                                               guard_on=WeightGuard(
                                                   binId, threshold=100),
                                               plan_name='guarded_pick')
        if plan_possible:
            plans_guarded.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~7. Close spatula
        grasp_plan = EvalPlan('spatula.close()')
        plans_guarded.append(grasp_plan)

        #~8. Close gripper
        grasp_plan = EvalPlan('helper.graspinGripper(%f,%f)' % (800, 50))
        plans_guarded.append(grasp_plan)

        #~9. sleep
        sleep_plan = EvalPlan('rospy.sleep(0.2)')
        plans_guarded.append(sleep_plan)

        #~10. Move to a location above the bin
        plan, qf, plan_possible = generatePlan(q_initial,
                                               vision_pos,
                                               delta_vision_pose[3:7],
                                               tip_hand_transform,
                                               'fastest',
                                               plan_name='retrieve_object')
        if plan_possible:
            plans_grasping.append(plan)
            q_initial = qf
        else:
            return compose_output()

        ################
        ## EXECUTION ###
        ################

        if isExecute and plan_possible:

            executePlanForward(plans, withPause)

            ###We start recording now
            lasers.start(binId)
            recorder.start_recording(
                action='grasping',
                action_id=str(
                    datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")),
                tote_num=binId,
                frame_rate_ratio=5,
                image_size=-1)

            #Execute guarded move
            rospy.set_param('is_record', True)
            executePlanForward(plans_guarded, withPause)
            rospy.set_param('is_record', False)

            #Publish liftoff time
            liftoff_msgs = std_msgs.msg.String()
            liftoff_msgs.data = 'Liftoff (Robot command)'
            liftoff_pub.publish(liftoff_msgs)

            #Execute non-guarded grasp plan move
            executePlanForward(plans_grasping, withPause)

            ###We stop recording
            recorder.stop_recording(save_action=True)
            lasers.stop(binId)

        #~Check if picking success
        low_threshold = 0.0035
        high_threshold = 0.015
        if isExecute and plan_possible:
            rospy.sleep(2)
            gripper_opening = gripper.getGripperopening()
            if gripper_opening > high_threshold:
                rospy.loginfo('[Picking] ***************')
                rospy.loginfo(
                    '[Picking] Pick Successful (Gripper Opening Test)')
                rospy.loginfo('[Picking] ***************')
                execution_possible = True
            else:
                rospy.loginfo('[Picking] ***************')
                rospy.loginfo(
                    '[Picking] Pick Inconclusive (Gripper Opening Test)')
                rospy.loginfo('[Picking] ***************')
                execution_possible = None

        elif not isExecute:
            execution_possible = plan_possible

        return compose_output()

    #############
    ## Placing ##
    #############
    elif flag == 2:
        #if primitive called without placing arguments, go to center of bin
        if rel_pose is None:
            drop_pose = ik.helper.get_params_yaml('bin' + str(binId) + '_pose')
            drop_pose[3:7] = gripperOriHome
        else:
            drop_pose = ik.helper.drop_pose_transform(binId, rel_pose, BoxBody,
                                                      place_pose, viz_pub,
                                                      listener, br)

        #~ Adjust height according to weight
        if is_drop:
            drop_offset = 0.15
        else:
            drop_offset = 0.002
        #~set drop pose target z position to be bottom of bin
        drop_pose[2] = bin_pose[2] - rospy.get_param(
            'tote/height') + drop_offset

        #~Predrop: go to top middle bin surface fast without guarded move
        predrop_pos = np.array(drop_pose[0:3])
        predrop_pos[2] = bin_pose[2] + 0.05

        ######################
        ##  Generate plans ##
        ######################
        #~move safe to placing bin number
        rospy.logdebug(
            '[Picking] Collision free placing to binId %s and position %s',
            binId, predrop_pos)
        #        if isExecute:
        #            q_initial = collision_free_placing(binId, listener, obj_ID=objId, BoxBody=BoxBody, isSuction = False,with_object = True, hand_orientation=drop_pose[3:7],projected_target_position = predrop_pos, isReturn = True)
        #            if update_command is not None:
        #                update_command.execute()

        #~0.go up to avoid collision with tote walls
        current_pose = ik.helper.get_tcp_pose(listener, tcp_offset=l3)
        current_pose[2] = current_pose[2] + 0.15
        plan, qf, plan_possible = generatePlan(q_initial,
                                               current_pose[0:3],
                                               current_pose[3:7],
                                               tip_hand_transform,
                                               'faster',
                                               plan_name='go_up')
        if plan_possible:
            plans.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~0.1.fast motion to bin surface high
        predrop_pos_high = np.copy(predrop_pos)  # copy so predrop_pos is not mutated here
        predrop_pos_high[2] = current_pose[2]
        plan, qf, plan_possible = generatePlan(q_initial,
                                               predrop_pos_high,
                                               drop_pose[3:7],
                                               tip_hand_transform,
                                               'faster',
                                               plan_name='go_bin_surface_high')
        if plan_possible:
            plans.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~1.fast motion to bin surface
        predrop_pos[2] = bin_pose[2] + 0.05
        plan, qf, plan_possible = generatePlan(q_initial,
                                               predrop_pos,
                                               drop_pose[3:7],
                                               tip_hand_transform,
                                               'faster',
                                               plan_name='go_bin_surface')
        if plan_possible:
            plans.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~2. Guarded move down slow
        plan, qf, plan_possible = generatePlan(q_initial,
                                               drop_pose[0:3],
                                               drop_pose[3:7],
                                               tip_hand_transform,
                                               'fast',
                                               guard_on=WeightGuard(binId),
                                               backwards_speed='superSaiyan',
                                               plan_name='guarded_drop')
        if plan_possible:
            plans_placing.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~3. open gripper
        grasp_plan = EvalPlan('helper.moveGripper(0.110, 200)')
        plans_placing.append(grasp_plan)

        #~4. open spatula
        grasp_plan = EvalPlan('spatula.open()')
        plans_placing.append(grasp_plan)

        #~5. go to predrop_pos
        plan, qf, plan_possible = generatePlan(q_initial,
                                               predrop_pos[0:3],
                                               drop_pose[3:7],
                                               tip_hand_transform,
                                               'superSaiyan',
                                               plan_name='predrop_pos')
        if plan_possible:
            plans_placing.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~6. go to final pose
        final_pos = bin_pose[0:3]
        final_pos[2] = bin_pose[2] + 0.3
        plan, qf, plan_possible = generatePlan(q_initial,
                                               final_pos,
                                               drop_pose[3:7],
                                               tip_hand_transform,
                                               'superSaiyan',
                                               plan_name='final_pose')
        if plan_possible:
            plans_placing.append(plan)
            q_initial = qf
        else:
            return compose_output()

        #~ Execute plans
        if plan_possible:
            if isExecute:
                executePlanForward(plans, withPause)

                #We start recording now
                lasers.start(binId)
                recorder.start_recording(
                    action='placing',
                    action_id=str(
                        datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")),
                    tote_num=binId,
                    frame_rate_ratio=5,
                    image_size=-1)

                rospy.set_param('is_record', True)
                executePlanForward(plans_placing, withPause)
                rospy.set_param('is_record', False)

                recorder.stop_recording(save_action=True)
                lasers.stop(binId)

            execution_possible = True
            return compose_output()
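
A usage sketch based on the docstring above, planning only (isExecute=False) so no recorder or laser hardware is needed. grasp_proposal, listener, br, viz_pub, rel_pose, BoxBody, and place_pose are assumed to be set up as in the planner examples in this section.

# Sketch only: plan a pick (flag=0) and then a placement (flag=2) without executing.
pick_result = grasp(objInput=grasp_proposal,      # list of 12 (or 7) values from vision
                    listener=listener, br=br,
                    isExecute=False, binId=0, flag=0,
                    withPause=False, viz_pub=viz_pub)
if pick_result['plan_possible']:
    place_result = grasp(objInput=grasp_proposal,
                         listener=listener, br=br,
                         isExecute=False, binId=1, flag=2,    # flag 2: drop at target bin
                         rel_pose=rel_pose, BoxBody=BoxBody,  # object pose / bbox in link_6
                         place_pose=place_pose, viz_pub=viz_pub)
    print(place_result['plan_possible'])
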
Example No. 4
def flip_image2(data, topic):
    #flip image using opencv
    bridge = CvBridge()
    cv_image = bridge.imgmsg_to_cv2(data, "bgr8")
    # flip_cv_image = cv2.flip(cv_image,1)
    flip_cv_image = cv2.flip(cv_image, 0)
    outImage = flip_cv_image
    # cv2.imwrite('/home/mcube/background_1.png',outImage)
    if rospy.get_param('/is_high_viz', True):
        back_cv_image = cv2.imread('/home/mcube/background_1.png', 1)
        patch_cv_image, initial_img, contact_img = crop_contact(back_cv_image,
                                                                flip_cv_image,
                                                                gel_id=2,
                                                                is_zeros=True)
        COM_pos = get_center_of_mass(contact_img)
        COM_pos = np.nan_to_num(COM_pos)
        print(COM_pos)

        #publish flipped image
        # Convert uint8 to float
        foreground = initial_img
        #foreground = foreground[:-140,55:-70]  #im_crop_grasp = img_grasp[0:-120,135:-135]
        img = contact_img * 255

        # threshold image
        ret, threshed_img = cv2.threshold(
            cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), 127, 255, cv2.THRESH_BINARY)
        # find contours and get the external one
        image, contours, hier = cv2.findContours(threshed_img, cv2.RETR_TREE,
                                                 cv2.CHAIN_APPROX_SIMPLE)

        # with each contour, draw boundingRect in green
        # a minAreaRect in red and
        # a minEnclosingCircle in blue
        biggest_area = 0
        biggest_rect = None
        for c in contours:

            # get the min area rect
            rect = cv2.minAreaRect(c)
            area = float(rect[1][0]) * float(rect[1][1])
            if biggest_area < area:
                biggest_rect = rect
                biggest_area = area
        if biggest_rect:
            box = cv2.boxPoints(biggest_rect)
            # convert all coordinate values from float to int
            box = np.int0(box)
            # draw the rotated (min-area) rectangle in red
            cv2.drawContours(img, [box], 0, (0, 0, 255), 2)
            cv2.rectangle(
                img,
                (int(biggest_rect[0][0] - 10), int(biggest_rect[0][1] - 10)),
                (int(biggest_rect[0][0] + 10), int(biggest_rect[0][1] + 10)),
                (0, 255, 0), -1)
            cv2.rectangle(img, (int(COM_pos[1] - 10), int(COM_pos[0] - 10)),
                          (int(COM_pos[1] + 10), int(COM_pos[0] + 10)),
                          (0, 0, 255), -1)

            outimg = cv2.drawContours(img, contours, -1, (255, 255, 0), 2)
            # Blending weight for overlaying the contour image on the foreground
            alpha = 0.8

            #img = cv2.resize(img, (foreground.shape[1], foreground.shape[0]))
            # Add the masked foreground and background.
            outImage = cv2.addWeighted(foreground, alpha, img, 1 - alpha, 0)

            #rotation angle in degree
            rotated = cv2.pyrDown(
                cv2.imread('/home/mcube/vertical_nut.png',
                           cv2.IMREAD_UNCHANGED))
            rows = rotated.shape[0]
            cols = rotated.shape[1]

            if (gripper.getGripperopening() > 0.03):
                if biggest_rect[1][0] > biggest_rect[1][1]:
                    M = cv2.getRotationMatrix2D((cols / 2, rows / 2),
                                                rect[2] + 90, 1)
                else:
                    M = cv2.getRotationMatrix2D((cols / 2, rows / 2), rect[2],
                                                1)
                rotated = cv2.warpAffine(rotated, M, (cols, rows))
            else:
                rotated = cv2.imread('/home/mcube/hor2_nut.png',
                                     cv2.IMREAD_UNCHANGED)

            rotated = cv2.resize(rotated,
                                 (outImage.shape[1], outImage.shape[0]))
            outImage = np.concatenate([outImage, rotated], axis=1)

        else:
            outImage = foreground

    flip_image_pub = rospy.Publisher(topic,
                                     sensor_msgs.msg.Image,
                                     queue_size=10)
    flip_image_pub.publish(bridge.cv2_to_imgmsg(outImage, "bgr8"))
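
One portability note on the example above: the three-value unpacking of cv2.findContours matches OpenCV 3.x, while OpenCV 4 returns only (contours, hierarchy). A small version-agnostic wrapper, offered as a sketch rather than part of the original node:

# Works with both OpenCV 3.x and 4.x return signatures of findContours.
def find_contours_compat(binary_img):
    result = cv2.findContours(binary_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    if len(result) == 3:      # OpenCV 3.x: (image, contours, hierarchy)
        _, contours, hierarchy = result
    else:                     # OpenCV 4.x: (contours, hierarchy)
        contours, hierarchy = result
    return contours, hierarchy
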
Example No. 5
    def run_grasping(self, container=None):
        self.getBestGraspingPoint(container)
        grasp_proposal_msgs = Float32MultiArray()
        grasp_proposal_msgs.data = self.grasp_point
        grasp_noise_msgs = Float32MultiArray()
        grasp_noise_msgs.data = self.grasp_noise

        if self.grasp_point is not None:
            self.grasp_proposal_pub.publish(grasp_proposal_msgs)
            self.grasp_noise_pub.publish(grasp_noise_msgs)
        comments_msgs = String()
        comments_msgs.data = self.experiment_description
        experiment_type_msgs = String()
        experiment_type_msgs.data = self.experiment_type
        self.experiment_comments_pub.publish(comments_msgs)
        self.experiment_type_pub.publish(experiment_type_msgs)

        if self.grasp_point is None:
            print(
                'It was supposed to do grasping, but there is no grasp proposal'
            )
            self.execution_possible = False
            return
        if self.visionType != 'real':
            self.callFakeGrasping(prob=0.8, container=container)
            return

        #~visualize grasp proposals
        markers_msg = MarkerArray()
        m0 = createDeleteAllMarker('pick_proposals')
        markers_msg.markers.append(m0)
        for i in range(10):
            self.proposal_viz_array_pub.publish(markers_msg)
        ik.visualize_helper.visualize_grasping_proposals(
            self.proposal_viz_array_pub, self.all_grasp_proposals,
            self.listener, self.br)
        ik.visualize_helper.visualize_grasping_proposals(
            self.proposal_viz_array_pub, np.asarray([self.grasp_point]),
            self.listener, self.br, True)

        # Execute the grasp; stop when the gripper is closed
        if self.is_control:
            back_img_list = self.controller.capture_images()

        self.grasping_output = grasp(objInput=self.grasp_point,
                                     listener=self.listener,
                                     br=self.br,
                                     isExecute=self.isExecute,
                                     binId=container,
                                     withPause=self.withPause,
                                     viz_pub=self.proposal_viz_array_pub,
                                     recorder=self.gdr)

        self.gdr.save_item(item_name='grasp_noise_std_dev',
                           data=self.grasp_std)
        self.gdr.save_item(item_name='grasp_noise', data=self.grasp_noise)

        if self.is_control:
            if gripper.getGripperopening() > 0.017:
                print('[Planner]: ', gripper.getGripperopening())
                # We pause the recorder to save data
                self.gdr.pause_recording()

                #find new and improved grasp points
                best_grasp_dict = self.controller.control_policy(
                    back_img_list, smirror=self.smirror)
                # self.controller.visualize_actions(with_CAM = False)
                # self.controller.visualize_best_action(with_CAM = False)
                #save network information action_dict and best_action_dict
                self.gdr.save_item(item_name='action_dict',
                                   data=self.controller.action_dict)
                self.gdr.save_item(item_name='best_action_dict',
                                   data=self.controller.best_action_dict)
                # We unpause the recorder
                self.gdr.replay_recording()
                # Go to the new grasp point
                self.grasping_output = grasp_correction(
                    self.grasp_point, best_grasp_dict['delta_pos'],
                    self.listener, self.br)
                self.gdr.save_data_recorded = True
            else:
                self.gdr.save_data_recorded = False

        #frank hack for double grasping
        if self.experiment_type == 'is_data_collection':
            if gripper.getGripperopening() > 0.017:
                self.grasping_output = grasp_correction(
                    self.grasp_point, np.array([0, 0, 0]), self.listener,
                    self.br)
                self.gdr.save_data_recorded = True
            else:
                self.gdr.save_data_recorded = False

        self.retrieve_output = retrieve(listener=self.listener,
                                        br=self.br,
                                        isExecute=self.isExecute,
                                        binId=container,
                                        withPause=self.withPause,
                                        viz_pub=self.proposal_viz_array_pub,
                                        recorder=self.gdr,
                                        ws_object=self.weightSensor)
        self.execution_possible = self.retrieve_output['execution_possible']
Example No. 6
def retrieve(listener,
             br,
             isExecute=True,
             objId='expo_eraser',
             binId=0,
             withPause=False,
             viz_pub=None,
             recorder=None,
             ws_object=None,
             isShake=True):

    liftoff_pub = rospy.Publisher('/liftoff_time',
                                  std_msgs.msg.String,
                                  queue_size=10,
                                  latch=False)

    def compose_output():
        return {
            'collision': collision,
            'grasp_possible': grasp_possible,
            'plan_possible': plan_possible,
            'execution_possible': execution_possible,
            'gripper_opening': gripper_opening,
            'graspPose': graspPose,
            'gelsight_data': gelsight_data,
            'final_object_pose': final_object_pose
        }

    q_initial = ik.helper.get_joints()
    bin_pose = ik.helper.get_params_yaml('bin' + str(binId) + '_pose')
    spatula_tip_to_tcp_dist = rospy.get_param(
        "/gripper/spatula_tip_to_tcp_dist")
    l1 = 0.0
    l2 = 0.0
    l3 = spatula_tip_to_tcp_dist
    tip_hand_transform = [l1, l2, l3, 0, 0, 0]
    delta_vision_pose = ik.helper.get_params_yaml('vision_pose_picking')
    delta_vision_pose[3:7] = np.array([0, 1, 0, 0])
    vision_pos = np.array(bin_pose[0:3]) + np.array(delta_vision_pose[0:3])
    vision_pos[2] = 0.17786528 - 0.1
    plans = []
    plans_grasping = []
    graspPose = []
    plan_possible = False
    execution_possible = False
    gripper_opening = 0.0
    grasp_possible = True
    gelsight_data = []
    collision = False
    final_object_pose = None
    rospy.set_param('is_record', False)
    rospy.set_param('is_contact', False)

    # ~10. Move to a location above the bin
    plan, qf, plan_possible = generatePlan(q_initial,
                                           vision_pos,
                                           delta_vision_pose[3:7],
                                           tip_hand_transform,
                                           'fastest',
                                           plan_name='retrieve_object')
    if plan_possible:
        plans_grasping.append(plan)
        q_initial = qf
    else:
        return compose_output()

    if isExecute and plan_possible:
        #Publish liftoff time
        liftoff_msgs = std_msgs.msg.String()
        liftoff_msgs.data = 'Liftoff (Robot command)'
        liftoff_pub.publish(liftoff_msgs)

        #Execute non-guarded grasp plan move
        executePlanForward(plans_grasping, withPause)

    #~Check if picking success
    low_threshold = 0.0035
    high_threshold = 0.027
    #shake robot
    rospy.sleep(1.5)
    if isShake and gripper.getGripperopening() > high_threshold and isExecute:
        # We initialize drop listener
        ws_object.start_listening_for_drop(bin_num=binId)

        ik.ik.shake()
        rospy.sleep(2.)

        # We stop drop listener
        ws_object.stop_listening_for_drop()

    ### We stop recording
    if recorder is not None:
        recorder.stop_recording(save_action=True)
        lasers.stop(binId)

    if isExecute and plan_possible:
        rospy.sleep(2)
        gripper_opening = gripper.getGripperopening()
        if gripper_opening > high_threshold:
            rospy.loginfo('[Picking] ***************')
            rospy.loginfo('[Picking] Pick Successful (Gripper Opening Test)')
            rospy.loginfo('[Picking] ***************')
            execution_possible = True  #temporary hack
        else:
            rospy.loginfo('[Picking] ***************')
            rospy.loginfo('[Picking] Pick Inconclusive (Gripper Opening Test)')
            rospy.loginfo('[Picking] ***************')
            execution_possible = None

    elif not isExecute:
        execution_possible = plan_possible

    return compose_output()
Example No. 7
    def run_grasping(self, container=None):
        self.getBestGraspingPoint(container)
        grasp_proposal_msgs = Float32MultiArray()
        grasp_proposal_msgs.data = self.grasp_point
        grasp_noise_msgs = Float32MultiArray()
        grasp_noise_msgs.data = self.grasp_noise

        if self.grasp_point is not None:
            self.grasp_proposal_pub.publish(grasp_proposal_msgs)
            self.grasp_noise_pub.publish(grasp_noise_msgs)
        comments_msgs = String()
        comments_msgs.data = self.experiment_description
        experiment_type_msgs = String()
        experiment_type_msgs.data = self.experiment_type
        self.experiment_comments_pub.publish(comments_msgs)
        self.experiment_type_pub.publish(experiment_type_msgs)

        if self.grasp_point is None:
            print(
                'It was supposed to do grasping, but there is no grasp proposal'
            )
            self.execution_possible = False
            return
        if self.visionType != 'real':
            self.callFakeGrasping(prob=0.8, container=container)
            return

        #~visualize grasp proposals
        markers_msg = MarkerArray()
        m0 = createDeleteAllMarker('pick_proposals')
        markers_msg.markers.append(m0)
        for i in range(10):
            self.proposal_viz_array_pub.publish(markers_msg)
        ik.visualize_helper.visualize_grasping_proposals(
            self.proposal_viz_array_pub, self.all_grasp_proposals,
            self.listener, self.br)
        ik.visualize_helper.visualize_grasping_proposals(
            self.proposal_viz_array_pub, np.asarray([self.grasp_point]),
            self.listener, self.br, True)

        # Execute the grasp; stop when the gripper is closed
        if self.is_control:
            back_img_list = self.controller.capture_images()

        #self.grasp_point=[0.92737001180648804, -0.391, -0.12441360205411911, 0.0, 0.0, -1.0, 0.067681394517421722, 0.059999998658895493, 0.0, 1.0, 0.0, 0.74888890981674194]
        self.background_images = self.capture_images()

        self.grasping_output = grasp(objInput=self.grasp_point,
                                     listener=self.listener,
                                     br=self.br,
                                     isExecute=self.isExecute,
                                     binId=container,
                                     withPause=self.withPause,
                                     viz_pub=self.proposal_viz_array_pub,
                                     recorder=self.gdr)

        if self.is_record == True:
            self.gdr.save_item(item_name='grasp_noise_std_dev',
                               data=self.grasp_std)
            self.gdr.save_item(item_name='grasp_noise', data=self.grasp_noise)
        is_in_wrong_pose = (gripper.getGripperopening() < 0.03)  #raw_input()
        if is_in_wrong_pose:
            self.retrieve_output = retrieve(
                listener=self.listener,
                br=self.br,
                isExecute=self.isExecute,
                binId=container,
                withPause=self.withPause,
                viz_pub=self.proposal_viz_array_pub,
                recorder=self.gdr,
                ws_object=self.weightSensor)
            gripper.open()
            gripper.close()
            self.grasping_output = grasp(objInput=self.grasp_point,
                                         listener=self.listener,
                                         br=self.br,
                                         isExecute=self.isExecute,
                                         binId=container,
                                         withPause=self.withPause,
                                         viz_pub=self.proposal_viz_array_pub,
                                         recorder=self.gdr)

        if self.is_control:
            if gripper.getGripperopening() > 0.017:
                print('[Planner]: ', gripper.getGripperopening())
                # We pause the recorder to save data
                #self.gdr.pause_recording()
                #find new and improved grasp points
                best_grasp_dict, initial_score = self.controller.control_policy(
                    back_img_list,
                    smirror=self.smirror,
                    use_COM=self.use_COM,
                    use_raw=self.use_raw)

                # self.controller.visualize_actions(with_CAM = False)
                #self.controller.visualize_best_action(with_CAM = False)
                #pdb.set_trace()
                #save network information action_dict and best_action_dict
                #WE UNPAUSE THE RECORDER
                #self.gdr.replay_recording()
                self.gdr.save_item(item_name='initial_score',
                                   data=initial_score)
                self.gdr.save_item(item_name='best_grasp_dict',
                                   data=best_grasp_dict)
                self.gdr.save_item(item_name='action_dict',
                                   data=self.controller.action_dict)
                self.gdr.save_item(item_name='best_action_dict',
                                   data=self.controller.best_action_dict)
                # Go to the new grasp point
                self.grasping_output = grasp_correction(
                    self.grasp_point, best_grasp_dict['delta_pos'],
                    self.listener, self.br)
                second_best_grasp_dict, final_score = self.controller.control_policy(
                    back_img_list,
                    smirror=self.smirror,
                    use_COM=self.use_COM,
                    use_raw=self.use_raw)
                self.gdr.save_item(item_name='final_score', data=final_score)
                self.gdr.save_item(item_name='second_best_grasp_dict',
                                   data=second_best_grasp_dict)
                self.gdr.save_data_recorded = True

            else:
                self.gdr.save_data_recorded = False
        #frank hack for double grasping
        if self.experiment_type == 'is_data_collection':
            if gripper.getGripperopening() > 0.017:
                # self.grasping_output = grasp_correction(self.grasp_point, np.array([0,0,0]), self.listener, self.br)
                self.gdr.save_data_recorded = True
            else:
                self.gdr.save_data_recorded = False

        self.retrieve_output = retrieve(listener=self.listener,
                                        br=self.br,
                                        isExecute=self.isExecute,
                                        binId=container,
                                        withPause=self.withPause,
                                        viz_pub=self.proposal_viz_array_pub,
                                        recorder=self.gdr,
                                        ws_object=self.weightSensor)
        self.execution_possible = self.retrieve_output['execution_possible']