def Rot_k_phi(self, k_vec, phi): #Eigen::Vector3d k_vec,float phi
	    #Eigen::Matrix3d R_k_phi;
	    kx = k_vec[0]
	    ky = k_vec[1]
	    kz = k_vec[2]
	    
	    #K_nrow = Vector(0.0, -kz, ky)
	    #K_trow = Vector(kz, 0.0, -kx)
	    #K_brow = Vector(-ky, kx, 0.0)

	    N = 3
	    K = Rotation(0.0,-kz,ky, kz,0.0,-kx, -ky,kx,0.0)
	    print "K",K
	    Ksquare = K*K
	    print "K^2",Ksquare
	    I = Rotation()
	    print "I",I
	    K_Frame = Frame(K)
	    print "K_Frame",K_Frame
	    K_Frame_numpy = posemath.toMatrix(K_Frame)
	    print "K_Frame_numpy",K_Frame_numpy
	    K_numpy = K_Frame_numpy[0:3,0:3]
	    print "K_numpy",K_numpy
	    I_numpy = np.identity(N)
	    print "I_numpy",I_numpy
	    K_square_Frame = Frame(Ksquare)
	    print "K_square_Frame",K_square_Frame
	    K_square_Frame_numpy = posemath.toMatrix(K_square_Frame)
	    print "K_square_Frame",K_square_Frame
	    K_square_numpy = K_square_Frame_numpy[0:3,0:3]
	    I_numpy = np.identity(N)
	    print "math.sin(phi)",(math.sin(phi))
	    print "1-math.cos(phi)",(1-math.cos(phi))
	    print "K_numpy*K_numpy",K_numpy*K_numpy
	    R_k_phi_numpy = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*K_numpy*K_numpy
	    print "R_k_phi_numpy",R_k_phi_numpy
	    R_k_phi_numpy_FrameTemp = np.c_[R_k_phi_numpy, np.zeros(N)]  # zero translation column
	    print "R_k_phi_numpy_FrameTemp",R_k_phi_numpy_FrameTemp
	    R_k_phi_numpy_Frame = np.r_[R_k_phi_numpy_FrameTemp,[[0.0, 0.0, 0.0, 1.0]]]  # homogeneous bottom row
	    print "R_k_phi_numpy_Frame",R_k_phi_numpy_Frame
	    R_k_phi_Frame = posemath.fromMatrix(R_k_phi_numpy_Frame)
	    print "R_k_phi_Frame",R_k_phi_Frame
	    R_k_phi = R_k_phi_Frame.M
	    print  "R_k_phi",R_k_phi



	    R_k_phi_numpy_square = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*K_square_numpy
	    print "R_k_phi_numpy_square",R_k_phi_numpy_square
	    R_k_phi_numpy_FrameTemp_square = np.c_[R_k_phi_numpy_square, np.zeros(N)]  # zero translation column
	    print "R_k_phi_numpy_FrameTemp_square",R_k_phi_numpy_FrameTemp_square
	    R_k_phi_numpy_Frame_square = np.r_[R_k_phi_numpy_FrameTemp_square,[[0.0, 0.0, 0.0, 1.0]]]  # homogeneous bottom row
	    print "R_k_phi_numpy_Frame_square",R_k_phi_numpy_Frame_square
	    R_k_phi_Frame_square = posemath.fromMatrix(R_k_phi_numpy_Frame_square)
	    print "R_k_phi_Frame",R_k_phi_Frame_square
	    R_k_phi_square = R_k_phi_Frame_square.M
	    print  "R_k_phi",R_k_phi_square


	    return R_k_phi
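The routine above evaluates Rodrigues' formula, R = I + sin(phi)*K + (1 - cos(phi))*K^2, two ways for comparison. A minimal NumPy-only sketch of the same formula (a hypothetical standalone helper, not part of the class above):

import numpy as np

def rot_k_phi_numpy(k_vec, phi):
    # Rodrigues' rotation formula for a unit axis k_vec and angle phi (radians)
    kx, ky, kz = k_vec
    K = np.array([[0.0, -kz,  ky],
                  [ kz, 0.0, -kx],
                  [-ky,  kx, 0.0]])
    # K.dot(K) is the matrix square; elementwise K*K would be wrong here
    return np.identity(3) + np.sin(phi) * K + (1.0 - np.cos(phi)) * K.dot(K)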
Example #2
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. plan a path to a place 15cm back from the new pose
        """
        print 'regular move:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        handGoalInBase = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))

        try:
            hand_tf = pm.toTf(pm.fromMatrix(handGoalInBase))
            bc = tf.TransformBroadcaster()
            now = rospy.Time.now()
            bc.sendTransform(hand_tf[0], hand_tf[1], now, "hand_goal",
                             "armbase")
            self.global_data.listener.waitForTransform("armbase",
                                                       "arm_goal", now,
                                                       rospy.Duration(1.0))
            armtip_robot = self.global_data.listener.lookupTransform(
                'armbase', 'arm_goal', now)
            armGoalInBase = pm.toMatrix(pm.fromTf(armtip_robot))
        except Exception, e:
            handle_fatal('convert_cartesian_world_goal failed: error %s.' % e)
Example #3
    def imageProc(self, image):
        if self.masterWidget is not None:
            return image
        # Get current force
        fmsg = self.camSync.getMsg(self.forceTopic)
        if type(fmsg) is not WrenchStamped:
            return image
        force = [fmsg.wrench.force.x, fmsg.wrench.force.y, fmsg.wrench.force.z]
        force = np.linalg.norm(force)
        targetF = 3  # Newtons
        targetMax = 6  # Newtons
        # Calculate color
        xp = [0, targetF, targetMax]
        fp = [0, 1, 0]
        colorPos = np.interp(force, xp, fp)
        color = colorsys.hsv_to_rgb(colorPos**3 / 3, .8, 1)

        # Get robot pose
        posMsg = self.camSync.getMsg(self.dvrkTopic)
        if type(posMsg) is not PoseStamped:
            return image
        pos = posemath.fromMsg(posMsg.pose)

        if self.drawType == "arrow":
            self.arrowActor.GetProperty().SetColor(color[0], color[1],
                                                   color[2])
            # Calculate pose of arrows
            initialRot = PyKDL.Frame(PyKDL.Rotation.RotY(np.pi / 2),
                                     PyKDL.Vector(0, 0, 0))
            pos = pos * initialRot
            pos = self.cameraTransform.Inverse() * pos
            posMat = posemath.toMatrix(pos)
            posMatTarget = posMat.copy()
            # Scale arrows
            posMat[0:3, 0:3] = posMat[0:3, 0:3] * .025 * targetF
            setActorMatrix(self.targetActor, posMat)
            posMat[0:3, 0:3] = posMat[0:3, 0:3] * force / targetF
            setActorMatrix(self.arrowActor, posMat)

        elif self.drawType == "bar":
            self.forceBar.GetProperty().SetColor(color[0], color[1], color[2])
            # Move background bar
            pos = self.cameraTransform.Inverse() * pos
            pos2 = PyKDL.Frame(PyKDL.Rotation.Identity(), pos.p)
            pos2.M.DoRotZ(np.pi)
            pos2.p = pos2.p + pos2.M.UnitX() * -.015  # + pos2.M.UnitZ() * 0.01
            posMat = posemath.toMatrix(pos2)
            setActorMatrix(self.bar, posMat)
            setActorMatrix(self.greenLine, posMat)
            # Scale color bar
            fp2 = [0, .5, 1]
            scalePos = np.interp(force, xp, fp2)
            posMat[1, 0:3] = posMat[1, 0:3] * scalePos
            setActorMatrix(self.forceBar, posMat)

        return image
def draw_raw_map():
    global package_path
    global arucos_dict
    global BASE_ARUCO_ID
    global rviz_marker_pub
    global aruco_map

    while(rviz_marker_pub.get_num_connections() < 1):
        if rospy.is_shutdown():
            rospy.signal_shutdown('The ROS master is not running')

        # load_aruco_map()
        rospy.logwarn("请创建RVIZ的Subscriber")
        rospy.sleep(5)
    
    aruco_map[BASE_ARUCO_ID] = get_base_tag_pose()
    # Draw the reference (base) marker
    draw_single_marker(BASE_ARUCO_ID, get_base_tag_pose(), alpha=0.8, height=0.05)
    
    # Build an undirected graph over the aruco markers
    graph = aruco_udg(max_dis=rospy.get_param('max_distance'))
    order = graph.BFS()

    for aruco_id in order[1:]:
        # Skip the first entry: it is the base aruco, whose pose is already fixed.
        # Visit the rest in BFS (topological) order.
        parent_id = graph.nodes_dict[aruco_id].parent_id
        # All recorded transforms from the parent marker to the current marker
        pose_list = arucos_dict[parent_id][aruco_id]
        # Weighted-mean filter to obtain a single pose estimate
        pose = weighted_mean_filter(pose_list)
        # Pose of the parent marker in the world frame
        parent_pose = aruco_map[parent_id]

        world2parent_mat = pm.toMatrix(pm.fromMsg(parent_pose))
        parent2child_mat = pm.toMatrix(pm.fromMsg(pose))
        world2child_mat = world2parent_mat.dot(parent2child_mat)
        
        aruco_pose = pm.toMsg(pm.fromMatrix(world2child_mat))

        # Add this aruco marker to the map
        aruco_map[aruco_id] = aruco_pose
        # Draw the current aruco marker
        draw_single_marker(aruco_id, aruco_pose, alpha=0.8, height=0.05)
        

    # Serialize and save the map
    with open(package_path + '/data/aruco_map.bin', 'wb') as f:
        pickle.dump(aruco_map, f)
    # Publish static TF transforms
    for aruco_id, pose in aruco_map.items():
        send_aruco_static_transform(aruco_id, pose)
    
    # Shut down this node
    rospy.signal_shutdown('Drew all aruco markers and published their TF transforms')
Example #5
 def optitrack_cb(self, msg):
   if msg.header.frame_id != self.global_frame:
     return
   for rigid_body in msg.bodies:
     if rigid_body.tracking_valid:
       if rigid_body.id == self.table_id:
         frame = posemath.fromMsg(rigid_body.pose)
         self.Ttable = posemath.toMatrix(frame)
       if rigid_body.id == self.stick_id:
         frame = posemath.fromMsg(rigid_body.pose)
         self.Tshort = posemath.toMatrix(frame)
Example #6
 def align_pose(self, pose):
     objectInCamera = pm.toMatrix(pm.fromMsg(pose))
     ModelRecManager.tf_listener.waitForTransform("/world", "/camera_rgb_optical_frame", rospy.Time(0), rospy.Duration(5))
     cameraInWorld = pm.toMatrix(pm.fromTf(ModelRecManager.tf_listener.lookupTransform("/world", "/camera_rgb_optical_frame",rospy.Time(0))))
     objectInWorld = dot(cameraInWorld, objectInCamera)
     """45 degrees rotated around z axis"""
     objectInWorld[0:3,0:3] = mat([[0.7071,-0.7071,0],
                                   [0.7071,0.7071,0],
                                   [0,0,1]]);
     worldInCamera = linalg.inv(cameraInWorld)
     objectInCameraModified = dot(worldInCamera, objectInWorld)
     return pm.toMsg(pm.fromMatrix(objectInCameraModified))
Example #7
def omni_callback(joint_state):
    global update_pub, last_button_state
    sendTf(marker_tf, '/marker', fixed_frame)
    sendTf(zero_tf, '/base', fixed_frame)
    sendTf(marker_ref, '/marker_ref', fixed_frame)
    sendTf(stylus_ref, '/stylus_ref', fixed_frame)
        
    try:
        update = InteractionCursorUpdate()
        update.pose.header = std_msgs.msg.Header()
        update.pose.header.stamp = rospy.Time.now()
        update.pose.header.frame_id = 'marker_ref'
        if button_clicked and last_button_state == update.GRAB:
            update.button_state = update.KEEP_ALIVE
        elif button_clicked and last_button_state == update.KEEP_ALIVE:
            update.button_state = update.KEEP_ALIVE
        elif button_clicked:
            update.button_state = update.GRAB
        elif last_button_state == update.KEEP_ALIVE:
            update.button_state = update.RELEASE
        else:
            update.button_state = update.NONE
            updateRefs()
        update.key_event = 0

        control_tf = ((0, 0, 0), tf.transformations.quaternion_from_euler(0, 0, control_offset))
        if button_clicked:
            # Get pose corresponding to transform between stylus reference and current position.
            stylus_tf = listener.lookupTransform('/stylus_ref', '/stylus', rospy.Time(0))
            stylus_matrix = pm.toMatrix(pm.fromTf(stylus_tf))
            control_matrix = pm.toMatrix(pm.fromTf(control_tf))
            p = pm.toMsg(pm.fromMatrix(numpy.dot(control_matrix, stylus_matrix)))
        else:
            p = pm.toMsg(pm.fromTf(control_tf))
        
        # Simply scale this up a bit to increase the workspace.
        workspace = 4
        p.position.x = p.position.x * workspace
        p.position.y = p.position.y * workspace
        p.position.z = p.position.z * workspace

        update.pose.pose = p

        last_button_state = update.button_state

        # Publish feedback.
        update_pub.publish(update)
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        rospy.logerr("Couldn't look up transform. These things happen...")
    def calculate_psmb_world_transform(self):
        self.trans_psmb_world_l = self.trans_ecmb_psmb_l.Inverse(
        ) * self.trans_ecmb_ecm * self.trans_camleft_ecm.Inverse(
        ) * self.trans_camleft_world
        self.trans_psmb_world_r = self.trans_ecmb_psmb_l.Inverse(
        ) * self.trans_ecmb_ecm * self.trans_camright_ecm.Inverse(
        ) * self.trans_camright_world

        z = {
            "T_from_left": pm.toMatrix(self.trans_psmb_world_l).tolist(),
            "T_from_right": pm.toMatrix(self.trans_psmb_world_r).tolist()
        }
        with open("./calib_parameters/z_estimation.json", "w") as f:
            z = json.dumps(z, indent=4)
            f.write(z)
def transfrom_between_arucos(camera2ai, camera2aj):
    '''
    Compute the transform between two aruco markers:
    the pose of aruco j expressed with aruco i as the reference frame.
    '''
    # Pose of marker j with the camera as the reference frame, as a 4x4 matrix
    camera2aj_mat = pm.toMatrix(camera2aj)
    # Pose of the camera with marker i as the reference frame
    ai2camera_mat = pm.toMatrix(camera2ai.Inverse())
    # Chain the transforms: pose of aj in the ai frame
    ai2aj_mat = ai2camera_mat.dot(camera2aj_mat)
    # Build the KDL Frame for marker j's pose in marker i's frame
    ai2aj_frame = pm.fromMatrix(ai2aj_mat)
    # Return it as a pose message
    return pm.toMsg(ai2aj_frame)
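A small usage sketch for the helper above; the two camera-frame poses are made-up values, assuming markers i and j were both detected in the same camera image:

import math
import PyKDL

# camera -> marker i and camera -> marker j (illustrative poses)
camera2ai = PyKDL.Frame(PyKDL.Rotation.RotZ(math.pi / 2), PyKDL.Vector(0.3, 0.0, 1.0))
camera2aj = PyKDL.Frame(PyKDL.Rotation.Identity(), PyKDL.Vector(0.5, 0.1, 1.0))
# Pose of marker j expressed in marker i's frame, returned as a geometry_msgs/Pose
ai2aj_pose = transfrom_between_arucos(camera2ai, camera2aj)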
Example #10
def processMarkerFeedback(feedback):
    global marker_tf, marker_name
    marker_name = feedback.marker_name
    marker_offset_tf = ((0, 0, marker_offset), tf.transformations.quaternion_from_euler(0, 0, 0))
    marker_tf = pm.toTf(pm.fromMatrix(numpy.dot(pm.toMatrix(pm.fromMsg(feedback.pose)), pm.toMatrix(pm.fromTf(marker_offset_tf)))))
    if feedback.menu_entry_id:
        marker_tf = zero_tf
Example #11
def ground_area(cm, src, tf_x):
    """ Compute 4-corners of ground-plane from camera.

    Args:
        cm(image_geometry.PinholeCameraModel): camera model for image.
        src(np.ndarray): [4,2] array of four corners, formatted (u,v).
            Represents the area in the frame to compute the groundplane projection.
        tf_x(tuple): (txn,qxn) tuple for translation and rotation(quaternion).
            Refer to results from tf.TransformListener.lookupTransform()
    Returns:
        points(np.ndarray): [4,2] array of four corners, formatted (x,y)
            z value is implicitly zero w.r.t. the source tf frame.
    """

    txn, qxn = tf_x

    # K = 3x3 camera projection
    w, h = cm.width, cm.height
    M_x = pm.toMatrix(pm.fromTf(tf_x))

    cray = [cm.projectPixelTo3dRay(e)
            for e in src]  # [4,3] formatted (x,y,z) ??
    # convert ray to map coord
    mray = np.dot(cray, M_x[:3, :3].T)  # right-multiply rotation matrix

    # extend ray to groundplane
    l = -txn[2] / mray[:, 2]
    # need to flip z since mray is pointing downwards
    # i.e. same as mray[:,2].dot([0,0,-1]) which is the correct distance
    gray = l[:, np.newaxis] * mray
    return gray[:, :2] + np.reshape(txn[:2], [-1, 2])
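A hedged usage sketch for ground_area; the camera_info topic and the 'base_link' ground-referenced frame are assumptions, not part of the original code:

import numpy as np
import rospy
import tf
from image_geometry import PinholeCameraModel
from sensor_msgs.msg import CameraInfo

rospy.init_node('ground_area_demo')
cm = PinholeCameraModel()
cm.fromCameraInfo(rospy.wait_for_message('/camera/camera_info', CameraInfo))
listener = tf.TransformListener()
listener.waitForTransform('base_link', cm.tfFrame(), rospy.Time(0), rospy.Duration(2.0))
tf_x = listener.lookupTransform('base_link', cm.tfFrame(), rospy.Time(0))
# Four pixel corners of the lower half of the image, formatted (u, v)
src = np.float32([[0, cm.height], [cm.width, cm.height],
                  [cm.width, cm.height / 2], [0, cm.height / 2]])
corners_xy = ground_area(cm, src, tf_x)   # [4,2] ground-plane corners (x, y) in base_link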
Example #12
    def step(self):
        if self.T_ is None:
            self.init_tf()
            return

        if self.recv_:
            self.recv_ = False
            pose = self.odom_.pose.pose
            T0 = pm.toMatrix(pm.fromMsg(pose))  # android_odom --> android

            # send odom -> base_link transform
            T = tf.transformations.concatenate_matrices(self.T_, T0, self.Ti_)
            frame = pm.fromMatrix(T)
            if self.pub_tf_:
                txn, qxn = pm.toTf(frame)
                self.tfb_.sendTransform(txn, qxn, self.odom_.header.stamp,
                                        'odom', 'base_link')

            # send as msg
            # TODO : does not deal with twist/covariance information
            msg = pm.toMsg(frame)
            self.cvt_msg_.pose.pose = pm.toMsg(frame)
            self.cvt_msg_.header.frame_id = 'map'  # experimental
            self.cvt_msg_.header.stamp = self.odom_.header.stamp
            self.pub_.publish(self.cvt_msg_)
Example #13
	def simple_horiz_kvec_motion_psm2(self, O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera):
	    dphi = math.pi/40.0
	    bvec0 = Vector(1,0,0)
	    nvec = Vector(0,0,1)
	    bvec = self.Rotz(kvec_yaw)*bvec0
	    tvec = bvec*nvec
	    R0 = Rotation(nvec,tvec,bvec)
	    R0 = R0.Inverse()
	    tip_pos = O_needle - r_needle*tvec
	    self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
	    self.affine_gripper_frame_wrt_camera_frame_.M = R0
	    del gripper_affines_wrt_camera[:]
	    nsolns = 0
	    nphi = 0
	    print "nphi: "

	    for phi in np.arange(0.0, -math.pi, -dphi):  # range() only takes ints; np.arange handles the float step
	        R = self.Rot_k_phi(bvec, phi)*R0
	        self.affine_gripper_frame_wrt_camera_frame_.M=R
	        R_Frame = posemath.toMatrix(self.affine_gripper_frame_wrt_camera_frame_)
	        R_column2_numpy = R_Frame[0:3,1]
	        R_column2 = Vector(R_column2_numpy[0],R_column2_numpy[1],R_column2_numpy[2])
	        tip_pos = O_needle - r_needle*R_column2
	        self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
	        des_gripper1_wrt_base = self.default_affine_lcamera_to_psm_two_.Inverse()*self.affine_gripper_frame_wrt_camera_frame_
	        ''' ERDEM
	        if (ik_solver_.ik_solve(des_gripper1_wrt_base)) 
	        {  nsolns++;
	           cout<<nphi<<",";
	           #cout<<":  found IK; nsolns = "<<nsolns<<endl;
	           gripper_affines_wrt_camera.push_back(affine_gripper_frame_wrt_camera_frame_);
	        }'''
	        nphi += 1

	    print "\n"
Example #14
 def setArmPosition(self, pos, preferedAngs=None):
     self.currentPos = np.array(pos)
     if not self.useOptim:
         angs = self.__oldIVK__(pos, preferedAngs)
     else:
         print(pm.toMatrix(pm.fromMsg(self.ur2ros(pos))))
         trj = trajectoryOptim(self.lastJointAngles, pm.toMatrix(pm.fromMsg(self.ur2ros(pos))))
         trj._numSteps = 1
         angs = trj.optimize()
         loc = angs[-1]
         angs = angs[0][0]
     self.lastJointAngles = angs
     self.__publish__(angs)
     self.currentPos[0:3] = loc[0][0:-1,3]
     return loc[0][0:-1,3]
     
Example #15
    def _makePose(self, pose_msg):
        '''
        Read in a pose message and convert it to a standard data format. We
        use KDL Frames for fast operations down the line.

        If we previously set the `from_unity` flag to true, then we need to
        adjust our frames so that they make sense as well.

        Parameters:
        -----------
        pose_msg: a ROS pose message

        Returns:
        --------
        a KDL pose in right hand, z-up notation
        '''
        pose = pm.fromMsg(pose_msg)
        if self.from_unity:
            H = np.array([[0, 0, 1, 0], [-1, 0, 0, 0], [0, 1, 0, 0],
                          [0, 0, 0, 1]])
            x = pm.toMatrix(pose)
            x = H.dot(x)
            pose = pm.fromMatrix(x)

        return pose
def get_staubli_cartesian_as_tran():
    """@brief - Read the staubli end effector's cartesian position as a 4x4 numpy matrix

       Returns a 4x4 numpy matrix on success, an empty matrix on failure
    """
    current_pose = get_staubli_cartesian_as_pose_msg()
    return pm.toMatrix(pm.fromMsg(current_pose))
 def align_pose(self, pose):
     objectInCamera = pm.toMatrix(pm.fromMsg(pose))
     ModelRecManager.tf_listener.waitForTransform(
         "/world", "/camera_rgb_optical_frame", rospy.Time(0),
         rospy.Duration(5))
     cameraInWorld = pm.toMatrix(
         pm.fromTf(
             ModelRecManager.tf_listener.lookupTransform(
                 "/world", "/camera_rgb_optical_frame", rospy.Time(0))))
     objectInWorld = dot(cameraInWorld, objectInCamera)
     """45 degrees rotated around z axis"""
     objectInWorld[0:3, 0:3] = mat([[0.7071, -0.7071, 0],
                                    [0.7071, 0.7071, 0], [0, 0, 1]])
     worldInCamera = linalg.inv(cameraInWorld)
     objectInCameraModified = dot(worldInCamera, objectInWorld)
     return pm.toMsg(pm.fromMatrix(objectInCameraModified))
Example #19
    def imageProc(self, image):
        # Get current force
        force = self.currentForce
        force = np.linalg.norm(force)
        targetF = 4  # Newtons
        targetR = 1  # Newtons
        # Calculate color
        xp = [targetF - targetR, targetF, targetF + targetR]
        fp = [0, 1, 0]
        colorPos = np.interp(force, xp, fp)
        color = colorsys.hsv_to_rgb(colorPos**3 / 3, .8, 1)

        if self.drawType == "arrow":
            self.arrowActor.GetProperty().SetColor(color[0], color[1],
                                                   color[2])
            # Calculate pose of arrows
            initialRot = PyKDL.Frame(PyKDL.Rotation.RotY(np.pi / 2),
                                     PyKDL.Vector(0, 0, 0))
            pos = self.robot.get_current_position() * initialRot
            pos = self.cameraTransform.Inverse() * pos
            posMat = posemath.toMatrix(pos)
            posMatTarget = posMat.copy()
            # Scale arrows
            posMat[0:3, 0:3] = posMat[0:3, 0:3] * .025 * targetF
            setActorMatrix(self.targetActor, posMat)
            posMat[0:3, 0:3] = posMat[0:3, 0:3] * force / targetF
            setActorMatrix(self.arrowActor, posMat)

        elif self.drawType == "bar":
            self.forceBar.GetProperty().SetColor(color[0], color[1], color[2])
            # Move background bar
            pos = self.robot.get_current_position()
            pos = self.cameraTransform.Inverse() * pos
            pos2 = PyKDL.Frame(PyKDL.Rotation.Identity(), pos.p)
            pos2.M.DoRotZ(np.pi)
            pos2.p = pos2.p + pos2.M.UnitX() * -.015
            posMat = posemath.toMatrix(pos2)
            setActorMatrix(self.bar, posMat)
            setActorMatrix(self.greenLine, posMat)
            # Scale color bar
            fp2 = [0, .5, 1]
            scalePos = np.interp(force, xp, fp2)
            print scalePos
            posMat[1, 0:3] = posMat[1, 0:3] * scalePos
            setActorMatrix(self.forceBar, posMat)

        return image
Example #20
    def getCartesianMove(self,
                         frame,
                         q0,
                         base_steps=1000,
                         steps_per_meter=1000,
                         time_multiplier=10):

        # interpolate between start and goal
        pose = pm.fromMatrix(self.kdl_kin.forward(q0))

        cur_rpy = np.array(pose.M.GetRPY())
        cur_xyz = np.array(pose.p)

        goal_rpy = np.array(frame.M.GetRPY())
        goal_xyz = np.array(frame.p)

        steps = base_steps + int((pose.p - frame.p).Norm() * steps_per_meter)
        print " -- Computing %f steps" % steps

        ts = (pose.p - frame.p).Norm() / steps * time_multiplier
        traj = JointTrajectory()
        traj.points.append(
            JointTrajectoryPoint(positions=q0,
                                 velocities=[0] * len(q0),
                                 accelerations=[0] * len(q0)))

        # compute IK
        for i in range(1, steps + 1):
            xyz = cur_xyz + ((float(i) / steps) * (goal_xyz - cur_xyz))
            rpy = cur_rpy + ((float(i) / steps) * (goal_rpy - cur_rpy))

            frame = pm.toMatrix(
                kdl.Frame(kdl.Rotation.RPY(rpy[0], rpy[1], rpy[2]),
                          kdl.Vector(xyz[0], xyz[1], xyz[2])))
            #q = self.kdl_kin.inverse(frame,q0)
            q = self.ik(frame, q0)

            if self.verbose:
                print "%d -- %s %s = %s" % (i, str(xyz), str(rpy), str(q))

            if not q is None:
                pt = JointTrajectoryPoint(positions=q,
                                          velocities=[0] * len(q),
                                          accelerations=[0] * len(q))
                pt.time_from_start = rospy.Duration(i * ts)
                traj.points.append(pt)
                q0 = q
            elif i == steps:
                return JointTrajectory()

        if len(traj.points) < base_steps:
            rospy.logerr("Planning failure with " \
                    + str(len(traj.points)) \
                    + " / " + str(base_steps) \
                    + " points.")
            return JointTrajectory()

        traj.joint_names = self.joint_names
        return traj
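A hedged usage sketch for getCartesianMove, assuming the surrounding class instance is available as planner and exposes the kdl_kin chain used above (names illustrative):

import rospy
import PyKDL as kdl
import tf_conversions.posemath as pm

# q0: the arm's current joint vector; goal: 10 cm straight up from the current pose
start = pm.fromMatrix(planner.kdl_kin.forward(q0))
goal = kdl.Frame(start.M, start.p + kdl.Vector(0.0, 0.0, 0.10))
traj = planner.getCartesianMove(goal, q0, base_steps=10, steps_per_meter=100)
if len(traj.points) == 0:
    rospy.logwarn("Cartesian interpolation failed to find IK solutions")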
    def calculate_ecmb_world_transform(self):
        #ECM matrix
        self.trans_ecmb_world_l = self.trans_ecmb_ecm * self.trans_camleft_ecm.Inverse(
        ) * self.trans_camleft_world
        self.trans_ecmb_world_r = self.trans_ecmb_ecm * self.trans_camright_ecm.Inverse(
        ) * self.trans_camright_world

        # print(self.trans_ecmb_world_l)
        # print(self.trans_ecmb_world_r)

        Y = {
            "T_from_left": pm.toMatrix(self.trans_ecmb_world_l).tolist(),
            "T_from_right": pm.toMatrix(self.trans_ecmb_world_r).tolist()
        }
        with open("./calib_parameters/y_estimation.json", "w") as f:
            Y = json.dumps(Y, indent=4)
            f.write(Y)
    def get_err_twist(self, goal_frame=None, dT=0.01):
        """ Get the desirable twist for given error.
        """
        #_cur_frame = self.get_current_frame()

        if self.btn_state_2 == 1:
            b_T_saw0 = pm.toMatrix(self.cur_frame)
            b_T_vr0 = pm.toMatrix(self._frame2)
            self.vr0_T_saw0 = np.matmul(inv(b_T_vr0),
                                        b_T_saw0)  # numpy 4x4 array
        # else:
        #      self.vr0_T_saw0 = np.identity(4)

        self._frame2 = pm.fromMatrix(
            np.matmul(self.vr0_T_saw0, pm.toMatrix(self._frame2)))

        return PyKDL.diff(self.cur_frame, self._frame2, dT)
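For reference, PyKDL.diff(F0, F1, dt) returns the twist that carries F0 to F1 over dt, which is what the error term above relies on; a minimal check with illustrative values:

import PyKDL

F0 = PyKDL.Frame(PyKDL.Rotation.Identity(), PyKDL.Vector(0.0, 0.0, 0.0))
F1 = PyKDL.Frame(PyKDL.Rotation.RotZ(0.1), PyKDL.Vector(0.05, 0.0, 0.0))
twist = PyKDL.diff(F0, F1, 1.0)   # twist.vel ~ (0.05, 0, 0), twist.rot ~ (0, 0, 0.1)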
	def compute_needle_drive_gripper_affines(self, gripper_affines_wrt_camera, gripper_affines_wrt_psm):
	    
	    phi_insertion_ = 0.0
	    self.affine_needle_frame_wrt_tissue_ = self.affine_init_needle_frame_wrt_tissue_

	    dphi = math.pi/(2.0*(NSAMPS_DRIVE_PLAN-1))
	    
	    self.R0_needle_wrt_tissue_ = self.affine_needle_frame_wrt_tissue_.M
	    self.affine_needle_frame_wrt_tissue_.M = self.Rotx(self.psi_needle_axis_tilt_wrt_tissue_)*self.R0_needle_wrt_tissue_
	    self.R0_needle_wrt_tissue_= self.affine_needle_frame_wrt_tissue_.M


	    kvec_needle_Frame = posemath.toMatrix(self.affine_needle_frame_wrt_tissue_)
	    kvec_needle_numpy = kvec_needle_Frame[0:3,2]
	    kvec_needle = Vector(kvec_needle_numpy[0],kvec_needle_numpy[1],kvec_needle_numpy[2])
	    if(debug_needle_print):
	    	print "kvec_needle=" , kvec_needle
	    if(debug_needle_print):
	    	print "R0 needle:"


	    needle_origin = self.affine_needle_frame_wrt_tissue_.p
	    self.affine_needle_frame_wrt_tissue_.p = self.Rotx(self.psi_needle_axis_tilt_wrt_tissue_)*needle_origin;
	    
	    if(debug_needle_print):
	    	print self.affine_needle_frame_wrt_tissue_.M


	    for ipose in range(0,NSAMPS_DRIVE_PLAN):
	        Rot_needle = self.Rot_k_phi(kvec_needle,phi_insertion_)
	        #R_needle_wrt_tissue_ = Roty_needle*R0_needle_wrt_tissue_; #update rotation of needle drive
	        self.R_needle_wrt_tissue_ = Rot_needle*self.R0_needle_wrt_tissue_ #update rotation of needle drive
	        
	        if(debug_needle_print):
	        	print "R_needle w/rt tissue:"
	        if(debug_needle_print):
	        	print self.R_needle_wrt_tissue_
	        #need to check these transforms...
	        self.affine_needle_frame_wrt_tissue_.M = self.R_needle_wrt_tissue_
	        self.affine_gripper_frame_wrt_tissue_ = self.affine_needle_frame_wrt_tissue_*self.affine_needle_frame_wrt_gripper_frame_.Inverse()
	        
	        if(debug_needle_print):
	        	print("affine_gripper_frame_wrt_tissue_")
	        if(debug_needle_print):
	        	print_affine(self.affine_gripper_frame_wrt_tissue_)
	        
	        self.affine_gripper_frame_wrt_camera_frame_ = self.affine_tissue_frame_wrt_camera_frame_*self.affine_gripper_frame_wrt_tissue_

	        if(debug_needle_print):
	        	print("affine_gripper_frame_wrt_camera_frame_")
	        	print_affine(self.affine_gripper_frame_wrt_camera_frame_)
	        
	        gripper_affines_wrt_camera.append(self.affine_gripper_frame_wrt_camera_frame_)
	        
	        self.affine_gripper_frame_wrt_psm_frame_ = self.default_affine_lcamera_to_psm_one_.Inverse()*self.affine_gripper_frame_wrt_camera_frame_
	        gripper_affines_wrt_psm.append(self.affine_gripper_frame_wrt_psm_frame_)

	        phi_insertion_+=dphi
    def calc_joint_vel_3(self):
        """ calc joint vel using PyKDL IK
        """
        with self.mutex:
            q_now = self.joints_to_kdl(type='positions', values=None)
            T_sd = self._get_goal_matrix() # 
            targ_fr = self.get_frame(T_sd)
        dT = self.get_dt()
        err_mat = np.dot(np.linalg.inv(pm.toMatrix(self.get_current_frame())), pm.toMatrix(targ_fr))

        # err_twist = self.to_kdl_twist(r.se3ToVec(r.MatrixLog6(err_mat)).tolist())  # Modern Robotics, pp. 230
        # err_twist = self.get_err_twist(targ_fr, dT) # err term twist(Xd - X)
        err_twist = self.get_err_twist(targ_fr, 1.0) # err term twist(Xd - X)
        # err_twist = pm.fromMatrix(cur_fk) * _err_twist  
        # err_twist *= 0.0

        # des_twist = self.get_des_twist(dT) # feed-forward twist value Vd_dot
        des_twist = self.get_des_twist(dT) # feed-forward twist value Vd_dot
        # des_twist = pm.fromMatrix(cur_fk) * _des_twist        
        # des_twist = self.adj_to_twist(err_mat, _des_twist)
        # des_twist =self.to_kdl_twist(des_twist)
        # rospy.logwarn('=============== Twist error ===============')
        # print (err_twist)
        # rospy.logwarn('=============== Twist desired ===============')
        # print (des_twist, self.cur_wp_idx)
        integ_twist = self.integ_error(err_twist, dT)
        # rospy.logwarn('=============== Twist integrated ===============')
        # print (integ_twist)
        err_twist, integ_twist = self.apply_gain(err_twist, integ_twist)
        # total_twist = des_twist + err_twist + integ_twist # FF + Pg*Err + Ig*Integ(Err)
        total_twist = err_twist + integ_twist # Pg*Err + Ig*Integ(Err), feed-forward term disabled
        # self.q_dot = self.kdl_inv_vel_kine(cur_joint_pos=q_now, ee_twist=total_twist)
        self.q_dot = self.kdl_inv_vel_kine(cur_joint_pos=q_now, ee_twist=total_twist)
        self.q_dot = self.scale_joint_vel(self.q_dot)
        # publish joint command 
        qdot_output = dict(zip(self.names, self.q_dot))
        # rospy.logwarn('=============== Computed joint vels ===============')
        # print (qdot_output)
        self.prev_frame = self.cur_frame # the call of this line seems crucial!!
        self.limb.set_joint_velocities(qdot_output)
        # rospy.logwarn('=============== Twist actual ===============')
        # print (self.ee_ang_twist + self.ee_lin_twist)
        
        self._check_traj(dT)
        self.last_time = self.current_time
 def __getstate__(self):
     state = {}
     state['model_name'] = self.model_name
     state['object_name'] = self.object_name
     buff = StringIO.StringIO()
     self.point_cloud_data.serialize(buff)
     buff.seek(0)
     state['point_cloud_data'] = buff.readlines()
     state['pose'] = pm.toMatrix(pm.fromMsg(self.pose))
     return state
Example #27
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. release the hand
        2. backup the hand
        3. plan a path to a place 15cm back from the new pose
        4. use guarded motion to move forward
        """
        print 'adjustment:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
        arm_goal = tp.convert_cartesian_relative_goal(self.global_data,
                                                      adjustInPalm)
        backup = numpy.matrix([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, -.05],
                               [0, 0, 0, 1]])
        back_from_arm_goal = arm_goal * backup

        #move back by 10cm
        #move_forward(-.1)
        #raw_input("Press enter to go to the target pose...")

        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        #just go to the new place without a OpenRave trajectory plan
        #adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
        #blocking motion set to be true
        #send_cartesian_relative_goal(self.global_data, adjustInPalm, True)
        send_cartesian_goal(back_from_arm_goal, True)
        raw_input("Press enter to use guarded motion to move forward...")

        #guarded move forward
        tp.MoveHandSrv(1, [0, 0, 0, grasp_msg.pre_grasp_dof[0]])
        GuardedMoveUntilHit(self.global_data, array([0, 0, 1]), 'PALM', 0.05,
                            20)
        raw_input("Press enter to close the hand...")

        #close the hand
        #tp.close_barrett()
        #tp.move_hand_velocity([0.5,0.5,0.5,0.0])
        GuardedCloseHand(self.global_data)

        selection = int(raw_input('Lift up (1) or not (0): '))
        if selection == 1:
            print 'lift up the object'
            success = tp.lift_arm(.1, True)
            if not success:
                grasp_status = graspit_msgs.GraspStatus.UNREACHABLE
                grasp_status_msg = "Couldn't lift object"
        else:
            print 'not lift up the object'
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. release the hand
        2. backup the hand
        3. plan a path to a place 15cm back from the new pose
        4. use guarded motion to move forward
        """
        print 'adjustment:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
        arm_goal = tp.convert_cartesian_relative_goal(self.global_data, adjustInPalm)
        backup = numpy.matrix([[1,0,0,0],
                               [0,1,0,0],
                               [0,0,1,-.05],
                               [0,0,0,1]]);
        back_from_arm_goal = arm_goal * backup

        #move back by 10cm
        #move_forward(-.1)
        #raw_input("Press enter to go to the target pose...")

        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        #just go to the new place without a OpenRave trajectory plan
        #adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
        #blocking motion set to be true
        #send_cartesian_relative_goal(self.global_data, adjustInPalm, True)
        send_cartesian_goal(back_from_arm_goal, True)
        raw_input("Press enter to use guarded motion to move forward...")

        #guarded move forward
        tp.MoveHandSrv(1, [0,0,0, grasp_msg.pre_grasp_dof[0]])
        GuardedMoveUntilHit(self.global_data, array([0,0,1]), 'PALM', 0.05, 20)
        raw_input("Press enter to close the hand...")

        #close the hand
        #tp.close_barrett()
        #tp.move_hand_velocity([0.5,0.5,0.5,0.0])
        GuardedCloseHand(self.global_data)
        
        selection = int(raw_input('Lift up (1) or not (0): '))
        if selection == 1:
            print 'lift up the object'
            success = tp.lift_arm(.1, True)
            if not success:
                grasp_status = graspit_msgs.GraspStatus.UNREACHABLE
                grasp_status_msg = "Couldn't lift object"
        else:
            print 'not lift up the object'
  def getposecb(self, a, b):
    print "Setting Pose based on world model"

    if((rospy.Time.now().to_sec() - b.pose.header.stamp.to_sec()) > 4.0):
      print "time now = ", rospy.Time.now().to_sec(), "time from pose = ", b.pose.header.stamp.to_sec()
      print "Time stamp of detection to old: diff=", (rospy.Time.now().to_sec() - b.pose.header.stamp.to_sec())
      if(self.detection_counter <= 4):
        self.detection_counter += 1
        return "preempted"
      else:
        self.detection_counter = 0
        return "aborted"

    self.config_over_object.header.stamp = rospy.Time.now()
    self.config_over_object.pose.position.x = b.pose.pose.position.x
    self.config_over_object.pose.position.y = b.pose.pose.position.y
    #ao = PyKDL.Rotation.Quaternion(self.config_over_object.pose.orientation.x, self.config_over_object.pose.orientation.y, self.config_over_object.pose.orientation.z, self.config_over_object.pose.orientation.w)
    #bo = PyKDL.Rotation.Quaternion(b.pose.pose.orientation.x, b.pose.pose.orientation.y, b.pose.pose.orientation.z, b.pose.pose.orientation.w)
    #c = ao #* bo#.Inverse()
    m_manip = pm.toMatrix( pm.fromMsg(self.config_over_object.pose) )
    m_obj = pm.toMatrix( pm.fromMsg(b.pose.pose) )
    import numpy
    from tf.transformations import *
    m_ori = pm.toMatrix( pm.fromTf( ((0,0,0), quaternion_from_euler(0, 0, math.radians(-20.0))) ))
    box_pose = pm.toMsg( pm.fromMatrix(numpy.dot(m_manip,m_obj)) )
    m_test = numpy.dot(m_manip,m_obj)
    newrot = pm.toMsg( pm.fromMatrix(m_test))
    print newrot
    print self.config_over_object.pose
    #print c.GetQuaternion()
    self.config_over_object.pose.orientation = box_pose.orientation
    #self.config_over_object.pose.orientation.y = c.GetQuaternion()[1]
    #self.config_over_object.pose.orientation.z = c.GetQuaternion()[2]
    #self.config_over_object.pose.orientation.w = c.GetQuaternion()[3]
    #self.config_over_object.pose.position.z = 0.30#0.430

    self.config_object.header.stamp = rospy.Time.now()
    self.config_object.pose.position.x = b.pose.pose.position.x
    self.config_object.pose.position.y = b.pose.pose.position.y
    #self.config_object.pose.position.z = 0.2 #0.095
    self.config_object.pose.orientation = box_pose.orientation
Example #30
 def pose_callback(self, pose):
     if self.T_cam2base is None:
         try:
             tfpose = self.tf_listener.lookupTransform('/camera', '/base_link', rospy.Time(0))
         except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
             rospy.loginfo("failed to find tf between base_link and camera")
             return
         self.T_cam2base = pm.toMatrix(pm.fromTf(tfpose))
         self.t_cam2base = self.T_cam2base[0:3, 3]
         self.R_cam2base = self.T_cam2base[0:3, 0:3].dot(ez.eulerZYX(0.05, -0.13, 0))
     self.compute_pose_label(self.label)
Example #31
 def test_compute_twist(self):
     for _ in range(100):
         T0 = br.transform.random()
         vel = np.random.rand(3)
         rot = np.random.rand(3)
         kdl_twist = PyKDL.Twist(PyKDL.Vector(*vel), PyKDL.Vector(*rot))
         F0 = posemath.fromMatrix(T0)
         F1 = PyKDL.addDelta(F0, kdl_twist)
         T1 = posemath.toMatrix(F1)
         twist = ru.transforms.compute_twist(F0, F1)
         np.testing.assert_allclose(twist, np.hstack((vel, rot)))
Example #32
    def update(self, dt=0.0):

        # initial pose
        T0 = pm.toMatrix(pm.fromMsg(self._odom.pose.pose))
        dx = self._cmd_vel.linear
        dq = self._cmd_vel.angular

        dT = tx.compose_matrix(angles=(dq.x * dt, dq.y * dt, dq.z * dt),
                               translate=(dx.x * dt, dx.y * dt, dx.z * dt))
        T1 = tx.concatenate_matrices(T0, dT)
        self._odom.pose.pose = pm.toMsg(pm.fromMatrix(T1))
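The update step above composes the previous odometry pose with a body-frame displacement built from the commanded twist. A standalone sketch of the same composition with made-up values:

import tf_conversions.posemath as pm
import tf.transformations as tx
from nav_msgs.msg import Odometry

odom = Odometry()
odom.pose.pose.orientation.w = 1.0           # start from the identity pose
dt, vx, wz = 0.1, 0.5, 0.2                   # 0.5 m/s forward, 0.2 rad/s yaw
T0 = pm.toMatrix(pm.fromMsg(odom.pose.pose))
dT = tx.compose_matrix(angles=(0.0, 0.0, wz * dt), translate=(vx * dt, 0.0, 0.0))
odom.pose.pose = pm.toMsg(pm.fromMatrix(tx.concatenate_matrices(T0, dT)))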
Example #33
    def test_roundtrip(self):
        c = Frame()

        d = pm.fromMsg(pm.toMsg(c))
        self.assertEqual(repr(c), repr(d))

        d = pm.fromMatrix(pm.toMatrix(c))
        self.assertEqual(repr(c), repr(d))

        d = pm.fromTf(pm.toTf(c))
        self.assertEqual(repr(c), repr(d))
Example #34
def Rel2Globalfile(relfname, globalfname):
    pose_array = PoseArray()
    Rel2Global(relfname, pose_array)
    with open(globalfname, "w") as relfile:
        for pose in pose_array.poses:
            outarray = posemath.toMatrix(posemath.fromMsg(pose))
            outonedim = outarray[0:3, :].reshape((12))
            for index in range(11):
                relfile.write("{} ".format(outonedim[index]))
            relfile.write("{}\n".format(outonedim[11]))
        relfile.close()
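Rel2Globalfile writes each pose as the first three rows of its 4x4 matrix, flattened to 12 space-separated numbers per line (KITTI-style). A hedged sketch for reading such a file back into 4x4 matrices:

import numpy as np

def read_pose_file(fname):
    poses = []
    with open(fname) as f:
        for line in f:
            vals = [float(x) for x in line.split()]
            # Re-assemble the 3x4 block and append the homogeneous bottom row
            poses.append(np.vstack((np.array(vals).reshape(3, 4), [0.0, 0.0, 0.0, 1.0])))
    return poses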
Example #36
    def ik(self, pose, q0):
        '''
        The ik() command is used by various agents and problem domains
        to recover a command vector that will move the robot arm to the right
        pose.

        Params:
        --------
        pose: kdl frame to move to
        q0: current joint position
        '''
        return self.kinematics.inverse(pm.toMatrix(pose), q0)
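A hedged round-trip sketch for ik(), assuming the object is available as robot and self.kinematics follows a pykdl_utils-style forward()/inverse() interface (names illustrative):

import PyKDL
import tf_conversions.posemath as pm

# q0: the arm's current measured joint vector
goal = pm.fromMatrix(robot.kinematics.forward(q0))                  # FK at q0 as a KDL Frame
goal = PyKDL.Frame(goal.M, goal.p + PyKDL.Vector(0.0, 0.0, 0.05))   # target 5 cm higher
q_cmd = robot.ik(goal, q0)                                          # None if no IK solution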
    def get_des_twist(self, dT=0.01):
        """ Get desired twist @ time t, by computing differential of traj_frame @ t=T-1 and t=T
        """
        #_cur_frame = self.get_current_frame()
        #return PyKDL.diff(self.prev_frame, _cur_frame, dT)
        #
        #
        if self.btn_state_2 == 1:
            b_T_saw0 = pm.toMatrix(self.cur_frame)
            b_T_vr0 = pm.toMatrix(self._frame2)
            self.vr0_T_saw0 = np.matmul(inv(b_T_vr0),
                                        b_T_saw0)  # numpy 4x4 array
        # else:
        #     self.vr0_T_saw0 = np.identity(4)

        self._frame2 = pm.fromMatrix(
            np.matmul(self.vr0_T_saw0, pm.toMatrix(self._frame2)))
        self._frame1 = pm.fromMatrix(
            np.matmul(self.vr0_T_saw0, pm.toMatrix(self._frame1)))

        return PyKDL.diff(self._frame1, self._frame2, dT)
def pregrasp_object(global_data,
                    object_name,
                    adjust_height,
                    grasp_tran,
                    object_height_col=2,
                    pregrasp_dist=-.05,
                    object_height_adjustment=0.0,
                    run_it=True):
    """@brief - Move the hand to the pregrasp position. This procedure is suprisingly intricate.

    Given the object filename and the grasp transform, this transforms the grasp to world coordinates
    then runs a planner to avoid collisions in moving the arm the long distance to the
    object. This uses the scene graph, which is currently provided by a camera.

    Returns whether the pregrasp location was reachable, the filename in which the trajectory from OpenRave
    is stored, and the list of the dof values that are contained in the trajectory.
    """
    """ Sets up object location in openrave -- this code is all to compensate for the current object localization
    method only knowing the location of the base of the object - the height of the object's origin from the base
    must be used to adjust its location in openrave before planning. 
    """

    print "Pregrasping object: %s" % (object_name)

    #Get the object transform in the world.
    obj_tran = pm.toMatrix(
        pm.fromTf(
            global_data.listener.lookupTransform("/world", object_name,
                                                 rospy.Time(0))))

    if (obj_tran == []):
        warn("Scene graph in TF is broken! Most\
           likely, the object transform could not be found.")
        return [], [], [], []
    """Set up and visualize the grasp transforms"""

    pregrasp_tran = get_pregrasp_tran_from_tran(grasp_tran, pregrasp_dist)
    publish_grasp_tran(pregrasp_tran)
    final_tran = dot(obj_tran, pregrasp_tran)
    publish_grasp_tran(final_tran)

    success = False
    """Run the planner"""
    plan = []
    j = []
    try:
        success, trajectory_filename, dof_list, j = run_cbirrt_with_tran(
            global_data.or_env, final_tran)

    except Exception, e:
        warn("Failed running planner with\
                       error %s" % e)
Example #39
 def analyze_demonstration_pose(self, demo_grasp):
     if(len(self.data) ==0):
         return 1.0
     
     demo_pose = pm.toMatrix(pm.fromMsg(demo_grasp.final_grasp_pose))
     demo_position = demo_pose[:3,3]
     
     distances, indices = self.model.kneighbors(demo_position)
     indices = unique(indices)
     nbrs = [t for t in itertools.compress(self.data, indices)]
     valid_nbrs = []
     for n in nbrs:
         pose = pm.toMatrix(pm.fromMsg(n[0].final_grasp_pose))
         if acos(dot(pose[:3,2],demo_pose[:3,2])) < .52:
             valid_nbrs.append(n)
     
     if len(valid_nbrs):
         success_probability = len([n for n in valid_nbrs if n[1] & 1])/(1.0*len(valid_nbrs))            
     else:
         success_probability = 0
     self.demo_pose_analysis_publisher.publish(success_probability)
     return success_probability
Example #40
def get_camera_pose(aruco_msg):
    '''
    From a detected aruco marker message and the aruco map (aruco_map),
    work out the camera's position and orientation in the world frame.
    '''
    global aruco_map

    # Build a PyKDL Frame from the pose message
    aruco2camera = pm.fromMsg(aruco_msg.pose.pose)
    # Adjust the orientation: rotate -90 degrees (radians) about the X axis
    aruco2camera.M.DoRotX(-math.pi / 2)
    aruco2camera.M.DoRotZ(math.pi)  # correct the orientation: rotate 180 degrees about Z
    # Invert to get the camera's pose relative to the aruco marker
    camera2aruco = aruco2camera.Inverse()
    # Matrix of the aruco marker relative to the world frame

    aruco2world_mat = pm.toMatrix(pm.fromMsg(aruco_map[aruco_msg.id]))
    # Multiply to get the camera's transform relative to the world frame
    camera2world_mat = aruco2world_mat.dot(pm.toMatrix(camera2aruco))
    # Convert the matrix back to a pose message
    camera_pose_msg = pm.toMsg(pm.fromMatrix(camera2world_mat))
    return camera_pose_msg
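A usage sketch for get_camera_pose; the callback and message type are assumptions (any marker message with .id and .pose.pose, e.g. aruco_msgs/Marker, fed by a subscriber elsewhere in the node):

import rospy

def aruco_callback(marker_msg):
    # Only markers already placed in the map can anchor the camera pose
    if marker_msg.id in aruco_map:
        camera_pose = get_camera_pose(marker_msg)
        rospy.loginfo("camera position in world: %s", str(camera_pose.position))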
    def analyze_demonstration_pose(self, demo_grasp):
        if (len(self.data) == 0):
            return 1.0

        demo_pose = pm.toMatrix(pm.fromMsg(demo_grasp.final_grasp_pose))
        demo_position = demo_pose[:3, 3]

        distances, indices = self.model.kneighbors(demo_position)
        indices = unique(indices)
        nbrs = [t for t in itertools.compress(self.data, indices)]
        valid_nbrs = []
        for n in nbrs:
            pose = pm.toMatrix(pm.fromMsg(n[0].final_grasp_pose))
            if acos(dot(pose[:3, 2], demo_pose[:3, 2])) < .52:
                valid_nbrs.append(n)

        if len(valid_nbrs):
            success_probability = len([n for n in valid_nbrs if n[1] & 1
                                       ]) / (1.0 * len(valid_nbrs))
        else:
            success_probability = 0
        self.demo_pose_analysis_publisher.publish(success_probability)
        return success_probability
    def process_grasp_msg(self, grasp_msg):
        """@brief - Attempt to make the adjustment specified by grasp_msg
        1. plan a path to a place 15cm back from the new pose
        """
        print 'regular move:'
        print grasp_msg
        #release the object
        tp.open_barrett()

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        handGoalInBase = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))

        try:
            hand_tf = pm.toTf(pm.fromMatrix(handGoalInBase))
            bc = tf.TransformBroadcaster()
            now = rospy.Time.now()
            bc.sendTransform(hand_tf[0], hand_tf[1], now, "hand_goal", "armbase")
            self.global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
            armtip_robot = self.global_data.listener.lookupTransform('armbase', 'arm_goal', now)
            armGoalInBase = pm.toMatrix(pm.fromTf(armtip_robot))
        except Exception, e:
            handle_fatal('convert_cartesian_world_goal failed: error %s.'%e) 
Example #43
def goto(kdl_kin, pub, listener, trans, rot): 

  try:
    T = pm.fromTf((trans, rot))

    q0 = [-1.0719114121799995, -1.1008140645600006, 1.7366724169200003,
            -0.8972388608399999, 1.25538042294, -0.028902652380000227,]

    # DEFAULT
    objt, objr = ((0.470635159016, 0.0047549889423, -0.428045094013),(0,0,0,1))
    T_orig = pm.fromTf((objt,objr))
    # MOVEd
    objt, objr = ((0.52, 0.00, -0.43),(0,0,0,1))
    T_new = pm.fromTf((objt,objr))

    T_pose = pm.toMatrix(T)
    Q = kdl_kin.inverse(T_pose, q0)

    print "----------------------------"
    print "[ORIG] Closest joints =", Q

    msg = JointState(name=CONFIG['joints'], position=Q)
    pub.publish(msg)
    rospy.sleep(0.2)

    T_goal = T_orig.Inverse() * T
    T2 = T_new * T_goal
    T2_pose = pm.toMatrix(T2)
    Q = kdl_kin.inverse(T2_pose, q0)
    print "[NEW] Closest joints =", Q
    msg = JointState(name=CONFIG['joints'], position=Q)
    pub.publish(msg)
    rospy.sleep(0.2)

  except  (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException), e:
    pass
    def model_state_publisher(self):
        #############
        targetFrame = PyKDL.Frame(PyKDL.Rotation.Quaternion(self.target.pose.orientation.x,self.target.pose.orientation.y,self.target.pose.orientation.z,self.target.pose.orientation.w), PyKDL.Vector(self.target.pose.position.x,self.target.pose.position.y,self.target.pose.position.z))

        targetFrameMatrix = pm.toMatrix(targetFrame)

        sourceFrame = PyKDL.Frame(PyKDL.Rotation.Quaternion(self.source.pose.orientation.x,self.source.pose.orientation.y,self.source.pose.orientation.z,self.source.pose.orientation.w),PyKDL.Vector(self.source.pose.position.x,self.source.pose.position.y,self.source.pose.position.z))
        
        pose = pm.toMsg(pm.fromMatrix(linalg.inv(targetFrameMatrix))*sourceFrame)
        
        poseStamped = PoseStamped()
        poseStamped.pose = pose
        poseStamped.header.stamp = rospy.get_rostime()
        poseStamped.header.frame_id = '/'+self.targetLinkName[5:len(self.targetLinkName)] # This strips 'pr2::' from the frame_id string
        self.wPub.publish(poseStamped)
Example #45
def pregrasp_object(global_data, object_name, adjust_height, grasp_tran, object_height_col = 2,
                    pregrasp_dist = -.05, object_height_adjustment = 0.0, run_it = True):
    """@brief - Move the hand to the pregrasp position. This procedure is suprisingly intricate.

    Given the object filename and the grasp transform, this transforms the grasp to world coordinates
    then runs a planner to avoid collisions in moving the arm the long distance to the
    object. This uses the scene graph, which is currently provided by a camera.

    Returns whether the pregrasp location was reachable, the filename in which the trajectory from OpenRave
    is stored, and the list of the dof values that are contained in the trajectory.
    """


    """ Sets up object location in openrave -- this code is all to compensate for the current object localization
    method only knowing the location of the base of the object - the height of the object's origin from the base
    must be used to adjust its location in openrave before planning. 
    """

    print "Pregrasping object: %s"%(object_name)


    #Get the object transform in the world. 
    obj_tran = pm.toMatrix(pm.fromTf(global_data.listener.lookupTransform("/world",object_name, rospy.Time(0))))

    if (obj_tran == []):
        warn("Scene graph in TF is broken! Most\
           likely, the object transform could not be found.")
        return [],[], [], []
                

    """Set up and visualize the grasp transforms"""

    pregrasp_tran = get_pregrasp_tran_from_tran(grasp_tran, pregrasp_dist)
    publish_grasp_tran(pregrasp_tran)
    final_tran = dot(obj_tran, pregrasp_tran)
    publish_grasp_tran(final_tran)

    success = False
    """Run the planner"""
    plan = []
    j = []
    try:
        success, trajectory_filename, dof_list, j = run_cbirrt_with_tran( global_data.or_env, final_tran )

    except Exception,e:
        warn("Failed running planner with\
                       error %s"%e)
def release_gently(global_data, max_dist, ignore_warning=0):
    """ @brief - Gently place the object on the table by to at most the maximum distance
        and dropping the object
        
        @param global_data - the data structure storing the openrave environment and other global
                             parameters
        @param max_dist - The highest above the table we allow the arm to be before releasing

        TODO: Test and debug this function!!!!
    """
    """ This function is not ready yet!"""
    rospy.logwarn(
        "release gently function called, but it is not done being developed yet! "
    )
    if not ignore_warning:
        return

    success = True
    reason = "succeeded releasing"
    #find hand position with respect to world
    try:
        hand_in_world = pm.toMatrix(
            pm.fromTf(
                listener.lookupTransform('/hand', '/world', rospy.Time(0))))
    except:
        success = False
        reason = "failed to find hand in world"
    #Find distance above table and move hand down by that distance
    if success:
        try:
            move_dist = hand_in_world[2, 3] - max_dist
            if move_dist > 0:
                lift_arm_to_contact(-move_dist)
        except:
            success = False
            reason = "failed to move arm down"
    #open the hand to drop the object
    if success:
        try:
            success, reason, position = open_barrett()
        except:
            success = False
            reason = "open barrett command threw exception"

    return success, reason
Example #47
def release_gently(global_data, max_dist, ignore_warning = 0):
    """ @brief - Gently place the object on the table by to at most the maximum distance
        and dropping the object
        
        @param global_data - the data structure storing the openrave environment and other global
                             parameters
        @param max_dist - The highest above the table we allow the arm to be before releasing

        TODO: Test and debug this function!!!!
    """

    """ This function is not ready yet!"""
    rospy.log_warn("release gently function called, but it is not done being developed yet! ")
    if not ignore_warning:
        return

    success = True
    reason = "succeeded releasing"
    #find hand position with respect to world
    try:
        hand_in_world = pm.toMatrix(pm.fromTf(
            listener.lookupTransform('/hand','/world', rospy.Time(0))))
    except:
        success = False
        reason = "failed to find hand in world"
    #Find distance above table and move hand down by that distance
    if success:
        try:
            move_dist = hand_in_world[2,3] - max_dist
            if move_dist > 0:            
                lift_arm_to_contact(-move_dist)
        except:
            success = False
            reason = "failed to move arm down"
    #open the hand to drop the object
    if success:
        try:
            success, reason, position = open_barrett()
        except:
            success = False
            reason = "open barrett command threw exception"
            
    return success, reason
Example #48
def convert_cartesian_world_goal(global_data, world_tran):
    """@brief - Get armtip goal pose in the arms coordinate system from hand world goal pose.
    This is useful to convert between a hand goal in the world coordinate system to a cartesian goal that
    can be sent directly to the staubli controller. 
    
    @param world_tf - tf of hand goal pose in world coordinates
    """
    try:    
        world_tf = pm.toTf(pm.fromMatrix(world_tran))
        bc = tf.TransformBroadcaster()
        now = rospy.Time.now()
        bc.sendTransform(world_tf[0], world_tf[1], now, "hand_goal", "world")
        #listener = tf.TransformListener()
        global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
        armtip_robot = global_data.listener.lookupTransform('armbase', 'arm_goal', now)
        
        return pm.toMatrix(pm.fromTf(armtip_robot))
    except Exception, e:
        handle_fatal('convert_cartesian_world_goal failed: error %s.'%e) 
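A hedged usage sketch for convert_cartesian_world_goal; global_data, grasp_msg and send_cartesian_goal are taken from the surrounding examples:

import tf_conversions.posemath as pm

# World-frame goal for the hand, e.g. from a grasp message
hand_goal_world = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
armtip_goal = convert_cartesian_world_goal(global_data, hand_goal_world)
send_cartesian_goal(armtip_goal, True)   # blocking move, as used elsewhere above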
Example #49
def convert_cartesian_relative_goal(global_data, move_tran):
    """@brief - Convert a position relative to the hand's current coordinate system to the robot's base coordinate system.
    @param move_tran - the relative position in the hand's coordinate system. 

    """
    try:
        move_tf = pm.toTf(pm.fromMatrix(move_tran))
        print move_tf
        bc = tf.TransformBroadcaster()
        now = rospy.Time.now()
        bc.sendTransform(move_tf[0], move_tf[1], now, "hand_goal", "hand")
        #listener = tf.TransformListener()
        global_data.listener.waitForTransform("armbase", "arm_goal", now, rospy.Duration(1.0))
        armtip_robot = global_data.listener.lookupTransform('armbase','arm_goal', now)

        #arm_robot = global_data.listener.lookupTransform('armbase', 'arm', now)
        return pm.toMatrix(pm.fromTf(armtip_robot))
    except Exception, e:
        handle_fatal('convert_cartesian_relative_goal failed: error %s.'%e)
    def adjust(self, adjust_msg, close_hand = True):
        """
        adjust_msg specifies the adjustment in palm's local coordinate system
        """
        print 'adjust:'
        print adjust_msg
        #release the object
        tp.open_barrett()
        #tp.move_hand_percentage(0.8)

        #get the robot's current location and calculate the absolute tran after the adjustment
        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        adjustInPalm = pm.toMatrix(pm.fromMsg(adjust_msg.offset))
        arm_goal = tp.convert_cartesian_relative_goal(self.global_data, adjustInPalm)
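        #note: 'backup' post-multiplies arm_goal, backing the goal off 5 cm along its own local z axis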
        backup = numpy.matrix([[1,0,0,0],
                               [0,1,0,0],
                               [0,0,1,-.05],
                               [0,0,0,1]]);
        back_from_arm_goal = arm_goal * backup

        #update the robot status
        tp.update_robot(self.global_data.or_env.GetRobots()[0])
        #just go to the new place without an OpenRave trajectory plan
        #adjustInPalm = pm.toMatrix(pm.fromMsg(grasp_msg.final_grasp_pose))
        #blocking motion set to be true
        #send_cartesian_relative_goal(self.global_data, adjustInPalm, True)
        #send_cartesian_goal(back_from_arm_goal, True)
        send_cartesian_goal(arm_goal, True)
        #raw_input("Press enter to use guarded motion to move forward...")

        #guarded move forward
        #GuardedMoveUntilHit(self.global_data, array([0,0,1]), 'PALM', 0.05, 20)
        #raw_input("Press enter to close the hand...")

        #close the hand
        #tp.close_barrett()

        #gently close the fingers
        if close_hand:
            hu.GuardedCloseHand(self.global_data)
            tp.move_hand_velocity([0.1,0.1,0.1,0])
    def call_planner(self, stage):
        self.status.planning_status = stage
        print(stage)

        if stage == ObjectStatus.PREGRASP:
            
            target_pose = self.status.pose_stamped.pose
            
            pregrasp_distance = .1

            #The marker pose is defined in the torso coordinate system
            target_tran = pm.toMatrix(pm.fromMsg(target_pose))
            hand_tran = target_tran.copy()


            #Align hand perpendicular to cylinder in its canonical pose
            hand_tran[:3,1] = target_tran[:3,2]
            hand_tran[:3,2] = target_tran[:3,0]
            hand_tran[:3,0] = target_tran[:3,1]
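            #(hand x, y, z) <- (target y, z, x): a cyclic permutation, so the frame stays right-handed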
            pregrasp_tran = hand_tran.copy()

            #The approach direction of this gripper is -y for some reason
            pregrasp_tran[:3,3] = pregrasp_tran[:3,3] + pregrasp_tran[:3,1]*pregrasp_distance
            hand_tran_pose = pm.toMsg(pm.fromMatrix(hand_tran))
            pregrasp_pose = pm.toMsg(pm.fromMatrix(pregrasp_tran))


            req = PosePlanningSrv._request_class()
            req.TargetPoses = [pregrasp_pose, hand_tran_pose]
            req.ManipulatorID = int(self.status.hands == self.status.RIGHT)
            res = None
            try:
                print(req)
                res = self.pregrasp_planner_client.call(req)
                self.status.operating_status = self.status.PLANNING
                #print res
                self.last_plan = res.PlannedTrajectory
            except:
                res = None
                rospy.loginfo("Planner failed to pregrasp")
                         
                self.status.operating_status = self.status.ERROR
Example #52
	def Rot_k_phi(self, k_vec, phi): #Eigen::Vector3d k_vec,float phi
	    #Eigen::Matrix3d R_k_phi;
	    kx = k_vec[0]
	    ky = k_vec[1]
	    kz = k_vec[2]
	    N = 3
	    #skew-symmetric cross-product matrix K, written out row by row
	    #(the 9-scalar Rotation constructor is row-major; building it from row Vectors would transpose K)
	    K = Rotation(0.0,-kz,ky, kz,0.0,-kx, -ky,kx,0.0)
	    I = Rotation()
	    K_Frame = Frame(K)
	    K_Frame_numpy = posemath.toMatrix(K_Frame)
	    K_numpy = K_Frame_numpy[0:3,0:3]
	    I_numpy = np.identity(N)
	    R_k_phi_numpy = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*np.dot(K_numpy, K_numpy) #K^2 must be a matrix product, not an elementwise product
	    R_k_phi_numpy_FrameTemp = np.c_[R_k_phi_numpy, np.ones(N)]
	    R_k_phi_numpy_Frame = np.r_[R_k_phi_numpy_FrameTemp,[R_k_phi_numpy_FrameTemp[1]]]
	    R_k_phi_Frame = posemath.fromMatrix(R_k_phi_numpy_Frame)
	    R_k_phi = R_k_phi_Frame.M
	    return R_k_phi
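# Hedged sanity check (not in the original source): Rot_k_phi above implements the Rodrigues
# formula R = I + sin(phi)*K + (1 - cos(phi))*K^2 for a unit axis k, so its output should
# match PyKDL's built-in axis/angle rotation Rotation.Rot. Assumes a planner-like instance
# `planner` exposing the method above.
import math
import PyKDL

k = PyKDL.Vector(0.0, 0.0, 1.0)        # unit rotation axis
phi = math.pi / 6.0                    # rotation angle in radians
R_formula = planner.Rot_k_phi(k, phi)  # Rodrigues construction from above
R_kdl = PyKDL.Rotation.Rot(k, phi)     # PyKDL's own axis/angle rotation
print R_formula                        # the two rotations should agree
print R_kdl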
    def calib_position(self):
        '''
        Calibrate the suction cup tip location, posed as a linear least-squares problem.
        @return: the full numpy.linalg.lstsq solution tuple
        '''

        A = np.zeros((self.num_pos_measurements()*3, 6))
        b = np.zeros((self.num_pos_measurements()*3, 1))

        n = 0

        for measurement in self._pos_measurements:
            T = posemath.toMatrix(measurement)
            R = T[0:3, 0:3]
            p = T[0:3, 3]

            A[3*n:3*n+3, 0:3] = R
            A[3*n:3*n+3, 3:6] = -np.identity(3)

            b[3*n:3*n+3, 0] = -p

            n += 1

        self._A = A
        self._b = b

        sol = np.linalg.lstsq(A, b)
        x = sol[0]

        # pose of tool in wrist frame
        self._F_wrist_tool.p = kdl.Vector(x[0], x[1], x[2])

        # position of tool in robot base frame
        self._F_base_tool.p = kdl.Vector(x[3], x[4], x[5])

        return sol
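# Hedged illustration (not from the original source) of the linear system solved above: if the
# suction-cup tip is held at a fixed point p_tip in the base frame, every wrist pose (R_i, p_i)
# satisfies R_i * t_tool + p_i = p_tip, which rearranges to [R_i  -I] [t_tool; p_tip] = -p_i.
# Synthetic-data check with a made-up tool offset and tip location.
import numpy as np
import PyKDL as kdl
from tf_conversions import posemath

t_tool_true = np.array([0.01, -0.02, 0.15])  # assumed tool offset in the wrist frame
p_tip_true = np.array([0.50, 0.10, 0.30])    # assumed fixed tip point in the base frame

measurements = []
for angle in np.linspace(0.0, 1.0, 5):
    R_np = posemath.toMatrix(kdl.Frame(kdl.Rotation.RPY(angle, -angle, 0.5*angle)))[0:3, 0:3]
    p = p_tip_true - R_np.dot(t_tool_true)   # wrist position consistent with the pinned tip
    measurements.append((R_np, p))

A = np.zeros((3*len(measurements), 6))
b = np.zeros((3*len(measurements), 1))
for n, (R_np, p) in enumerate(measurements):
    A[3*n:3*n+3, 0:3] = R_np
    A[3*n:3*n+3, 3:6] = -np.identity(3)
    b[3*n:3*n+3, 0] = -p

x = np.linalg.lstsq(A, b)[0].flatten()
print "recovered tool offset:", x[0:3]       # should match t_tool_true
print "recovered tip point:  ", x[3:6]       # should match p_tip_true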
Example #54
    def get_best_distance(self, T, T_fwd, q0, check_closest_only = False, obj_name = None):
        quaternion_dot = np.dot(np.array(T_fwd.M.GetQuaternion()),np.array(T.M.GetQuaternion()))
        delta_rotation = np.arccos(2 * quaternion_dot ** 2 - 1)

        q_new = list()

        if self.closed_form_IK_solver == None or check_closest_only:
            q_new.append(self.ik(pm.toMatrix(T),self.q0))
        else:
            q_new = self.closed_form_IK_solver.solveIK(pm.toMatrix(T))

        if q_new is not None and len(q_new) > 0:
            message_print = ''
            message_print_invalid = ''
            ik_joint_solution_validity = ''
            best_dist = float('inf')
            best_invalid = float('inf')
            best_q = None
            best_q_invalid = None

            for q_i in q_new:
                if q_i is None:
                    continue
                dq = np.absolute(q_i - self.q0) * self.joint_weights
                combined_distance = (T.p - T_fwd.p).Norm() * self.translation_weight + \
                      self.rotation_weight * delta_rotation + \
                      self.joint_space_weight * np.sum(dq)

                # rospy.loginfo(str(q_i))
                result = self.check_robot_position_validity(q_i.tolist())

                other_obj_collisions = ''
                if obj_name is not None and not result.valid:
                    other_obj_collision = False
                    contacts = result.contacts
                    for collision in contacts:
                        if collision.contact_body_1 != obj_name and collision.contact_body_2 != obj_name:
                            other_obj_collision = True
                            break
                    # if not other_obj_collision:
                    #     rospy.logwarn('[Query] State Pose is actually valid')
                    #     rospy.logwarn(str(contacts))
                    result.valid = not other_obj_collision
                # ik_joint_solution_validity += '%s ' % result.valid

                if result.valid and combined_distance < best_dist:
                    best_q = q_i
                    best_dist = combined_distance
                    message_print = 'valid %s pose, translation: %f, rotation: %f, dq6: %f, dist: %f'  % \
                     (obj_name ,(T.p - T_fwd.p).Norm(),self.rotation_weight * delta_rotation, dq[-1], combined_distance )

                elif not result.valid and combined_distance < best_invalid:
                    best_q_invalid = q_i
                    best_invalid = combined_distance
                    message_print_invalid = 'invalid %s pose, translation: %f, rotation: %f, dq6: %f, dist: %f'  % \
                        (obj_name, (T.p - T_fwd.p).Norm(),self.rotation_weight * delta_rotation, dq[-1], combined_distance )
                    message_print_invalid += '\nCollisions found between: '
                    for contact_info in result.contacts:
                        message_print_invalid += '(%s and %s) '%(contact_info.contact_body_1,contact_info.contact_body_2)

            if (best_q is not None) or (best_q_invalid is not None):
                valid_pose = (best_q is not None)
                if not valid_pose:
                    best_q = best_q_invalid
                return valid_pose, best_dist, best_invalid, message_print, message_print_invalid, best_q

        return False, float('inf'), float('inf'), '%s: No valid IK solution'%obj_name,'%s: No valid IK solution'%obj_name, list()
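# Hedged sketch (not in the original source) isolating the scoring used in get_best_distance
# above: each IK candidate is ranked by a weighted sum of cartesian translation error, the
# angle between the two orientations (recovered from the quaternion dot product), and a
# weighted joint-space displacement. The weight arguments mirror the attributes assumed on
# the class above.
import numpy as np

def combined_ik_distance(T, T_fwd, q_candidate, q_current,
                         joint_weights, translation_weight,
                         rotation_weight, joint_space_weight):
    quaternion_dot = np.dot(np.array(T_fwd.M.GetQuaternion()),
                            np.array(T.M.GetQuaternion()))
    delta_rotation = np.arccos(2 * quaternion_dot ** 2 - 1)  # angle between the orientations
    dq = np.absolute(np.array(q_candidate) - np.array(q_current)) * joint_weights
    return (T.p - T_fwd.p).Norm() * translation_weight + \
           rotation_weight * delta_rotation + \
           joint_space_weight * np.sum(dq)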
Example #55
	def write_psm2_needle_drive_affines_to_file(self, psm2_gripper_affines_wrt_camera):


		nposes = len(psm2_gripper_affines_wrt_camera)
		if (nposes<1):
			rospy.logwarn("NO POSES TO SAVE")
			return

		str1 = "saving computer %d gripper poses w/rt camera"%nposes
		rospy.loginfo(str1)



		affine_gripper2_frame_wrt_camera_last = psm2_gripper_affines_wrt_camera[nposes-1]
		affine_needle_frame_wrt_camera_last = affine_gripper2_frame_wrt_camera_last*self.affine_needle_frame_wrt_gripper_frame_

		affine_needle_frame_wrt_camera_last_numpy = posemath.toMatrix(affine_needle_frame_wrt_camera_last)
		affine_needle_frame_wrt_camera_last_numpy_R = affine_needle_frame_wrt_camera_last_numpy[0:3,0:3] 
		
		'''BELOW do rest of the calculations with numpy array to prevent data type mismatch
		and convert to PyKDL at the end'''

		nvec_needle = affine_needle_frame_wrt_camera_last_numpy_R[0:,0]
		bvec_needle = affine_needle_frame_wrt_camera_last_numpy_R[0:,2]
		tvec_needle = affine_needle_frame_wrt_camera_last_numpy_R[0:,1]

		origin_needle = affine_needle_frame_wrt_camera_last.p
		origin_needle_numpy = np.array([origin_needle.x(),origin_needle.y(),origin_needle.z()])
		tip_of_needle_numpy = origin_needle_numpy + self.needle_radius_*nvec_needle
		
		gripper2_grasp_origin_numpy = tip_of_needle_numpy + self.grasp_depth_*bvec_needle
		gripper2_pre_grasp_origin_numpy = gripper2_grasp_origin_numpy - 0.03*bvec_needle
		gripper2_out_of_way_numpy = np.array([0.14, -0.03, 0.07])

		N = 3
		'''
		R2_diag_numpy = np.identity(N)
		R2_diag_numpy[2,2] = -1
		R2_diag_numpy[1,1] = -1

		print "R2_diag:"
		print R2_diag_numpy

		R2_diag_numpy_FrameTemp = np.c_[R2_diag_numpy, np.ones(N)]
		R2_diag_numpy_Frame = np.r_[R2_diag_numpy_FrameTemp,[R2_diag_numpy_FrameTemp[1]]]
		R2_diag_Frame = posemath.fromMatrix(R2_diag_numpy_Frame)
		
		#BELOW numpy ends here - switch back to PyKDL BELOW

		R2_diag = R2_diag_Frame.M
		gripper2_out_of_way = Vector(gripper2_out_of_way_numpy[0],gripper2_out_of_way_numpy[1],gripper2_out_of_way_numpy[2])
		psm2_nom_wrt_base2 = Frame(R2_diag,gripper2_out_of_way)


		print "affine_lcamera_to_psm_two_: "
		print self.default_affine_lcamera_to_psm_two_.M
		print "origin: "
		print self.default_affine_lcamera_to_psm_one_.p

		psm2_nom_wrt_camera = self.default_affine_lcamera_to_psm_two_*psm2_nom_wrt_base2

		print "psm2_nom_wrt_camera: "
		print psm2_nom_wrt_camera.M
		print "origin: "
		print psm2_nom_wrt_camera.p

		#BELOW switch back to numpy BELOW
		
		psm2_nom_wrt_camera_numpy = posemath.toMatrix(psm2_nom_wrt_camera)
		psm2_nom_wrt_camera_numpy_R = psm2_nom_wrt_camera_numpy[0:3,0:3] 
		'''
		tvec_gripper2 = nvec_needle
		bvec_gripper2 = bvec_needle
		nvec_gripper2 = np.cross(tvec_gripper2,bvec_gripper2)

		Rtr = np.array([nvec_gripper2, tvec_gripper2, bvec_gripper2])
		print Rtr
		R = np.transpose(Rtr)
		print R

		nvec2 = np.array([1,0,0])
		bvec2 = np.array([0,0,1])

		t = 4
		dt = 1

		outfile = open('psm2_gripper_poses_in_camera_coords.csp','w')

		for i in range(nposes):
			psm2_gripper_affines_wrt_camera_this = psm2_gripper_affines_wrt_camera[i]
			#Origin = gripper_affines_wrt_camera_this.p
			#R = gripper_affines_wrt_camera_this.M

			gripper_numpy = posemath.toMatrix(psm2_gripper_affines_wrt_camera_this)
			R = gripper_numpy[0:3,0:3] 
			Origin = gripper_numpy[0:3,3]

			outfile.write(str("%0.3f"%Origin[0]) + ', ' + str("%0.3f"%Origin[1]) + ', ' + str("%0.3f"%Origin[2]) + ',     ')

			nvec = R[0:,0]
			tvec = R[0:,1]
			bvec = R[0:,2]

			outfile.write(str("%0.3f"%nvec[0]) + ', ' + str("%0.3f"%nvec[1]) + ', ' + str("%0.3f"%nvec[2]) + ',     ')
			outfile.write(str("%0.3f"%bvec[0]) + ', ' + str("%0.3f"%bvec[1]) + ', ' + str("%0.3f"%bvec[2]) + ',  0,   ')

			outfile.write(str("%0.3f"%gripper2_out_of_way_numpy[0]) + ', ' + str("%0.3f"%gripper2_out_of_way_numpy[1]) + ', ' + str("%0.3f"%gripper2_out_of_way_numpy[2]) + '     ')
			outfile.write(str("%0.3f"%nvec2[0]) + ', ' + str("%0.3f"%nvec2[1]) + ', ' + str("%0.3f"%nvec2[2]) + ',     ')
			outfile.write(str("%0.3f"%bvec2[0]) + ', ' + str("%0.3f"%bvec2[1]) + ', ' + str("%0.3f"%bvec2[2]) + ',  0.0   ' + str("%0.2f"%t) + '\n')


			t+=dt
		
		outfile.close()
		rospy.loginfo("wrote fripper motion plan to file psm2_gripper_poses_in_camera_coords.csp") 
Example #56
def graspit_grasp_to_moveit_grasp(graspit_grasp_msg, move_group_commander, listener,  grasp_tran_frame_name='approach_tran', pregrasp_joint_angles=None, grasp_joint_angles=None):
    """
    :param graspit_grasp_msg: A graspit grasp message
    :type graspit_grasp_msg: graspit_msgs.msg.Grasp
    :returns a moveit message built from the graspit grasp
    :rtype: moveit_msgs.msg.Grasp
    """
    # 'manipulator' is the name for the root move group in the mico arm
    moveit_positions_from_graspit_positions = {'StaubliArm': barrett_positions_from_graspit_positions,
                                               'manipulator': mico_positions_from_graspit_positions,
                                               'left_arm': lambda x: (['l_gripper_joint'], [x[0]]),
                                               'right_arm': lambda x: (['r_gripper_joint'], [x[0]])
                                               }
    move_group_name = rospy.get_param('/arm_name', 'StaubliArm')
    moveit_positions_from_graspit_positions_fcn = moveit_positions_from_graspit_positions[move_group_name]

    moveit_grasp = moveit_msgs.msg.Grasp()

    # # This message contains a description of a grasp that would be used
    # # with a particular end-effector to grasp an object, including how to
    # # approach it, grip it, etc.  This message does not contain any
    # # information about a "grasp point" (a position ON the object).
    # # Whatever generates this message should have already combined
    # # information about grasp points with information about the geometry
    # # of the end-effector to compute the grasp_pose in this message.
    #
    # # A name for this grasp
    # string id
    #
    moveit_grasp.id = 'graspit_' + str(graspit_grasp_msg.grasp_id)

    # # The internal posture of the hand for the pre-grasp
    # # only positions are used
    #
    # trajectory_msgs/JointTrajectory pre_grasp_posture
    #
    pre_grasp_goal_point = trajectory_msgs.msg.JointTrajectoryPoint()
    if not pregrasp_joint_angles:
        pregrasp_joint_angles = graspit_grasp_msg.pre_grasp_dof
    pre_grasp_joint_names, pre_grasp_goal_point.positions = moveit_positions_from_graspit_positions_fcn(pregrasp_joint_angles)
    moveit_grasp.pre_grasp_posture.points.append(pre_grasp_goal_point)
    moveit_grasp.pre_grasp_posture.joint_names = pre_grasp_joint_names

    # # The internal posture of the hand for the grasp
    # # positions and efforts are used
    #
    # trajectory_msgs/JointTrajectory grasp_posture
    #
    goal_point = trajectory_msgs.msg.JointTrajectoryPoint()
    if not grasp_joint_angles:
        grasp_joint_angles = graspit_grasp_msg.final_grasp_dof
    joint_names, goal_point.positions = moveit_positions_from_graspit_positions_fcn(grasp_joint_angles)
    moveit_grasp.grasp_posture.joint_names = joint_names
    moveit_grasp.grasp_posture.points.append(goal_point)

    # # The position of the end-effector for the grasp.  This is the pose of
    # # the "parent_link" of the end-effector, not actually the pose of any
    # # link *in* the end-effector.  Typically this would be the pose of the
    # # most distal wrist link before the hand (end-effector) links began.
    #
    # geometry_msgs/PoseStamped grasp_pose
    #
    ee_pose = graspit_grasp_pose_to_moveit_grasp_pose(move_group_commander, listener, graspit_grasp_msg, grasp_tran_frame_name)
    moveit_grasp.grasp_pose.pose = ee_pose
    moveit_grasp.grasp_pose.header.frame_id = graspit_grasp_msg.object_name

    # # The estimated probability of success for this grasp, or some other
    # # measure of how "good" it is.
    #
    # float64 grasp_quality
    #
    moveit_grasp.grasp_quality = .8

    # # The approach direction to take before picking an object
    #
    # GripperTranslation pre_grasp_approach
    #
    approach_dir = geometry_msgs.msg.Vector3Stamped()
    # Set default approach dir to the jaco approach dir.
    x = rospy.get_param('approach_dir_x', 0)
    y = rospy.get_param('approach_dir_y', 0)
    z = rospy.get_param('approach_dir_z', -1)
    approach_dir.vector = geometry_msgs.msg.Vector3(x, y, z)
    approach_dir.header.frame_id = grasp_tran_frame_name
    moveit_grasp.pre_grasp_approach.direction = get_approach_dir_in_ee_coords(move_group_commander, listener, approach_dir)
    # #Convert the grasp message to a transform
    grasp_tran = pm.toMatrix(pm.fromMsg(graspit_grasp_msg.final_grasp_pose))


    pregrasp_tran = pm.toMatrix(pm.fromMsg(graspit_grasp_msg.pre_grasp_pose))


    pregrasp_dist = np.linalg.norm(pregrasp_tran[0:3, 3] - grasp_tran[0:3, 3])
    moveit_grasp.pre_grasp_approach.desired_distance = 2*pregrasp_dist
    moveit_grasp.pre_grasp_approach.min_distance = pregrasp_dist

    # # The retreat direction to take after a grasp has been completed (object is attached)
    #
    # GripperTranslation post_grasp_retreat
    #
    moveit_grasp.post_grasp_retreat.min_distance = .05
    moveit_grasp.post_grasp_retreat.desired_distance = 2*moveit_grasp.post_grasp_retreat.min_distance
    moveit_grasp.post_grasp_retreat.direction.header.frame_id = '/world'
    moveit_grasp.post_grasp_retreat.direction.vector = geometry_msgs.msg.Vector3(0, 0, 1)

    # # The retreat motion to perform when releasing the object; this information
    # # is not necessary for the grasp itself, but when releasing the object,
    # # the information will be necessary. The grasp used to perform a pickup
    # # is returned as part of the result, so this information is available for
    # # later use.
    #
    # GripperTranslation post_place_retreat
    #

    # # the maximum contact force to use while grasping (<=0 to disable)
    #
    # float32 max_contact_force
    #
    moveit_grasp.max_contact_force = -1

    # # an optional list of obstacles that we have semantic information about
    # # and that can be touched/pushed/moved in the course of grasping
    #
    # string[] allowed_touch_objects
    #
    moveit_grasp.allowed_touch_objects = []

    return moveit_grasp
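# Hedged usage sketch (not from the original source): once converted, the grasp can be handed
# to MoveIt's pick pipeline. Assumes the target object is already published in the planning
# scene under graspit_grasp_msg.object_name.
moveit_grasp = graspit_grasp_to_moveit_grasp(graspit_grasp_msg, move_group_commander,
                                             listener, 'approach_tran')
pick_result = move_group_commander.pick(graspit_grasp_msg.object_name, [moveit_grasp])
rospy.loginfo("pick returned: %s" % str(pick_result))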
	def Rot_k_phi(self, k_vec, phi): #Eigen::Vector3d k_vec,float phi
	    #Eigen::Matrix3d R_k_phi;
	    kx = k_vec[0]
	    ky = k_vec[1]
	    kz = k_vec[2]
	    
	    #K_nrow = Vector(0.0, -kz, ky)
	    #K_trow = Vector(kz, 0.0, -kx)
	    #K_brow = Vector(-ky, kx, 0.0)

	    N = 3
	    K = Rotation(0.0,-kz,ky, kz,0.0,-kx, -ky,kx,0.0)
	    
	    Ksquare = K*K

	    I = Rotation()
	    
	    K_Frame = Frame(K)
	    
	    K_Frame_numpy = posemath.toMatrix(K_Frame)
	    
	    K_numpy = K_Frame_numpy[0:3,0:3]
	    
	    I_numpy = np.identity(N)
	    K_square_Frame = Frame(Ksquare)
	   

	    K_square_Frame_numpy = posemath.toMatrix(K_square_Frame)
	    
	    K_square_numpy = K_square_Frame_numpy[0:3,0:3]
	    

	    #R_k_phi_numpy = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*K_numpy*K_numpy
	    #R_k_phi_numpy_FrameTemp = np.c_[R_k_phi_numpy, np.ones(N)]
	    #R_k_phi_numpy_Frame = np.r_[R_k_phi_numpy_FrameTemp,[R_k_phi_numpy_FrameTemp[1]]]
	    #R_k_phi_Frame = posemath.fromMatrix(R_k_phi_numpy_Frame)
	    #R_k_phi = R_k_phi_Frame.M


	    R_k_phi_numpy_square = I_numpy + math.sin(phi)*K_numpy + (1-math.cos(phi))*K_square_numpy
	    R_k_phi_numpy_FrameTemp_square = np.c_[R_k_phi_numpy_square, np.ones(N)]
	    R_k_phi_numpy_Frame_square = np.r_[R_k_phi_numpy_FrameTemp_square,[R_k_phi_numpy_FrameTemp_square[1]]]
	    R_k_phi_Frame_square = posemath.fromMatrix(R_k_phi_numpy_Frame_square)
	    R_k_phi_square = R_k_phi_Frame_square.M
	    

	    if(debug_needle_print):
	    	print "K",K
	    	print "Ksquare",Ksquare
	    	print "K_Frame",K_Frame
	    	print "K_Frame_numpy",K_Frame_numpy
	    	print "K_numpy",K_numpy
	    	print "K_square_Frame",K_square_Frame
	    	print "K_square_Frame_numpy",K_square_Frame_numpy
	    	print "math.sin(phi)",(math.sin(phi))
	    	print "math.sin(phi)",(math.sin(phi))
	    	print "1-math.cos(phi)",(1-math.cos(phi))
	    	print "K_numpy*K_numpy",K_numpy*K_numpy
	    	print "R_k_phi_numpy",R_k_phi_numpy
	    	print "R_k_phi_numpy_FrameTemp",R_k_phi_numpy_FrameTemp
	    	print "R_k_phi_numpy_Frame",R_k_phi_numpy_Frame
	    	print "R_k_phi_Frame",R_k_phi_Frame
	    	print  "R_k_phi",R_k_phi
	    	print "R_k_phi_numpy_square",R_k_phi_numpy_square
	    	print "R_k_phi_numpy_FrameTemp_square",R_k_phi_numpy_FrameTemp_square
	    	print "R_k_phi_numpy_Frame_square",R_k_phi_numpy_Frame_square
	    	print "R_k_phi_Frame_square",R_k_phi_Frame_square
	    	print  "R_k_phi_square",R_k_phi_square

	    return R_k_phi_square
	def simple_horiz_kvec_motion_psm2(self, O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera, gripper_affines_wrt_psm):

	    dphi = math.pi/40.0
	    bvec0 = Vector(1,0,0)
	    nvec = Vector(0,0,1)
	    bvec = self.Rotz(kvec_yaw)*bvec0
	    tvec = bvec*nvec
	    R0 = Rotation(nvec,tvec,bvec)
	    #R0 = R0.Inverse()
	    tvec_numpy = np.array([tvec.x(), tvec.y(), tvec.z()])
	    tip_pos_numpy = O_needle - r_needle*tvec_numpy
	    tip_pos = Vector(tip_pos_numpy[0], tip_pos_numpy[1], tip_pos_numpy[2])

	    self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
	    self.affine_gripper_frame_wrt_camera_frame_.M = R0
	    del gripper_affines_wrt_camera[:]
	    nsolns = 0
	    nphi = 0
	    print "nphi: "

	    for phi in np.arange(0.0,-math.pi,-dphi): #use arange from numpy for double increments
	        R = self.Rot_k_phi(bvec, phi)*R0
	        self.affine_gripper_frame_wrt_camera_frame_.M=R
	        R_Frame = posemath.toMatrix(self.affine_gripper_frame_wrt_camera_frame_)
	        R_column2_numpy = R_Frame[0:3,1]
	        R_column2 = Vector(R_column2_numpy[0],R_column2_numpy[1],R_column2_numpy[2])

	        tip_pos_numpy = O_needle - r_needle*R_column2_numpy
	        tip_pos = Vector(tip_pos_numpy[0], tip_pos_numpy[1], tip_pos_numpy[2])

	        self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
	        self.affine_gripper_frame_wrt_psm_frame_ = self.default_affine_lcamera_to_psm_two_.Inverse()*self.affine_gripper_frame_wrt_camera_frame_



	        affine_gripper_frame_wrt_psm_frame_numpyArray =  posemath.toMatrix(self.affine_gripper_frame_wrt_psm_frame_)
	        affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to7 = np.matrix(affine_gripper_frame_wrt_psm_frame_numpyArray)
	        affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to6 = affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to7 * inv(frame6to7)
	        

	        affine_0to6 = deepcopy(affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to6)
	        #print "affine_0to6", affine_0to6
	        # qvecIK = deepcopy(qvecIK_Test)
	        #print "qvecIK", qvecIK
	        #
	        result = psm_manip.InverseKinematics(self.qvecIK, affine_0to6)

	        jointLimitCheckBool = self.fit_joints_to_range(self.qvecIK)
	        if result:
	        	success = False
	        else:
	        	if jointLimitCheckBool:
	        		success = True #legal soln
	        	else:
	        		success = False #no legal soln out of joint lims
	        
	        if (success):
	        	affine_camera = deepcopy(self.affine_gripper_frame_wrt_camera_frame_)
	        	affine_gripper = deepcopy(self.affine_gripper_frame_wrt_psm_frame_)
	        	nsolns+=1
	        	print nphi,","
	        	gripper_affines_wrt_camera.append(affine_camera)
	        	gripper_affines_wrt_psm.append(affine_gripper)
	        	#gripper_affines_wrt_psm.append(self.affine_gripper_frame_wrt_psm_frame_)
	        

	        ''' ERDEM
	        if (ik_solver_.ik_solve(des_gripper1_wrt_base)) 
	        {  nsolns++;
	           cout<<nphi<<",";
	           #cout<<":  found IK; nsolns = "<<nsolns<<endl;
	           gripper_affines_wrt_camera.push_back(affine_gripper_frame_wrt_camera_frame_);
	        }'''
	        nphi += 1

	    print "\n"
	def simple_horiz_kvec_motion(self, O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera, gripper_affines_wrt_psm):
		"""
		Initialize the IK output joint vector
		
		@param      self                        The object
		@param      O_needle                    The o needle
		@param      r_needle                    The r needle
		@param      kvec_yaw                    The kvec yaw
		@param      gripper_affines_wrt_camera  The gripper affines wrt camera
		@param      gripper_affines_wrt_psm     The gripper affines wrt psm
		
		@return     { description_of_the_return_value }
		"""
		dphi = math.pi/40.0
		bvec0 = Vector(1,0,0)
		nvec = Vector(0,0,-1)
		bvec = self.Rotz(kvec_yaw)*bvec0
		tvec = bvec*nvec
		R0 = Rotation(nvec,tvec,bvec)
		#R0 = R0.Inverse()
		tvec_numpy = np.array([tvec.x(), tvec.y(), tvec.z()])
		tip_pos_numpy = O_needle - r_needle*tvec_numpy
		tip_pos = Vector(tip_pos_numpy[0], tip_pos_numpy[1], tip_pos_numpy[2])

		self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
		self.affine_gripper_frame_wrt_camera_frame_.M = R0
		del gripper_affines_wrt_camera[:]
		nsolns = 0
		nphi = 0
		print "nphi: "

		if debug_needle_print2:
			print "bvec: ", bvec
			print "tvec: ", tvec
			print "tip_pos: ", tip_pos
			print "R0: ", R0

		for phi in np.arange(0,math.pi,dphi): #use arange from numpy for double increments
			R = self.Rot_k_phi(bvec, phi)*R0
			self.affine_gripper_frame_wrt_camera_frame_.M=R
			R_Frame = posemath.toMatrix(self.affine_gripper_frame_wrt_camera_frame_)
			R_column2_numpy = R_Frame[0:3,1]
			R_column2 = Vector(R_column2_numpy[0],R_column2_numpy[1],R_column2_numpy[2])
			tip_pos_numpy = O_needle - r_needle*R_column2_numpy
			tip_pos = Vector(tip_pos_numpy[0], tip_pos_numpy[1], tip_pos_numpy[2])

			self.affine_gripper_frame_wrt_camera_frame_.p = tip_pos
			self.affine_gripper_frame_wrt_psm_frame_ = self.default_affine_lcamera_to_psm_one_.Inverse()*self.affine_gripper_frame_wrt_camera_frame_
			# ''' ERDEM
			# 
			affine_gripper_frame_wrt_psm_frame_numpyArray =  posemath.toMatrix(self.affine_gripper_frame_wrt_psm_frame_)
			affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to7 = np.matrix(affine_gripper_frame_wrt_psm_frame_numpyArray)
			affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to6 = affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to7 * inv(frame6to7)

			#print "affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to6", affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to6
			affine_0to6 = deepcopy(affine_gripper_frame_wrt_psm_frame_numpyMatrix_0to6)
			#print "affine_0to6", affine_0to6
			#print "qvecIK", qvecIK
			result = psm_manip.InverseKinematics(self.qvecIK, affine_0to6)

			jointLimitCheckBool = self.fit_joints_to_range(self.qvecIK)
			if result:
				success = False
			else:
				if jointLimitCheckBool: #legal soln
					success = True 
				else: #no legal soln out of joint lims
					success = False 

			if debug_needle_print2:
				print "start printing\n"
				print "\n nphi \n", nphi
				print "\nR: \n", R
				print "\ntip_pose: \n", tip_pos
				print "\nself.affine_gripper_frame_wrt_psm_frame_ \n", self.affine_gripper_frame_wrt_psm_frame_
				print "\nself.qvecIK \n", self.qvecIK

			#print "qvecIK after computation", qvecIK
			if (success):
				affine_camera = deepcopy(self.affine_gripper_frame_wrt_camera_frame_)
				affine_gripper = deepcopy(self.affine_gripper_frame_wrt_psm_frame_)
				nsolns+=1
				print nphi,","
				#print "\nqvecIK before computation", qvecIK_before
				if debug_needle_print2:
					print "\nqvecIK after computation", self.qvecIK
				if debug_needle_print3:
					print "self.affine_gripper_frame_wrt_camera_frame_",affine_camera
					print "self.affine_gripper_frame_wrt_gripper_frame_",affine_gripper
				gripper_affines_wrt_camera.append(affine_camera)
				gripper_affines_wrt_psm.append(affine_gripper)
			#gripper_affines_wrt_psm.append(self.affine_gripper_frame_wrt_psm_frame_)

			# '''
			'''
			gripper_affines_wrt_camera.append(self.affine_gripper_frame_wrt_camera_frame_)
			gripper_affines_wrt_psm.append(self.affine_gripper_frame_wrt_psm_frame_)
			'''
			nphi += 1

		print "\n"
	def __init__(self):
		"""
		Constructor.  This initializes a few data members.
		"""
		# data members, event based
		# needle properties: these will be constant for a given operation
		print("needle planner constructor: initializations")

		self.needle_radius_ = DEFAULT_NEEDLE_RADIUS
		self.needle_axis_ht_ = DEFAULT_NEEDLE_AXIS_HT
		self.grab_needle_plus_minus_y_ = GRASP_W_NEEDLE_POSITIVE_GRIPPER_Y
		self.grab_needle_plus_minus_z_ = GRASP_W_NEEDLE_POSITIVE_GRIPPER_Z
		self.grasp_depth_ = DEFAULT_NEEDLE_GRASP_DEPTH
		self.phi_grab_ = DEFAULT_PHI_GRAB

		print "needle arc radius = %f" % self.needle_radius_
		print "needle ht above tissue = %f" % self.needle_axis_ht_

		b_rt = math.sqrt(self.needle_radius_*self.needle_radius_ - self.needle_axis_ht_*self.needle_axis_ht_)
		self.dist_entrance_to_exit_ = 2*b_rt
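		# chord geometry: with the needle axis held at height h = needle_axis_ht_ above the tissue
		# and radius r = needle_radius_, the entrance and exit points lie where the needle circle
		# meets the tissue plane, a chord of half-length sqrt(r^2 - h^2)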

		print "at needle ht %f, distance from entrance to exit is %f" % (self.needle_axis_ht_, self.dist_entrance_to_exit_)
		self.bvec_needle_wrt_grasp_frame_ = Vector()
		self.nvec_needle_wrt_grasp_frame_ = Vector()
		self.tvec_needle_wrt_grasp_frame_ = Vector()
		R = Rotation(1,0,0, 0,1,0, 0,0,1)

		O_grasp_frame = Vector(0,0,-self.grasp_depth_)
		self.affine_grasp_frame_wrt_gripper_frame_ = Frame(R, O_grasp_frame)
		#print "NeedlePlanner contructor:"
		
		self.O_needle_frame_wrt_grasp_frame_ = Vector()
		self.O_needle_ = Vector()
		self.R_needle_frame_wrt_grasp_frame_ = Rotation()

		self.affine_needle_frame_wrt_grasp_frame_ = Frame() 
		self.affine_needle_frame_wrt_gripper_frame_ = Frame()

		self.nvec_tissue_frame_wrt_camera_ = Vector()
		self.tvec_tissue_frame_wrt_camera_ = Vector()
		self.bvec_tissue_frame_wrt_camera_ = Vector()

		self.desired_needle_entrance_point_ = Vector()
		self.repaired_exit_pt_ = Vector()

		self.R_tissue_frame_wrt_camera_frame_ = Rotation()
		self.affine_tissue_frame_wrt_camera_frame_ = Frame()


		self.O_needle_wrt_tissue_ = Vector(0.5*self.dist_entrance_to_exit_,0,self.needle_axis_ht_)
		self.bvec_needle_wrt_tissue_frame_ = Vector(0,-1,0)
		self.nvec_needle_wrt_tissue_frame_ = Vector(-1,0,0)
		

		self.tvec_needle_wrt_tissue_frame_ = self.bvec_needle_wrt_tissue_frame_*self.nvec_needle_wrt_tissue_frame_

		self.R0_needle_wrt_tissue_ = Rotation(self.nvec_needle_wrt_tissue_frame_,self.tvec_needle_wrt_tissue_frame_,self.bvec_needle_wrt_tissue_frame_)
		self.affine_init_needle_frame_wrt_tissue_ = Frame(self.R0_needle_wrt_tissue_,self.O_needle_wrt_tissue_)
		print "INIT: initial affine_needle_frame_wrt_tissue_"
		print self.affine_init_needle_frame_wrt_tissue_

		affine_init_needle_frame_wrt_tissue_numpy_ = posemath.toMatrix(self.affine_init_needle_frame_wrt_tissue_)
		

		if(debug_needle_print):
			print "tvec_needle_wrt_tissue_frame_"
			print self.tvec_needle_wrt_tissue_frame_
			print "self.affine_init_needle_frame_wrt_tissue_"
			print self.affine_init_needle_frame_wrt_tissue_
			print "affine_init_needle_frame_wrt_tissue_numpy_"
			print affine_init_needle_frame_wrt_tissue_numpy_
			print("FIXED: affine_grasp_frame_wrt_gripper_frame_")
			print self.affine_grasp_frame_wrt_gripper_frame_

		self.kvec_needle_ = Vector()
		

		self.R_needle_wrt_tissue_ = Rotation()
		self.affine_needle_frame_wrt_tissue_ = Frame()
		self.affine_gripper_frame_wrt_tissue_ = Frame()
		self.affine_needle_frame_wrt_camera_ = Frame()
		self.affine_gripper_frame_wrt_camera_frame_ = Frame()

		self.compute_grasp_transform()
		self.R0_N_wrt_G_= self.affine_needle_frame_wrt_gripper_frame_.M
		self.O0_N_wrt_G_ = self.affine_needle_frame_wrt_gripper_frame_.p

		default_p_lcamera_to_psm_one_ = Vector(-0.155,-0.03265,0.0)
		default_R_lcamera_to_psm_one_ = Rotation(-1,0,0, 0,1,0, 0,0,-1)
		self.default_affine_lcamera_to_psm_one_ = Frame(default_R_lcamera_to_psm_one_,default_p_lcamera_to_psm_one_)
		self.default_affine_lcamera_to_psm_two_ = Frame()

		""" Initialize the IK output joint vector """
		self.qvecIK = np.array([0., 0., 0., 0., 0., 0.])