def execute(self, userdata): print "State: MoveToPreCutPoint" preCutPoint = tfx.pose(userdata.cutPoint._obj, copy = True) preCutPoint = tfx.convertToFrame(preCutPoint, '/two_remote_center_link') preCutPoint.position.x += 0.03 preCutPoint = tfx.convertToFrame(preCutPoint, '/one_remote_center_link') print "Left Arm precut", preCutPoint self.davinciArmLeft.setGripperPositionDaVinci(1) self.davinciArmLeft.executeInterpolatedTrajectory(preCutPoint) return 'success'
def execute(self, userdata): print "State: MoveToPreCutPoint" preCutPoint = tfx.pose(userdata.cutPoint._obj, copy=True) preCutPoint = tfx.convertToFrame(preCutPoint, '/two_remote_center_link') preCutPoint.position.x += 0.03 preCutPoint = tfx.convertToFrame(preCutPoint, '/one_remote_center_link') print "Left Arm precut", preCutPoint self.davinciArmLeft.setGripperPositionDaVinci(1) self.davinciArmLeft.executeInterpolatedTrajectory(preCutPoint) return 'success'
def execute(self, userdata):
    """Derive a cut point from the right gripper's current pose and publish it.

    The computed pose (expressed in /one_remote_center_link) is stored in
    userdata.cutPoint for downstream states.
    """
    print "State: IdentifyCutPoint"
    # rospy.loginfo('Enter to Identity Cut Point')
    # raw_input()
    currPoseRight = self.davinciArmRight.getGripperPose()
    # Compose two fixed rotations onto the gripper pose.  NOTE(review):
    # composition order matters; tb_angles args are presumably
    # yaw/pitch/roll in degrees -- confirm against tfx docs.
    currPoseRight = currPoseRight.as_tf() * tfx.pose(
        tfx.tb_angles(180, 0, 0)).as_tf() * tfx.pose(
        tfx.tb_angles(0, -75, 0))
    # Hand-tuned positional offsets (metres) from gripper to cut site.
    currPoseRight.position.y += 0.009
    currPoseRight.position.z += -0.03
    currPoseRight.position.x += 0.004
    # Final fixed rotation applied after the translation offsets.
    currPoseRight = currPoseRight.as_tf() * tfx.pose(
        tfx.tb_angles(180, 0, 0))
    # Stamp cleared before frame conversion -- presumably so the latest
    # available transform is used; verify against tfx.convertToFrame.
    currPoseRight.stamp = None
    cutPointCurr = tfx.convertToFrame(currPoseRight, '/one_remote_center_link')
    self.cut_point_pub.publish(cutPointCurr.msg.PoseStamped())
    # rospy.loginfo('Check cut point')
    # raw_input()
    userdata.cutPoint = cutPointCurr
    return 'success'
def left_image_callback(self, msg): if rospy.is_shutdown(): return self.left_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") self.left_points = self.process_image(self.left_image) poses = [] if self.right_points is not None and self.left_points is not None: self.left_points.sort(key=lambda x: x[0]) self.right_points.sort(key=lambda x: x[0]) disparities = self.assign_disparities(self.left_points, self.right_points) for i in range(len(self.left_points)): x = self.left_points[i][0] y = self.left_points[i][1] disparity = disparities[i] print x, y, disparity pt = Util.convertStereo(x, y, disparity, self.info) pt = tfx.point(pt) pt = tfx.convertToFrame(pt, '/two_remote_center_link') pose = tfx.pose(pt) pose = pose.as_tf() * tfx.pose(tfx.tb_angles( -90, 0, 180)).as_tf() * tfx.pose(tfx.tb_angles(90, 0, 0)) poses.append(pose) print poses pose_array = PoseArray() pose_array.header = poses[0].msg.PoseStamped().header for pose in poses: pose_array.poses.append(pose) self.grasp_poses_pub.publish(pose_array)
def run(self):
    """Plan a joint trajectory to the detected handle pose and visualize it."""
    self.sim.update()
    # Block until a handle pose estimate arrives (or ROS shuts down).
    while self.handle_pose is None and not rospy.is_shutdown():
        print('Waiting for average handle pose')
        rospy.sleep(1)
    handle_pose = self.handle_pose
    # Retry the TF conversion until the transform becomes available.
    # NOTE(review): bare except + no shutdown check can busy-spin forever
    # and swallows KeyboardInterrupt -- consider except Exception + sleep.
    while True:
        try:
            handle_pose.stamp = rospy.Time.now()
            tmp_handle_pose = tfx.convertToFrame(handle_pose, '/base_link')
            handle_pose = tmp_handle_pose
            break
        except:
            pass
    self.sim.plot_transform(self.sim.transform_from_to(handle_pose, handle_pose.frame, 'world'))
    print('Received handle pose')
    # Block until a pointcloud arrives (or ROS shuts down).
    while self.pc is None and not rospy.is_shutdown():
        print('Waiting for pointcloud')
        rospy.sleep(1)
    # Insert the observed pointcloud into the sim as convex obstacles.
    convexify_pointcloud.add_convexified_pointcloud_to_env(self.sim, self.pc, self.cam.get_pose())
    print('Convexified pointcloud, press enter')
    raw_input()
    joints = self.planner.get_joint_trajectory(self.arm.get_joints(), handle_pose, ignore_orientation=True)
    # Forward-kinematics each waypoint so the trajectory can be plotted.
    poses = [tfx.pose(self.arm.fk(joint), 'base_link') for joint in joints]
    for pose in poses:
        self.sim.plot_transform(self.sim.transform_from_to(pose, pose.frame, 'world'))
    IPython.embed()
def cameraPoseCallback(self, msg):
    """Record the phasespace camera pose converted into the '0_link' frame.

    Best-effort: if construction or TF conversion fails (e.g. the transform
    is not yet available within the 1 s wait), the previous value of
    self.observedPose is kept unchanged.
    """
    try:
        posePhasespace = tfx.pose(msg)
        self.observedPose = tfx.convertToFrame(posePhasespace, '0_link', wait=1.0)
    except Exception:
        # FIX: was a bare 'except' assigning an unused local (failCount).
        # Keep the deliberate best-effort behaviour, but stop swallowing
        # SystemExit/KeyboardInterrupt.
        pass
def _foam_points_cb(self, msg):
    """Callback for detected foam-centre points.

    Converts each incoming point into the Link0 frame, records the full set
    in self.centerHistory keyed by the message stamp, and keeps in
    self.currentCenters only those points farther than
    self.estPoseExclusionRadius from every current arm pose estimate.
    """
    if self.ignore:
        return
    if rospy.is_shutdown():
        return
    with self.lock:
        # Drop stale entries before recording this message's points.
        self._filterCenterHistory()
        t = tfx.stamp(msg.header.stamp)
        all_pts = tuple([tfx.convertToFrame(tfx.point(pt,frame=msg.header.frame_id),raven_constants.Frames.Link0) for pt in msg.points])#,header=msg.header
        self.centerHistory[t] = all_pts
        pts = []
        for pt in all_pts:
            # for/else: pt is kept only if NO estimate triggers the break.
            for _, estPose in self.estPose.iteritems():
                if estPose is not None:
                    # NOTE(review): pt was already converted to Link0 above,
                    # yet it is re-tagged here with msg.header.frame_id --
                    # looks suspicious; verify tfx.point frame semantics.
                    tfxPoint = tfx.convertToFrame(tfx.point(pt,frame=msg.header.frame_id),estPose.frame)
                    if estPose.position.distance(tfxPoint) < self.estPoseExclusionRadius:
                        break
            else:
                pts.append(pt)
        self.currentCenters = pts
        # Signal allocateFoam() that a fresh centre set is available.
        self.newCenters = True
def allocateFoam(self, armName, new=False): with self.lock: self.newCenters = False timeout = raven_util.Timeout(2) timeout.start() while not self.newCenters: rospy.sleep(0.1) if timeout.hasTimedOut(): return None with self.lock: centers = self._getUnallocatedCenters(armName, self.currentCenters, new=new) print 'centers %s' % armName print centers if not centers: msg = FoamAllocation() msg.header.stamp = rospy.Time.now() msg.arm = 'ALLOCATE_FOAM_NONE' self.allocation_pub.publish(msg) return None if armName == raven_constants.Arm.Left: cmp = op.gt else: cmp = op.lt best = None for center in centers: if best is None or cmp(center.x,best.x): best = center self.allocations[armName] = best best = tfx.convertToFrame(best,raven_constants.Frames.Link0) print 'returning new allocation', armName, best self._printState() foamPose = tfx.pose(best,self.orientation) msg = FoamAllocation() msg.header.stamp = rospy.Time.now() msg.arm = armName msg.pose = foamPose.msg.Pose() msg.new = new self.allocation_pub.publish(msg) return foamPose
def getGripperPose(self,frame=MyConstants.Frames.Link0):
    """
    Returns gripper pose w.r.t. frame

    geometry_msgs.msg.Pose
    """
    # NOTE(review): the 'frame' argument is ignored -- the pose is converted
    # into self.commandFrame instead; confirm callers expect this.
    pose = tfx.pose([0, 0, 0], frame=self.toolFrame)
    return tfx.convertToFrame(pose, self.commandFrame, pose.frame, wait=10)
    # FIX: removed the unreachable legacy path that followed this return
    # (it read self.ravenController.currentPose and converted via Util).
def getGripperPose(self,frame=raven_constants.Frames.Link0):
    """
    Returns gripper pose w.r.t. frame

    geometry_msgs.msg.Pose
    """
    # NOTE(review): 'frame' is ignored -- the pose is converted into
    # self.commandFrame instead; confirm callers expect this.
    pose = tfx.pose([0, 0, 0], frame=self.toolFrame)
    return tfx.convertToFrame(pose, self.commandFrame, pose.frame, wait=10)
    # FIX: removed the unreachable legacy path that followed this return
    # (it read self.ravenController.currentPose and converted via raven_util).
def getGripperPose(self, frame=raven_constants.Frames.World):
    """
    Returns gripper pose w.r.t. frame

    geometry_msgs.msg.Pose
    """
    ### This return statement was added so as not to bother with frames
    # NOTE(review): 'frame' is ignored -- callers receive the raw controller
    # pose; confirm that is acceptable before restoring frame handling.
    return self.ravenController.currentPose
    # FIX: removed the unreachable frame-conversion code that followed this
    # deliberate early return.
def getGripperPose(self,frame=raven_constants.Frames.World):
    """
    Returns gripper pose w.r.t. frame

    geometry_msgs.msg.Pose
    """
    ### This return statement was added so as not to bother with frames
    # NOTE(review): the 'frame' argument is unused on this path; the raw
    # controller pose is returned as-is.
    return self.ravenController.currentPose
    # FIX: dead code after the deliberate early return was removed (legacy
    # toolFrame/commandFrame conversion plus a '!= None' comparison).
def execute(self, userdata):
    """Derive a cut point from the right gripper's current pose and publish it.

    Stores the computed pose (in /one_remote_center_link) in
    userdata.cutPoint for downstream states.
    """
    print "State: IdentifyCutPoint"
    # rospy.loginfo('Enter to Identity Cut Point')
    # raw_input()
    currPoseRight = self.davinciArmRight.getGripperPose()
    # Compose two fixed rotations onto the gripper pose.  NOTE(review):
    # composition order matters; tb_angles args are presumably
    # yaw/pitch/roll degrees -- confirm against tfx docs.
    currPoseRight = currPoseRight.as_tf()*tfx.pose(tfx.tb_angles(180,0,0)).as_tf()*tfx.pose(tfx.tb_angles(0,-75,0))
    # Hand-tuned positional offsets (metres) from gripper to cut site.
    currPoseRight.position.y += 0.009
    currPoseRight.position.z += -0.03
    currPoseRight.position.x += 0.004
    # Final fixed rotation applied after the translation offsets.
    currPoseRight = currPoseRight.as_tf()*tfx.pose(tfx.tb_angles(180,0,0))
    # Stamp cleared before frame conversion -- presumably so the latest
    # available transform is used; verify against tfx.convertToFrame.
    currPoseRight.stamp = None
    cutPointCurr = tfx.convertToFrame(currPoseRight, '/one_remote_center_link')
    self.cut_point_pub.publish(cutPointCurr.msg.PoseStamped())
    # rospy.loginfo('Check cut point')
    # raw_input()
    userdata.cutPoint = cutPointCurr
    return 'success'
def run(self):
    """Plan a joint trajectory to the detected handle pose and visualize it."""
    self.sim.update()
    # Block until a handle pose estimate arrives (or ROS shuts down).
    while self.handle_pose is None and not rospy.is_shutdown():
        print('Waiting for average handle pose')
        rospy.sleep(1)
    handle_pose = self.handle_pose
    # Retry until the /base_link transform becomes available.
    # FIX: the bare 'except: pass' busy-spun at full speed and swallowed
    # KeyboardInterrupt/SystemExit; catch Exception and back off briefly.
    while True:
        try:
            handle_pose.stamp = rospy.Time.now()
            handle_pose = tfx.convertToFrame(handle_pose, '/base_link')
            break
        except Exception:
            rospy.sleep(0.1)
    self.sim.plot_transform(
        self.sim.transform_from_to(handle_pose, handle_pose.frame, 'world'))
    print('Received handle pose')
    # Block until a pointcloud arrives (or ROS shuts down).
    while self.pc is None and not rospy.is_shutdown():
        print('Waiting for pointcloud')
        rospy.sleep(1)
    # Insert the observed pointcloud into the sim as convex obstacles.
    convexify_pointcloud.add_convexified_pointcloud_to_env(
        self.sim, self.pc, self.cam.get_pose())
    print('Convexified pointcloud, press enter')
    raw_input()
    joints = self.planner.get_joint_trajectory(
        self.arm.get_joints(), handle_pose, ignore_orientation=True)
    # Forward-kinematics each waypoint so the trajectory can be plotted.
    poses = [tfx.pose(self.arm.fk(joint), 'base_link') for joint in joints]
    for pose in poses:
        self.sim.plot_transform(
            self.sim.transform_from_to(pose, pose.frame, 'world'))
    IPython.embed()