Code Example #1
    def execute(self, userdata):

        #wait out the settle time, then check whether the sample is still there
        #the timeout for re-detecting the sample is also the settle time
        rospy.sleep(userdata.settle_time)
        userdata.target_sample = None
        start_time = rospy.Time.now()
        while not rospy.core.is_shutdown_requested():
            rospy.sleep(0.1)
            if userdata.target_sample is not None:
                break #sample is still there, approach
            if (rospy.Time.now() - start_time) > userdata.settle_time:
                return 'point_lost' #sample disappeared, give up

        try:
            point = userdata.target_sample
            self.tf_listener.waitForTransform('manipulator_arm',
                                             point.header.frame_id,
                                             point.header.stamp,
                                             rospy.Duration(2.0))
            point_in_base = self.tf_listener.transformPoint('manipulator_arm',
                                                            point).point
            robot_origin = geometry_msg.Point(0,0,0)
            yaw = util.pointing_yaw(robot_origin, point_in_base)        
            distance = util.point_distance_2d(robot_origin, point_in_base)
            robot_yaw = util.get_current_robot_yaw(self.tf_listener, userdata.odometry_frame)
        except tf.Exception as e:
            rospy.logwarn("PURSUE_SAMPLE failed to get manipulator_arm -> %s transform in 2.0 seconds: %s", point.header.frame_id, e)
            return 'aborted'
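
The util helpers referenced above (pointing_yaw, point_distance_2d, get_current_robot_yaw) are defined elsewhere in the SampleReturn project and are not shown here. A minimal sketch of what the first two are assumed to compute, with signatures inferred from their usage rather than taken from the actual source:

import math

def pointing_yaw(origin, target):
    # yaw (radians) of the vector from origin to target in the x-y plane
    return math.atan2(target.y - origin.y, target.x - origin.x)

def point_distance_2d(origin, target):
    # planar Euclidean distance between two points, ignoring z
    return math.hypot(target.x - origin.x, target.y - origin.y)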
Code Example #2
File: level_one.py  Project: contradict/SampleReturn
    def execute(self, userdata):
        
        #the expected sample location is at the end of the path list
        pose_2d_list = userdata.search_poses_2d

        header = std_msg.Header(0, rospy.Time(0), '/map')
        path_pose_list = []
        yaw = 0
        for i in range(len(pose_2d_list)):
            pose = geometry_msg.Pose()
            pose.position = geometry_msg.Point(pose_2d_list[i]['x'],
                                               pose_2d_list[i]['y'],
                                               0.0)
            if 'yaw' in pose_2d_list[i]:
                yaw = math.radians(pose_2d_list[i]['yaw'])
            else:
                if (i+1) < len(pose_2d_list):
                    next_point = geometry_msg.Point(pose_2d_list[i + 1]['x'],
                                                    pose_2d_list[i + 1]['y'],
                                                    0.0)
                    yaw = util.pointing_yaw(pose.position, next_point)
                # if this is last point, just maintain current yaw                
                            
            quat_array = tf.transformations.quaternion_from_euler(0, 0, yaw)           
            pose.orientation = geometry_msg.Quaternion(*quat_array)
            pose_stamped = geometry_msg.PoseStamped(header, pose)
            path_pose_list.append(pose_stamped)
        
        userdata.pose_list = path_pose_list        
        userdata.stop_on_sample = False
        
        result = samplereturn_msg.GeneralExecutiveResult()
        result.result_string = 'initialized'
        userdata.action_result = result
        
        return 'next'
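
The search_poses_2d input consumed above is assumed to be a list of dicts giving map coordinates in meters, with an optional 'yaw' in degrees. A hypothetical example (values are illustrative only):

search_poses_2d = [
    {'x': 10.0, 'y':  0.0},             # yaw inferred from the next point
    {'x': 20.0, 'y':  5.0, 'yaw': 90},  # explicit yaw, in degrees
    {'x': 20.0, 'y': 15.0},             # last point keeps the previous yaw
]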
Code Example #3
 def execute(self, userdata):
     
     userdata.stop_on_sample = False
     userdata.detected_sample = None
     rospy.sleep(userdata.servo_params['settle_time'])
     if self.try_count > userdata.servo_params['try_limit']:
         self.announcer.say("Servo exceeded try limit")
         rospy.loginfo("SERVO STRAFE failed to hit tolerance before try_limit: %s" % (userdata.servo_params['try_limit']))
         self.try_count = 0
         #deploy gripper anyway, we still see sample, but it is not positioned well
         userdata.latched_sample = userdata.detected_sample
         return 'complete'
     
     if userdata.detected_sample is None:
         self.announcer.say("Servo lost sample.")
         self.try_count = 0
         return 'point_lost'
     else:
         detected_sample = userdata.detected_sample
         sample_time = detected_sample.header.stamp
         sample_frame = detected_sample.header.frame_id
         while not rospy.core.is_shutdown_requested():
             try:
                 self.tf_listener.waitForTransform('manipulator_arm',
                         sample_frame, sample_time, rospy.Duration(1.0))
                 point_in_manipulator = self.tf_listener.transformPoint('manipulator_arm',
                                                              detected_sample).point
                 break
             except tf.Exception as exc:
                 rospy.logwarn("SERVO CONTROLLER failed to get manipulator_arm -> {!s} transform.  Exception: {!s}".format(sample_frame, exc))
                 self.try_count = 0
                 return 'aborted'
         #rospy.loginfo("SERVO correction %s: " % (userdata.manipulator_correction))
         rospy.loginfo("SERVO point in manipulator before correction %s: " %(point_in_manipulator))
         point_in_manipulator.x += userdata.manipulator_correction['x']
         point_in_manipulator.y += userdata.manipulator_correction['y']
         #rospy.loginfo("SERVO point in manipulator after correction %s: " %(point_in_manipulator))
         origin = geometry_msg.Point(0,0,0)
         distance = util.point_distance_2d(origin, point_in_manipulator)
         if distance < userdata.servo_params['final_tolerance']:
             self.try_count = 0
             userdata.latched_sample = userdata.detected_sample
             self.announcer.say("Servo complete at %.1f millimeters." % (distance*1000))    
             return 'complete'
         elif distance < userdata.servo_params['initial_tolerance']:
             velocity = userdata.servo_params['final_velocity']
             accel = userdata.servo_params['final_accel']
         else:
             velocity = userdata.servo_params['initial_velocity']
             accel = userdata.servo_params['initial_accel']
         yaw = util.pointing_yaw(origin, point_in_manipulator)
         userdata.simple_move =  SimpleMoveGoal(type=SimpleMoveGoal.STRAFE,
                                                angle = yaw,
                                                distance = distance,
                                                velocity = velocity,
                                                acceleration = accel)
         #rospy.loginfo("SERVO simple_move: %s" % (userdata.simple_move))
         self.announcer.say("Servo ing, distance, %.1f centimeters" % (distance*100))
         rospy.loginfo("DETECTED SAMPLE IN manipulator_arm frame (corrected): %s" % (point_in_manipulator))
         self.try_count += 1
         return 'move'
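
The servo_params dictionary is read with several keys in this state. A hypothetical example of the structure it is assumed to have (key names follow the usage above; the values are illustrative, not from the project configuration):

servo_params = {
    'settle_time':       1.0,    # seconds to pause before re-checking the detection
    'try_limit':         5,      # max strafe corrections before latching anyway
    'final_tolerance':   0.005,  # meters; inside this the servo is 'complete'
    'initial_tolerance': 0.05,   # meters; inside this the final speeds are used
    'initial_velocity':  0.1,    # strafe velocity/acceleration for coarse moves
    'initial_accel':     0.1,
    'final_velocity':    0.02,   # slower values for the final approach
    'final_accel':       0.05,
}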
Code Example #4
    def execute(self, userdata):
        
        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #ignore samples after this (applies to mount moves)
        #clear previous move_point_map
        userdata.stop_on_sample = False
        userdata.move_point_map = None
        map_header = std_msg.Header(0, rospy.Time(0), userdata.world_fixed_frame)
       
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.world_fixed_frame)        
        
        #this distance may be highly inaccurate; if we reach the approach point
        #and still don't see the beacon, the map is probably wrong
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_beacon:
            userdata.beacon_point = None
            rospy.sleep(4.0)
        
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = math.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_beacon = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Search ing")
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                                    userdata.beacon_approach_pose.pose.position)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                
                userdata.stop_on_beacon = True
                self.tried_spin = False
                return 'move'
            else:
                #a better recovery behavior belongs here eventually
                #we think we're near the beacon, but we don't see it
                #for now, just mirror the approach point to the other side of the beacon and try again
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(userdata.beacon_approach_pose)                
                #invert the approach_point, and try again
                search_pose.pose.position.x *= -1
                #save point
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                    search_pose.pose.position)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = search_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                        
                userdata.stop_on_beacon = True
                self.tried_spin = False
                
                return 'move'               
                
        else: #beacon is in view
            #need to add some stuff here to get to other side of beacon if viewing back
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                       userdata.world_fixed_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_beacon = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                front_pose = deepcopy(userdata.beacon_approach_pose)
                #try not to drive through the platform
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = front_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                          
                self.announcer.say("Back of beacon in view. Move ing to front")
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity,
                                                    orient_at_target = True)
                userdata.move_goal = goal       
                self.announcer.say("Beacon in view. Move ing to approach point")                
                return 'move'   
            elif np.abs(yaw_error) > .2:
                #this means beacon is in view and we are within 2 meters of approach point
                #but not pointed so well at beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Initiate ing platform mount")
                userdata.stop_on_beacon = False
                return 'mount'
        
        return 'aborted'
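
util.unwind, used above to compute yaw_error, is assumed to normalize an angle into [-pi, pi] so the spin command turns the short way around. A minimal sketch under that assumption:

import math

def unwind(angle):
    # wrap an angle in radians into the interval [-pi, pi]
    return (angle + math.pi) % (2.0 * math.pi) - math.pi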
Code Example #5
    def execute(self, userdata):
        #set the move manager key for the move mux
        userdata.active_manager = userdata.manager_dict[self.label]
        userdata.report_sample = False
        userdata.report_beacon = True
        self.beacon_enable.publish(True)        

        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.platform_frame)        
        
        #hopeful distance to approach point
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_detection:
            self.announcer.say("Wait ing for beacon.")
            userdata.beacon_point = None
            start_time = rospy.Time.now()
            while not rospy.core.is_shutdown_requested():
                rospy.sleep(0.5)
                if (userdata.beacon_point is not None) \
                or ((rospy.Time.now() - start_time) > userdata.beacon_observation_delay):
                    break
                
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = np.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_detection = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Moving to approach point.")
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'
            else:
                #we think we should be able to see the beacon from here, but we don't; this is probably bad
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(current_pose)                
                #try a random position nearby-ish, ignore facing
                search_pose.pose.position.x += random.randrange(-30, 30, 10) 
                search_pose.pose.position.y += random.randrange(-30, 30, 10)
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = geometry_msg.PointStamped(search_pose.header,
                                                                 search_pose.pose.position)
                return 'move'               
                
        else: #beacon is in view
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                     userdata.platform_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_detection = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                #try not to drive through the platform
                self.announcer.say("Back of beacon in view. Move ing to front")
                front_pose = deepcopy(userdata.beacon_approach_pose)
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                pointing_quat = util.pointing_quaternion_2d(front_pose.pose.position,
                                                            userdata.platform_point.point)
                front_pose.pose.orientation = pointing_quat
                userdata.move_target = front_pose
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                self.announcer.say("Beacon in view. Move ing to approach point")                
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'   
            elif np.abs(yaw_error) > .2:
                #this means beacon is in view and we are within 2 meters of approach point
                #but not pointed so well at beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Measure ing beacon position.")
                userdata.stop_on_detection = False
                return 'mount'
        
        return 'aborted'
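
util.pointing_quaternion_2d is likewise not shown. Assuming it builds a planar orientation facing from one point toward another, a sketch consistent with how quaternion_from_euler is used in Code Example #2:

import math
import tf.transformations
from geometry_msgs.msg import Quaternion

def pointing_quaternion_2d(origin, target):
    # quaternion whose yaw points from origin toward target in the x-y plane
    yaw = math.atan2(target.y - origin.y, target.x - origin.x)
    return Quaternion(*tf.transformations.quaternion_from_euler(0, 0, yaw))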