示例#1
0
    def localization_check(self, event):
        """Timer callback: re-issue the VFH move goal if the beacon
        localization correction has shifted the goal point enough.

        Compares the goal point saved in the map frame at plan time
        (userdata.move_point_map), transformed into the odometry frame,
        against the currently active move goal position.  If they differ by
        more than self.replan_threshold, publishes an updated move_goal.
        """
        saved_point_map = self.state_machine.userdata.move_point_map

        #only act if a map-frame goal point was saved at plan time
        if saved_point_map is not None:

            goal_point_odom = self.state_machine.userdata.move_goal.target_pose.pose.position

            #take the saved map-frame point, transform it back into odom, and
            #compare with the goal position from the last plan/correction
            try:
                self.tf_listener.waitForTransform(self.odometry_frame,
                                                  self.world_fixed_frame,
                                                  rospy.Time(0),
                                                  rospy.Duration(1.0))
                saved_point_odom = self.tf_listener.transformPoint(self.odometry_frame, saved_point_map)
            except tf.Exception:
                rospy.logwarn("LEVEL_TWO beacon_update failed to transform target point")
                #BUGFIX: bail out here; previously execution fell through and
                #raised NameError on the undefined saved_point_odom below
                return

            correction_error = util.point_distance_2d(goal_point_odom,
                                                      saved_point_odom.point)

            if (correction_error > self.replan_threshold):
                self.announcer.say("Beacon correction.")
                #update the VFH move goal with the corrected position
                goal = deepcopy(self.state_machine.userdata.move_goal)
                goal.target_pose.pose.position = saved_point_odom.point
                self.state_machine.userdata.move_goal = goal
示例#2
0
    def execute(self, userdata):
        """Settle, verify the sample detection persists, then compute the
        strafe geometry (yaw, distance) from the manipulator arm frame to the
        target sample.

        Clears userdata.target_sample and waits up to settle_time for a
        detection callback (elsewhere) to repopulate it; returns 'point_lost'
        if it does not reappear, 'aborted' on a tf failure.
        """
        #wait the settle time, then see if the sample is still there
        #timeout for this is also settle time
        rospy.sleep(userdata.settle_time)
        userdata.target_sample = None
        start_time = rospy.Time.now()
        while not rospy.core.is_shutdown_requested():
            rospy.sleep(0.1)
            if userdata.target_sample is not None:
                break #sample is still there, approach
            if (rospy.Time.now() - start_time) > userdata.settle_time:
                return 'point_lost' #sample disappeared, give up

        try:
            point = userdata.target_sample
            self.tf_listener.waitForTransform('manipulator_arm',
                                             point.header.frame_id,
                                             point.header.stamp,
                                             rospy.Duration(2.0))
            point_in_base = self.tf_listener.transformPoint('manipulator_arm',
                                                            point).point
            robot_origin = geometry_msg.Point(0,0,0)
            yaw = util.pointing_yaw(robot_origin, point_in_base)        
            distance = util.point_distance_2d(robot_origin, point_in_base)
            robot_yaw = util.get_current_robot_yaw(self.tf_listener, userdata.odometry_frame)
        except tf.Exception, e:
            #NOTE(review): message says "1.0 seconds" but the wait above is
            #2.0 — confirm which is intended
            rospy.logwarn("PURSUE_SAMPLE failed to get manipulator_arm -> %s transform in 1.0 seconds: %s", point.header.frame_id, e)
            return 'aborted'
 def execute(self, userdata):
     """Wait for the beacon localization correction to converge (three
     consecutive corrections within beacon_mount_tolerance), then command
     the final strafe move onto the platform.

     Gives up waiting after 60 seconds and mounts anyway.  Returns 'move'.
     """
     #wait a sec for beacon pose to adjust the localization filter
     saved_point_odom = None
     #wait for a maximum of one minute, then mount anyway
     timeout_time = rospy.Time.now() + rospy.Duration(60.0)
     in_tolerance_count = 0
     while not rospy.core.is_shutdown_requested():
         rospy.sleep(3.0)
         try:
             platform_point_odom = self.tf_listener.transformPoint(userdata.odometry_frame,
                                                                   userdata.platform_point)
         except tf.Exception:
             rospy.logwarn("LEVEL_TWO calculate_mount failed to transform platform point")
             #BUGFIX: retry the loop; previously execution fell through and
             #used platform_point_odom, which is undefined if the very first
             #transform fails (NameError) and stale on later failures
             continue
         if saved_point_odom is None:
             saved_point_odom = platform_point_odom
         else:
             #distance between this correction and the previous one
             correction_error = util.point_distance_2d(platform_point_odom.point,
                                                       saved_point_odom.point)
             saved_point_odom = platform_point_odom
             if correction_error < userdata.beacon_mount_tolerance:
                 in_tolerance_count += 1
             else:
                 in_tolerance_count = 0
             #if we get 3 in_tolerance corrections, mount
             if in_tolerance_count > 2:
                 break
             if rospy.Time.now() > timeout_time:
                 self.announcer.say("Beacon correction did not converge.")
                 break
             else:
                 self.announcer.say("Correction. {:.3f}".format(correction_error))

     yaw, distance = util.get_robot_strafe(self.tf_listener,
                                          userdata.platform_point)
     userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.STRAFE,
                                           angle = yaw,
                                           distance = distance,
                                           velocity = 0.5)
     self.announcer.say("Initiate ing mount move.")
     return 'move'
示例#4
0
def calculate_pursuit(_tf, pursuit_point, min_pursuit_distance, odometry_frame):
    """Transform pursuit_point into odometry_frame and compute a pursuit pose
    stopped min_pursuit_distance short of the point, oriented toward it.

    Returns (point_in_frame, pursuit_pose) on success, or None if the tf
    transform fails (failure is logged).
    """
    try:
        current_pose = util.get_current_robot_pose(_tf, odometry_frame)
        _tf.waitForTransform(odometry_frame,
                             pursuit_point.header.frame_id,
                             pursuit_point.header.stamp,
                             rospy.Duration(1.0))
        point_in_frame = _tf.transformPoint(odometry_frame, pursuit_point)
        #point from robot toward the sample, and stop short of it
        pointing_quat = util.pointing_quaternion_2d(current_pose.pose.position, point_in_frame.point)
        distance_to_point = util.point_distance_2d(current_pose.pose.position, point_in_frame.point)
        pursuit_pose = util.pose_translate_by_quat(current_pose,
                                                   (distance_to_point - min_pursuit_distance),
                                                   pointing_quat)
        #recalculate the quaternion pointing from goal point to sample point
        pointing_quat = util.pointing_quaternion_2d(pursuit_pose.pose.position, point_in_frame.point)
        pursuit_pose.pose.orientation = pointing_quat
        pursuit_pose.header.stamp = rospy.Time(0)
        return point_in_frame, pursuit_pose
    except tf.Exception as e:
        #py2.6+/py3 compatible except syntax; failure return made explicit
        rospy.logwarn("PURSUE_SAMPLE failed to transform pursuit point %s->%s: %s",
                      pursuit_point.header.frame_id, odometry_frame, e)
        return None
 def execute(self, userdata):
     """One iteration of the visual-servo strafe loop.

     Clears detected_sample, waits settle_time for the detector (elsewhere)
     to repopulate it, then either: latches the sample and returns 'complete'
     (within final_tolerance, or try_limit exceeded), returns 'point_lost' if
     the detection vanished, or commands another corrective strafe ('move').
     """
     userdata.stop_on_sample = False
     userdata.detected_sample = None
     rospy.sleep(userdata.servo_params['settle_time'])
     if self.try_count > userdata.servo_params['try_limit']:
         self.announcer.say("Servo exceeded try limit")
         rospy.loginfo("SERVO STRAFE failed to hit tolerance before try_limit: %s" % (userdata.servo_params['try_limit']))
         self.try_count = 0
         #deploy gripper anyway, we still see sample, but it is not positioned well
         userdata.latched_sample = userdata.detected_sample
         return 'complete'
     
     if userdata.detected_sample is None:
         self.announcer.say("Servo lost sample.")
         self.try_count = 0
         return 'point_lost'
     else:
         detected_sample = userdata.detected_sample
         sample_time = detected_sample.header.stamp
         sample_frame = detected_sample.header.frame_id
         #NOTE(review): this loop runs at most once — it breaks on success
         #and returns on tf.Exception; the while guard only covers shutdown
         while not rospy.core.is_shutdown_requested():
             try:
                 self.tf_listener.waitForTransform('manipulator_arm',
                         sample_frame, sample_time, rospy.Duration(1.0))
                 point_in_manipulator = self.tf_listener.transformPoint('manipulator_arm',
                                                              detected_sample).point
                 break
             except tf.Exception, exc:
                 rospy.logwarn("SERVO CONTROLLER failed to get manipulator_arm -> {!s} transform.  Exception: {!s}".format(sample_frame, exc))
                 self.try_count = 0
                 return 'aborted'
         rospy.loginfo("SERVO point in manipulator before correction %s: " %(point_in_manipulator))
         #apply the calibrated manipulator offset to the detected point
         point_in_manipulator.x += userdata.manipulator_correction['x']
         point_in_manipulator.y += userdata.manipulator_correction['y']
         origin = geometry_msg.Point(0,0,0)
         distance = util.point_distance_2d(origin, point_in_manipulator)
         if distance < userdata.servo_params['final_tolerance']:
             #close enough: latch the sample and finish
             self.try_count = 0
             userdata.latched_sample = userdata.detected_sample
             self.announcer.say("Servo complete at %.1f millimeters." % (distance*1000))    
             return 'complete'
         elif distance < userdata.servo_params['initial_tolerance']:
             #close: use the slower final approach speeds
             velocity = userdata.servo_params['final_velocity']
             accel = userdata.servo_params['final_accel']
         else:
             velocity = userdata.servo_params['initial_velocity']
             accel = userdata.servo_params['initial_accel']
         yaw = util.pointing_yaw(origin, point_in_manipulator)
         userdata.simple_move =  SimpleMoveGoal(type=SimpleMoveGoal.STRAFE,
                                                angle = yaw,
                                                distance = distance,
                                                velocity = velocity,
                                                acceleration = accel)
         self.announcer.say("Servo ing, distance, %.1f centimeters" % (distance*100))
         rospy.loginfo("DETECTED SAMPLE IN manipulator_arm frame (corrected): %s" % (point_in_manipulator))
         self.try_count += 1
         return 'move'
示例#6
0
    def execute(self, userdata):
        """Search for the home beacon and navigate to its approach point.

        If the beacon is not in view: spin once, then drive to the
        beacon_approach_pose, then search the far side of the platform.
        Once in view: get to the front side, reach the approach point,
        face the platform, then hand off to the mount sequence.

        Returns one of 'spin', 'move', 'mount', 'preempted', 'aborted'.
        """
        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'

        #ignore samples after this (applies to mount moves)
        #clear previous move_point_map
        userdata.stop_on_sample = False
        userdata.move_point_map = None
        map_header = std_msg.Header(0, rospy.Time(0), userdata.world_fixed_frame)

        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.world_fixed_frame)

        #this is possibly a highly inaccurate number.   If we get to the approach point,
        #and don't see the beacon, the map is probably messed up
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_beacon:
            userdata.beacon_point = None
            rospy.sleep(4.0)

        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = math.pi*2,
                                                      velocity = userdata.spin_velocity)
                userdata.stop_on_beacon = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Search ing")
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                                    userdata.beacon_approach_pose.pose.position)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal
                userdata.stop_on_beacon = True
                self.tried_spin = False
                return 'move'
            else:
                #we think we're near the beacon, but we don't see it:
                #move 30 m to the other side of the beacon that we don't know about
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(userdata.beacon_approach_pose)
                #invert the approach_point, and try again
                search_pose.pose.position.x *= -1
                #save point
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                    search_pose.pose.position)
                #BUGFIX: use the local search_pose built above;
                #userdata.search_pose does not exist
                goal = samplereturn_msg.VFHMoveGoal(target_pose = search_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal
                userdata.stop_on_beacon = True
                self.tried_spin = False

                return 'move'

        else: #beacon is in view
            #need to add some stuff here to get to other side of beacon if viewing back
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                       userdata.world_fixed_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_beacon = False
            self.tried_spin = False
            #on the back side
            if current_pose.pose.position.x < 0:
                front_pose = deepcopy(userdata.beacon_approach_pose)
                #try not to drive through the platform
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = front_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal
                self.announcer.say("Back of beacon in view. Move ing to front")
                return 'move'
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity,
                                                    orient_at_target = True)
                userdata.move_goal = goal
                self.announcer.say("Beacon in view. Move ing to approach point")
                return 'move'
            elif np.abs(yaw_error) > .2:
                #this means beacon is in view and we are within 1 meter of approach point
                #but not pointed so well at beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)
                return 'spin'
            else:
                self.announcer.say("Initiate ing platform mount")
                userdata.stop_on_beacon = False
                return 'mount'

        #defensive fallthrough; both branches above always return
        return 'aborted'
    def execute(self, userdata):
        """Search for the beacon in the platform frame and approach it.

        Same search strategy as the map-frame version: spin, drive to the
        approach point, random local search; once in view, get to the front,
        reach the approach point, face the platform, then 'mount'.

        Returns one of 'spin', 'move', 'mount', 'preempted', 'aborted'.
        """
        #set the move manager key for the move mux
        userdata.active_manager = userdata.manager_dict[self.label]
        userdata.report_sample = False
        userdata.report_beacon = True
        self.beacon_enable.publish(True)        

        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.platform_frame)        
        
        #hopeful distance to approach point
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_detection:
            self.announcer.say("Wait ing for beacon.")
            userdata.beacon_point = None
            start_time = rospy.Time.now()
            while not rospy.core.is_shutdown_requested():
                rospy.sleep(0.5)
                if (userdata.beacon_point is not None) \
                or ((rospy.Time.now() - start_time) > userdata.beacon_observation_delay):
                    break
                
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = np.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_detection = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Moving to approach point.")
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'
            else:
                #we think we're looking at the beacon, but we don't see it, this is probably real bad
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(current_pose)                
                #try a random position nearby-ish, ignore facing
                #NOTE(review): randrange(-30, 30, 10) can yield -30..20, never
                #+30 — confirm the asymmetry is intended
                search_pose.pose.position.x += random.randrange(-30, 30, 10) 
                search_pose.pose.position.y += random.randrange(-30, 30, 10)
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = geometry_msg.PointStamped(search_pose.header,
                                                                 search_pose.pose.position)
                return 'move'               
                
        else: #beacon is in view
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                     userdata.platform_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_detection = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                #try not to drive through the platform
                self.announcer.say("Back of beacon in view. Move ing to front")
                front_pose = deepcopy(userdata.beacon_approach_pose)
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                pointing_quat = util.pointing_quaternion_2d(front_pose.pose.position,
                                                            userdata.platform_point.point)
                front_pose.pose.orientation = pointing_quat
                userdata.move_target = front_pose
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                self.announcer.say("Beacon in view. Move ing to approach point")                
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'   
            elif np.abs(yaw_error) > .2:
                #this means beacon is in view and we are within 1 meter of approach point
                #but not pointed so well at beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Measure ing beacon position.")
                userdata.stop_on_detection = False
                return 'mount'
        
        #unreachable: both branches above always return
        return 'aborted'