Example #1
    def execute(self, userdata):

        if userdata.search_count >= userdata.search_try_limit:
            self.announcer.say("Search limit reached")
            return 'no_sample'
        
        if not userdata.lighting_optimized:
            userdata.lighting_optimized = True
            return 'optimize_lighting'

        point_list = collections.deque([])

        try:
            for position in userdata.spiral_search_positions:                     
                start_pose = util.get_current_robot_pose(self.tf_listener,
                    userdata.odometry_frame)
                next_pose = util.translate_base_link(self.tf_listener,
                                                     start_pose,
                                                     position['x'],
                                                     position['y'],
                                                     userdata.odometry_frame)
                point_list.append(next_pose.pose.position)
                
            userdata.point_list = point_list
            self.announcer.say("Search ing area")
            userdata.search_count += 1
            return 'sample_search'

        except tf.Exception as e:
            rospy.logwarn("PURSUE_SAMPLE failed to transform %s->%s in GetSearchMoves: %s",
                          'base_link', userdata.odometry_frame, e)
            return 'aborted'
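#userdata.spiral_search_positions is consumed above as a sequence of dicts
#with 'x' and 'y' offsets in the base_link frame. A minimal sketch of a
#plausible generator for such a list; the step size and leg count are
#illustrative assumptions, not values from this codebase:

def make_spiral_positions(step=1.5, legs=8):
    """Hypothetical square-spiral generator for spiral_search_positions."""
    positions = []
    x = y = 0.0
    length = step
    heading = 0  #0:+x, 1:+y, 2:-x, 3:-y
    for leg in range(legs):
        dx, dy = [(1, 0), (0, 1), (-1, 0), (0, -1)][heading]
        x += dx * length
        y += dy * length
        positions.append({'x': x, 'y': y})
        heading = (heading + 1) % 4
        if heading % 2 == 0:
            length += step  #grow the spiral every two legs
    return positions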
    def publish_path_markers(self, event):
        try:
            self.path_marker.pose = util.get_current_robot_pose(self.tf_listener,
                                                                self.reality_frame).pose
        except tf.Exception:
            #transform not yet available, try again on the next timer tick
            return
        self.path_marker.id = self.path_counter
        self.path_counter += 1
        self.path_marker_pub.publish(self.path_marker)
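#The (self, event) signature above matches a rospy.Timer callback; event is a
#rospy.timer.TimerEvent (publish_point_cloud below reads event.current_real).
#A plausible registration in the node's __init__; the 0.5 s period is an
#illustrative guess, not a value from this codebase:
#
#    self.path_timer = rospy.Timer(rospy.Duration(0.5), self.publish_path_markers)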
    def yaw_error(self):
        """
        Compute difference between present orientation and
        goal orientation.
        """
        current_pose = util.get_current_robot_pose(self._tf,
                                                   self.odometry_frame)

        raw_error = euler_from_orientation(self._goal_odom.pose.orientation)[-1] - \
                    euler_from_orientation(current_pose.pose.orientation)[-1]

        return util.unwind(raw_error)
    def position_error(self):
        """
        Compute difference between present position and
        goal position.
        """
        current_pose = util.get_current_robot_pose(self._tf,
                                                   self.odometry_frame)
        return Point(
                self._goal_odom.pose.position.x -
                current_pose.pose.position.x,
                self._goal_odom.pose.position.y -
                current_pose.pose.position.y,
                0)
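#euler_from_orientation and util.unwind are project helpers not shown in this
#excerpt. yaw_error above only behaves correctly if unwind wraps the raw
#difference into [-pi, pi]. A minimal sketch of the assumed behavior, built on
#the standard tf.transformations API:

import math
from tf import transformations

def euler_from_orientation(orientation):
    """Assumed behavior: geometry_msgs Quaternion -> (roll, pitch, yaw),
    so that [-1] selects yaw as used in yaw_error."""
    return transformations.euler_from_quaternion(
            [orientation.x, orientation.y, orientation.z, orientation.w])

def unwind(angle):
    """Assumed behavior: wrap an angle into [-pi, pi) so yaw errors never
    command the long way around."""
    return (angle + math.pi) % (2 * math.pi) - math.pi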
    def publish_point_cloud(self, event):
        #rospy.loginfo("Starting PC generation")

        now = event.current_real
        #get latest available transforms to map
        header = std_msg.Header(0, rospy.Time(0), self.reality_frame)
        target_frame = 'navigation_center_left_camera'
        try:
            current_pose = util.get_current_robot_pose(self.tf_listener,
                    self.reality_frame)
            transform = self.tf_listener.asMatrix(target_frame, header)
        except tf.Exception as e:
            #rospy.loginfo("Failed to transform map for PC")
            #rospy.loginfo("Exception: %s"%e)
            return
Example #6
def calculate_pursuit(_tf, pursuit_point, min_pursuit_distance, odometry_frame):
    try:
        current_pose = util.get_current_robot_pose(_tf, odometry_frame)
        _tf.waitForTransform(odometry_frame,
                            pursuit_point.header.frame_id,
                            pursuit_point.header.stamp,
                            rospy.Duration(1.0))
        point_in_frame = _tf.transformPoint(odometry_frame, pursuit_point)
        pointing_quat = util.pointing_quaternion_2d(current_pose.pose.position, point_in_frame.point)
        distance_to_point = util.point_distance_2d(current_pose.pose.position, point_in_frame.point)
        pursuit_pose = util.pose_translate_by_quat(current_pose,
                                                   (distance_to_point - min_pursuit_distance),
                                                   pointing_quat)
        #recalculate the quaternion pointing from goal point to sample point
        pointing_quat = util.pointing_quaternion_2d(pursuit_pose.pose.position, point_in_frame.point)
        pursuit_pose.pose.orientation = pointing_quat
        pursuit_pose.header.stamp = rospy.Time(0)
        #rospy.loginfo("CALCULATE PURSUIT: pursuit_point: {!s}, point_in_frame: {!s}, pursuit_pose: {!s}".format(pursuit_point, point_in_frame, pursuit_pose))

        return point_in_frame, pursuit_pose
    except tf.Exception as e:
        rospy.logwarn("PURSUE_SAMPLE failed to transform pursuit point %s->%s: %s",
                      pursuit_point.header.frame_id, odometry_frame, e)
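#Geometrically, calculate_pursuit projects a goal pose that stops
#min_pursuit_distance short of the sample, then re-aims the orientation at the
#sample from that standoff point. A hedged usage sketch; the frame name,
#distance, and variable names are illustrative:
#
#    point_in_odom, pursuit_pose = calculate_pursuit(tf_listener,
#                                                    detected_sample_point,
#                                                    1.5,
#                                                    'odom')
#    #pursuit_pose now faces the sample from ~1.5 m away, suitable as a
#    #VFHMoveGoal target_pose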
    def execute(self, userdata):

        self.last_line_blocked = False
        self.announcer.say("On search line, Yaw " + str(int(math.degrees(userdata.line_yaw))))

        while not rospy.is_shutdown():

            current_pose = util.get_current_robot_pose(self.listener)
            distance = util.pose_distance_2d(current_pose, userdata.next_line_pose)
            if distance < userdata.line_replan_distance:
                self.announcer.say("Line is clear, continue ing")

                new_pose = util.pose_translate_by_yaw(userdata.next_line_pose,
                                                      userdata.line_plan_step,
                                                      userdata.line_yaw)

                userdata.next_line_pose = new_pose

            if rospy.Time.now() > userdata.return_time:
                self.announcer.say("Search time expired")
                return 'return_home'

            if self.new_costmap_available:
                self.new_costmap_available = False
                if self.line_blocked(userdata):
                    self.announcer.say("Line is blocked, rotate ing to new line")
                    return 'line_blocked'

            #check preempt after deciding whether or not to calculate next line pose
            if self.preempt_requested():
                rospy.loginfo("PREEMPT REQUESTED IN LINE MANAGER")
                self.service_preempt()
                return 'preempted'

            rospy.sleep(0.2)

        return 'aborted'
    def execute(self, userdata):
        rospy.sleep(1.0) #wait for robot to settle
        current_pose = util.get_current_robot_pose(self.tf_listener)
        yaw_quat = tf.transformations.quaternion_from_euler(0, 0, userdata.line_yaw)
        #the stupid planner may not have left the robot oriented along the
        #search line, so set the orientation to the line yaw anyway
        current_pose.pose.orientation = geometry_msg.Quaternion(*yaw_quat)
        yaw_changes = np.array(range(userdata.blocked_rotation_min,
                                     userdata.blocked_rotation_max,
                                     10))
        yaw_changes = np.radians(yaw_changes)                        
        yaw_changes = np.r_[yaw_changes, -yaw_changes]
        yaw_change = random.choice(yaw_changes)
        line_yaw = userdata.line_yaw + yaw_change
        rotate_pose = util.pose_rotate(current_pose, yaw_change)
        next_line_pose = util.pose_translate_by_yaw(rotate_pose,
                                                    userdata.line_plan_step,
                                                    line_yaw)

        userdata.line_yaw = line_yaw
        userdata.rotate_pose = rotate_pose
        userdata.next_line_pose = next_line_pose
        
        return 'next'
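#For concreteness: with illustrative limits blocked_rotation_min = 60 and
#blocked_rotation_max = 121 (degrees, per the np.radians call above), the
#candidate set built by the code above is:

import random
import numpy as np

yaw_changes = np.radians(np.array(range(60, 121, 10)))  #60..120 deg as radians
yaw_changes = np.r_[yaw_changes, -yaw_changes]          #allow either direction
#-> approx [1.05 1.22 1.40 1.57 1.75 1.92 2.09 -1.05 ... -2.09]
yaw_change = random.choice(yaw_changes)                 #one random rotation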
Example #9
    def execute(self, userdata):
        
        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #ignore samples after this (applies to mount moves)
        #clear previous move_point_map
        userdata.stop_on_sample = False
        userdata.move_point_map = None
        map_header = std_msg.Header(0, rospy.Time(0), userdata.world_fixed_frame)
       
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.world_fixed_frame)        
        
        #this is possibly a highly inaccurate number.   If we get to the approach point,
        #and don't see the beacon, the map is probably messed up
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_beacon:
            userdata.beacon_point = None
            rospy.sleep(4.0)
        
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = math.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_beacon = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Search ing")
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                                    userdata.beacon_approach_pose.pose.position)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                
                userdata.stop_on_beacon = True
                self.tried_spin = False
                return 'move'
            else:
                #gotta add some heroic recovery here
                #we think we're near the beacon, but we don't see it;
                #for now, just try the mirrored approach point on the far side of the beacon
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(userdata.beacon_approach_pose)                
                #invert the approach_point, and try again
                search_pose.pose.position.x *= -1
                #save point
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                    search_pose.pose.position)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = search_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                        
                userdata.stop_on_beacon = True
                self.tried_spin = False
                
                return 'move'               
                
        else: #beacon is in view
            #need to add some stuff here to get to other side of beacon if viewing back
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                       userdata.world_fixed_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_beacon = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                front_pose = deepcopy(userdata.beacon_approach_pose)
                #try not to drive through the platform
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = front_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                          
                self.announcer.say("Back of beacon in view. Move ing to front")
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity,
                                                    orient_at_target = True)
                userdata.move_goal = goal       
                self.announcer.say("Beacon in view. Move ing to approach point")                
                return 'move'   
            elif np.abs(yaw_error) > .2:
                #this means the beacon is in view and we are within 2 meters of
                #the approach point, but not pointed well at the beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Initiate ing platform mount")
                userdata.stop_on_beacon = False
                return 'mount'
        
        return 'aborted'
    def execute(self, userdata):
        #set the move manager key for the move mux
        userdata.active_manager = userdata.manager_dict[self.label]
        userdata.report_sample = False
        userdata.report_beacon = True
        self.beacon_enable.publish(True)        

        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.platform_frame)        
        
        #hopeful distance to approach point
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_detection:
            self.announcer.say("Wait ing for beacon.")
            userdata.beacon_point = None
            start_time = rospy.Time.now()
            while not rospy.core.is_shutdown_requested():
                rospy.sleep(0.5)
                if (userdata.beacon_point is not None) \
                or ((rospy.Time.now() - start_time) > userdata.beacon_observation_delay):
                    break
                
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = np.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_detection = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Moving to approach point.")
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'
            else:
                #we think we're looking at the beacon, but we don't see it, this is probably real bad
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(current_pose)                
                #try a random position nearby-ish, ignore facing
                search_pose.pose.position.x += random.randrange(-30, 30, 10) 
                search_pose.pose.position.y += random.randrange(-30, 30, 10)
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = geometry_msg.PointStamped(search_pose.header,
                                                                 search_pose.pose.position)
                return 'move'               
                
        else: #beacon is in view
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                     userdata.platform_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_detection = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                #try not to drive through the platform
                self.announcer.say("Back of beacon in view. Move ing to front")
                front_pose = deepcopy(userdata.beacon_approach_pose)
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                pointing_quat = util.pointing_quaternion_2d(front_pose.pose.position,
                                                            userdata.platform_point.point)
                front_pose.pose.orientation = pointing_quat
                userdata.move_target = front_pose
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                self.announcer.say("Beacon in view. Move ing to approach point")                
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'   
            elif np.abs(yaw_error) > .2:
                #this means the beacon is in view and we are within 2 meters of
                #the approach point, but not pointed well at the beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Measure ing beacon position.")
                userdata.stop_on_detection = False
                return 'mount'
        
        return 'aborted'
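#Note on the fallback search offsets above: random.randrange(-30, 30, 10)
#draws from {-30, -20, -10, 0, 10, 20}; the stop value is exclusive, so +30 is
#never chosen and the search point lands on a coarse 10 m grid:

import random

offsets = [random.randrange(-30, 30, 10) for _ in range(10)]
#every element is one of -30, -20, -10, 0, 10, 20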
Example #11
    def execute(self, userdata):

        result = samplereturn_msg.GeneralExecutiveResult()
        result.result_string = 'approach failed'
        result.result_int = samplereturn_msg.NamedPoint.NONE
        userdata.action_result = result
        #default velocity for all moves is in simple_mover!
        #initial approach pursuit done at pursuit_velocity
        userdata.lighting_optimized = True
        userdata.search_count = 0
        userdata.grab_count = 0

        rospy.loginfo("SAMPLE_PURSUIT input_point: %s" % (userdata.action_goal.input_point))

        #store the filter_id on entry, this instance of pursuit will only try to pick up this hypothesis
        userdata.latched_filter_id = userdata.action_goal.input_point.filter_id
        userdata.filter_changed = False        

        point, pose = calculate_pursuit(self.tf_listener,
                                        userdata.action_goal.input_point,
                                        userdata.min_pursuit_distance,
                                        userdata.odometry_frame)

        try:
            approach_point = geometry_msg.PointStamped(pose.header,
                                                       pose.pose.position)
            yaw, distance = util.get_robot_strafe(self.tf_listener, approach_point)
            robot_yaw = util.get_current_robot_yaw(self.tf_listener, userdata.odometry_frame)
        except tf.Exception:
            rospy.logwarn("PURSUE_SAMPLE failed to get base_link -> %s transform in 1.0 seconds", sample_frame)
            return 'aborted'
        rospy.loginfo("INITIAL_PURSUIT calculated with distance: {:f}, yaw: {:f} ".format(distance, yaw))

        #this is the initial vfh goal pose
        goal = samplereturn_msg.VFHMoveGoal(target_pose = pose,
                                            move_velocity = userdata.pursuit_velocity,
                                            spin_velocity = userdata.spin_velocity,
                                            orient_at_target = True)
        userdata.pursuit_goal = goal

        #create return destination
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.odometry_frame)
        current_pose.header.stamp = rospy.Time(0)
        goal = samplereturn_msg.VFHMoveGoal(target_pose = current_pose,
                                            move_velocity = userdata.pursuit_velocity,
                                            spin_velocity = userdata.spin_velocity,
                                            orient_at_target = False)
        userdata.return_goal = goal

        #if distance to sample approach point is small, use a simple move
        if distance < userdata.simple_pursuit_threshold:

            facing_yaw = euler_from_orientation(pose.pose.orientation)[-1]
            dyaw = util.unwind(facing_yaw - robot_yaw)

            userdata.approach_strafe = SimpleMoveGoal(type=SimpleMoveGoal.STRAFE,
                                                  angle = yaw,
                                                  distance = distance,
                                                  velocity = userdata.pursuit_velocity)
            userdata.face_sample = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                 angle = dyaw,
                                                 velocity = userdata.spin_velocity)

            return 'simple'

        #if farther than the threshold, make a VFH move to the approach point
        else:

            return 'goal'
Example #12
        
    def is_stop_requested(self):
        return self.stop_requested

    #sets the target point and the start point in odom frame
    def set_path_points_odom(self, target_stamped):
        target_point_odom = None
        try:
            target_point_odom = self._tf.transformPoint(self.odometry_frame,
                                                        target_stamped)
        except tf.Exception as exc:
            rospy.logwarn("VFH MOVE SERVER: could not transform point: %s" % exc)
            self._as.set_aborted(result = VFHMoveResult(outcome = VFHMoveResult.ERROR))
            return False
        self._target_point_odom = target_point_odom
        current_pose = util.get_current_robot_pose(self._tf, self.odometry_frame)
        self._start_point = current_pose.pose.position
        #explicitly signal success; the failure path above returns False
        return True

    def execute_movement(self, move_velocity, spin_velocity):

        ##### Begin rotate to clear section
        #if requested, rotate until the forward sector is clear, then move
        #clear_distance straight ahead, then try to move to the target pose
        if self._rotate_to_clear:
            start_dyaw, d = util.get_robot_strafe(self._tf, self._target_point_odom)
            rot_sign = np.sign(start_dyaw)
            #first try rotating toward the target 120 degrees or so
            error = self._mover.execute_spin(rot_sign*self._clear_first_angle,
                                             max_velocity = spin_velocity,
                                             stop_function=self.clear_check)
            if self.exit_check(): return