Example #1
 def execute_spin(self, rotation, max_velocity=None, acceleration=None, stop_function=None):
   #distance callback: signed arc-length error between the yaw traveled and the
   #commanded rotation, scaled by stern_offset; start_yaw is bound as a default
   #argument so it is captured when the move begins
   return util.unwind(
           self.execute(
               lambda start_yaw=self.current_yaw: (self.current_yaw - start_yaw - rotation) * self.stern_offset,
               dict(stern=-np.pi/2 * np.sign(rotation),
                    port=0.0,
                    starboard=0.0),
               lambda v, sign=np.sign(rotation): self.spin_publisher(v, sign),
               max_velocity,
               acceleration,
               stop_function) / self.stern_offset)
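A worked illustration of the distance callback above, under the assumption that stern_offset is the stern pod's lever arm about the spin center (the numbers below are made up):

import numpy as np

stern_offset = 1.0        # [m], hypothetical lever arm
rotation = np.pi / 2      # commanded spin
start_yaw = 0.0           # yaw captured when the lambda was created
current_yaw = 0.0         # yaw right now, before any motion
#signed arc-length error between the yaw traveled and the commanded rotation
remaining_arc = (current_yaw - start_yaw - rotation) * stern_offset
print(remaining_arc)      # -> -1.5708; dividing by stern_offset and unwinding
                          #    recovers the signed angle still to turn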
Example #2
 def yaw_error(self):
     """
     Compute difference between present orientation and
     goal orientation.
     """
     current_pose = util.get_current_robot_pose(self._tf,
                                                self.odometry_frame)             
     
     raw_error = euler_from_orientation(self._goal_odom.pose.orientation)[-1] - \
                 euler_from_orientation(current_pose.pose.orientation)[-1]
     
     return util.unwind(raw_error)
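Both methods above lean on util.unwind; a minimal sketch of the angle-wrapping behavior they appear to assume (the real helper may be implemented differently):

import numpy as np

def unwind(angle):
    """Wrap an angle in radians onto [-pi, pi)."""
    return (angle + np.pi) % (2 * np.pi) - np.pi

print(unwind(3 * np.pi / 2))   # -> -1.5708, i.e. -pi/2
print(unwind(-3 * np.pi))      # -> -3.1416, i.e. -pi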
Example #3
 def execute_continuous_strafe(self, angle, max_velocity=None, acceleration=None, stop_function=None):
   angle = util.unwind(angle)
   self.strafe_angle = angle
   #calculate the wheel pod angles: fold angles outside [-pi/2, pi/2] back by pi
   if -np.pi/2 <= angle <= np.pi/2:
     target_angle = angle
   elif angle > np.pi/2:
     target_angle = angle - np.pi
   else:  #angle < -np.pi/2
     target_angle = angle + np.pi
   #distance callback returns np.inf: the move runs until stop_function ends it
   return self.execute(
           lambda: np.inf,
           dict(stern=target_angle,
                port=target_angle,
                starboard=target_angle),
           lambda v: self.strafe_publisher(v),
           max_velocity,
           acceleration,
           stop_function)
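A standalone illustration of the pod-angle folding above; the assumption (not visible in this snippet) is that strafe_publisher reverses the wheel velocity whenever the commanded angle has been folded:

import numpy as np

def fold_pod_angle(angle):
    """Fold a strafe angle outside [-pi/2, pi/2] back by pi."""
    angle = (angle + np.pi) % (2 * np.pi) - np.pi   # same wrap as util.unwind
    if angle > np.pi / 2:
        return angle - np.pi
    if angle < -np.pi / 2:
        return angle + np.pi
    return angle

print(np.degrees(fold_pod_angle(np.radians(135.0))))    # -> -45.0
print(np.degrees(fold_pod_angle(np.radians(-170.0))))   # -> 10.0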
Example #4
 def execute_strafe(self, angle, distance, max_velocity=None, acceleration=None, stop_function=None):
   angle = util.unwind(angle)
   self.strafe_angle = angle
   #calculate the wheel pod angles: fold angles outside [-pi/2, pi/2] back by pi
   if -np.pi/2 <= angle <= np.pi/2:
     target_angle = angle
   elif angle > np.pi/2:
     target_angle = angle - np.pi
   else:  #angle < -np.pi/2
     target_angle = angle + np.pi
   #distance callback: distance remaining, with the start position captured as a
   #default argument when the lambda is created
   return self.execute(
           lambda start_position=self.current_position: np.abs(distance - np.linalg.norm(start_position - self.current_position)),
           dict(stern=target_angle,
                port=target_angle,
                starboard=target_angle),
           lambda v: self.strafe_publisher(v),
           max_velocity,
           acceleration,
           stop_function)
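The default-argument trick in the distance callback above freezes the start position at the moment the lambda is created, so progress is measured from where the move began. A standalone sketch of that pattern with made-up values:

import numpy as np

current_position = np.array([0.0, 0.0])    # stand-in for self.current_position
distance = 3.0                             # commanded strafe distance [m]

remaining = lambda start_position=current_position.copy(): \
    np.abs(distance - np.linalg.norm(start_position - current_position))

current_position[:] = [2.0, 0.0]           # the robot has strafed 2 m
print(remaining())                         # -> 1.0 m left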
Example #5
 def run_compute_angle_action(self, goal):
   image_sub = rospy.Subscriber('image', Image, self.image_callback, None, 1)
   self.starting_yaw = util.get_current_robot_yaw(self.tf_listener, self.odom_frame)
   #spin all the way around
   self.turning = True
   self.spin_goal.angle = 2*np.pi
   if not self.execute_move_goal(self.spin_goal):
     self.turning = False
     return
   self.turning = False
   best_angle_index = np.argmax(self.min_lightness)
   best_angle = self.img_angles[best_angle_index]
   #return to best angle
   self.spin_goal.angle = util.unwind(best_angle-self.starting_yaw)
   if not self.execute_move_goal(self.spin_goal):
     return
   self.min_lightness = []
   self.img_angles = []
   image_sub.unregister()
   #return the value of best angle in odometry_frame
   self.action_server.set_succeeded(ComputeAngleResult(best_angle))
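run_compute_angle_action depends on an image_callback that is not shown here; a hedged sketch of what it presumably does while the survey spin is in progress (the CvBridge usage and attribute names are assumptions):

from cv_bridge import CvBridge

def image_callback(self, msg):
    #only sample images while the 2*pi survey spin is running
    if not self.turning:
        return
    #a shared CvBridge would normally live in __init__; created inline for brevity
    img = CvBridge().imgmsg_to_cv2(msg, desired_encoding='mono8')
    #record the darkest pixel of this frame and the yaw it was taken at, so that
    #argmax(self.min_lightness) later picks the best-lit heading
    self.min_lightness.append(img.min())
    self.img_angles.append(util.get_current_robot_yaw(self.tf_listener, self.odom_frame))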
Example #6
  def get_hollow_star(self, spoke_count, spoke_length, offset, hub_radius, spin_step):
      #Build a deque of dictionaries, one per spoke.  The 'yaw' entry is used by
      #some of the state machine states; the 'points' entry is a deque of the
      #points the robot should try to reach along the spoke defined by that yaw.
     
     offset = np.radians(offset)
     yaws = list(np.linspace(0 + offset, -2*np.pi + offset, spoke_count, endpoint=False))
     spokes = []
     for yaw in yaws:
         yaw = util.unwind(yaw)
         radius = hub_radius
         points = deque([])
         while radius < spoke_length:
             header = std_msg.Header(0, rospy.Time(0), self.world_fixed_frame)
             pos = geometry_msg.Point(radius*math.cos(yaw),
                                      radius*math.sin(yaw), 0)
             point = geometry_msg.PointStamped(header, pos)
             radius += spin_step
             if radius > spoke_length: radius = spoke_length
             points.append(point)
         spokes.append({'yaw':yaw,
                        'points':points})
     
     return deque(spokes)
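A hedged usage sketch of get_hollow_star; the argument values are made up and the printout only reflects the structure built above:

spokes = self.get_hollow_star(spoke_count=4, spoke_length=10.0,
                              offset=45.0, hub_radius=2.0, spin_step=3.0)
#each entry pairs a spoke yaw with a deque of PointStamped waypoints stepping
#outward from the hub in spin_step increments
first = spokes[0]
rospy.loginfo("first spoke yaw %.2f with %d waypoints",
              first['yaw'], len(first['points']))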
Example #7
     def execute(self, userdata):
        
        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #ignore samples after this (applies to mount moves)
        #clear previous move_point_map
        userdata.stop_on_sample = False
        userdata.move_point_map = None
        map_header = std_msg.Header(0, rospy.Time(0), userdata.world_fixed_frame)
       
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.world_fixed_frame)        
        
         #this distance may be highly inaccurate; if we reach the approach point
         #and still don't see the beacon, the map is probably wrong
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_beacon:
            userdata.beacon_point = None
            rospy.sleep(4.0)
        
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = math.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_beacon = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Search ing")
                userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                                    userdata.beacon_approach_pose.pose.position)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                
                userdata.stop_on_beacon = True
                self.tried_spin = False
                return 'move'
            else:
                 #we think we're near the beacon, but we don't see it
                 #for now the recovery is just to search roughly 30 m away on the
                 #other side of the beacon; something smarter belongs here
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(userdata.beacon_approach_pose)                
                #invert the approach_point, and try again
                search_pose.pose.position.x *= -1
                #save point
                 userdata.move_point_map = geometry_msg.PointStamped(map_header,
                                                                     search_pose.pose.position)
                 goal = samplereturn_msg.VFHMoveGoal(target_pose = search_pose,
                                                     move_velocity = userdata.move_velocity,
                                                     spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                        
                userdata.stop_on_beacon = True
                self.tried_spin = False
                
                return 'move'               
                
        else: #beacon is in view
            #need to add some stuff here to get to other side of beacon if viewing back
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                       userdata.world_fixed_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_beacon = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                front_pose = deepcopy(userdata.beacon_approach_pose)
                #try not to drive through the platform
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                goal = samplereturn_msg.VFHMoveGoal(target_pose = front_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity)
                userdata.move_goal = goal                          
                self.announcer.say("Back of beacon in view. Move ing to front")
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                goal = samplereturn_msg.VFHMoveGoal(target_pose = userdata.beacon_approach_pose,
                                                    move_velocity = userdata.move_velocity,
                                                    spin_velocity = userdata.spin_velocity,
                                                    orient_at_target = True)
                userdata.move_goal = goal       
                self.announcer.say("Beacon in view. Move ing to approach point")                
                return 'move'   
            elif np.abs(yaw_error) > .2:
                 #beacon is in view and we are within 2 meters of the approach point,
                 #but not pointed well enough at the beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Initiate ing platform mount")
                userdata.stop_on_beacon = False
                return 'mount'
        
        return 'aborted'
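This state presumably sits inside a SMACH machine that consumes the outcomes returned above; a hedged wiring sketch in which the class name, constructor arguments, and target state names are placeholders, not taken from the original machine:

import smach

sm = smach.StateMachine(outcomes=['complete', 'preempted', 'aborted'])
with sm:
    smach.StateMachine.add('APPROACH_BEACON',
                           ApproachBeacon(tf_listener, announcer),   # hypothetical class and args
                           transitions={'spin': 'BEACON_SEARCH_SPIN',
                                        'move': 'BEACON_SEARCH_MOVE',
                                        'mount': 'MOUNT_MANAGER',
                                        'preempted': 'preempted',
                                        'aborted': 'aborted'})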
Example #8
    def execute(self, userdata):
        #set the move manager key for the move mux
        userdata.active_manager = userdata.manager_dict[self.label]
        userdata.report_sample = False
        userdata.report_beacon = True
        self.beacon_enable.publish(True)        

        if self.preempt_requested():
            self.service_preempt()
            return 'preempted'
        
        #get our position in map
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.platform_frame)        
        
        #hopeful distance to approach point
        distance_to_approach_point = util.point_distance_2d(current_pose.pose.position,
                                                            userdata.beacon_approach_pose.pose.position)

        #if we have been ignoring beacon detections prior to this,
        #we should clear them here, and wait for a fresh detection
        if not userdata.stop_on_detection:
            self.announcer.say("Wait ing for beacon.")
            userdata.beacon_point = None
            start_time = rospy.Time.now()
            while not rospy.core.is_shutdown_requested():
                rospy.sleep(0.5)
                if (userdata.beacon_point is not None) \
                or ((rospy.Time.now() - start_time) > userdata.beacon_observation_delay):
                    break
                
        if userdata.beacon_point is None: #beacon not in view
            #first hope, we can spin slowly and see the beacon
            if not self.tried_spin:
                self.announcer.say("Beacon not in view. Rotate ing")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = np.pi*2,
                                                      velocity = userdata.spin_velocity)        
                userdata.stop_on_detection = True
                self.tried_spin = True
                return 'spin'
            #already tried a spin, drive towards beacon_approach_point, stopping on detection
            elif distance_to_approach_point > 5.0:
                #we think we're far from approach_point, so try to go there
                self.announcer.say("Beacon not in view. Moving to approach point.")
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'
            else:
                #we should be close enough to see the beacon, but we don't; the
                #localization is probably badly off
                self.announcer.say("Close to approach point in map.  Beacon not in view.  Search ing")
                search_pose = deepcopy(current_pose)                
                #try a random nearby position (offsets are multiples of 10 m in [-30, 20]), ignoring facing
                search_pose.pose.position.x += random.randrange(-30, 30, 10) 
                search_pose.pose.position.y += random.randrange(-30, 30, 10)
                userdata.stop_on_detection = True
                self.tried_spin = False
                userdata.move_target = geometry_msg.PointStamped(search_pose.header,
                                                                 search_pose.pose.position)
                return 'move'               
                
        else: #beacon is in view
            current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                     userdata.platform_frame)
            yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                                userdata.platform_point.point)
            yaw_error = util.unwind(yaw_to_platform - current_yaw)
            userdata.stop_on_detection = False
            self.tried_spin = False            
            #on the back side
            if current_pose.pose.position.x < 0:
                #try not to drive through the platform
                self.announcer.say("Back of beacon in view. Move ing to front")
                front_pose = deepcopy(userdata.beacon_approach_pose)
                front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
                pointing_quat = util.pointing_quaternion_2d(front_pose.pose.position,
                                                            userdata.platform_point.point)
                front_pose.pose.orientation = pointing_quat
                userdata.move_target = front_pose
                return 'move'   
            elif distance_to_approach_point > 2.0:
                #on correct side of beacon, but far from approach point
                self.announcer.say("Beacon in view. Move ing to approach point")                
                userdata.move_target = userdata.beacon_approach_pose
                return 'move'   
            elif np.abs(yaw_error) > .2:
                #beacon is in view and we are within 2 meters of the approach point,
                #but not pointed well enough at the beacon
                self.announcer.say("At approach point. Rotate ing to beacon")
                userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                      angle = yaw_error,
                                                      velocity = userdata.spin_velocity)                   
                return 'spin'
            else:    
                self.announcer.say("Measure ing beacon position.")
                userdata.stop_on_detection = False
                return 'mount'
        
        return 'aborted'
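Both execute methods above rely on util.pointing_yaw and util.pointing_quaternion_2d; a minimal sketch of the planar behavior they appear to assume (the real helpers may differ):

import numpy as np
import geometry_msgs.msg as geometry_msg
from tf.transformations import quaternion_from_euler

def pointing_yaw(from_point, to_point):
    """Planar heading from one geometry_msgs Point toward another."""
    return np.arctan2(to_point.y - from_point.y, to_point.x - from_point.x)

def pointing_quaternion_2d(from_point, to_point):
    """Quaternion about z that faces from_point toward to_point."""
    q = quaternion_from_euler(0.0, 0.0, pointing_yaw(from_point, to_point))
    return geometry_msg.Quaternion(*q)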
Example #9
    def execute(self, userdata):

        result = samplereturn_msg.GeneralExecutiveResult()
        result.result_string = 'approach failed'
        result.result_int = samplereturn_msg.NamedPoint.NONE
        userdata.action_result = result
        #default velocity for all moves is in simple_mover!
        #initial approach pursuit done at pursuit_velocity
        userdata.lighting_optimized = True
        userdata.search_count = 0
        userdata.grab_count = 0

        rospy.loginfo("SAMPLE_PURSUIT input_point: %s" % (userdata.action_goal.input_point))

        #store the filter_id on entry, this instance of pursuit will only try to pick up this hypothesis
        userdata.latched_filter_id = userdata.action_goal.input_point.filter_id
        userdata.filter_changed = False        

        point, pose = calculate_pursuit(self.tf_listener,
                                        userdata.action_goal.input_point,
                                        userdata.min_pursuit_distance,
                                        userdata.odometry_frame)

        try:
            approach_point = geometry_msg.PointStamped(pose.header,
                                                       pose.pose.position)
            yaw, distance = util.get_robot_strafe(self.tf_listener, approach_point)
            robot_yaw = util.get_current_robot_yaw(self.tf_listener, userdata.odometry_frame)
        except tf.Exception:
            rospy.logwarn("PURSUE_SAMPLE failed to get base_link -> %s transform in 1.0 seconds", sample_frame)
            return 'aborted'
        rospy.loginfo("INITIAL_PURSUIT calculated with distance: {:f}, yaw: {:f} ".format(distance, yaw))

        #this is the initial vfh goal pose
        goal = samplereturn_msg.VFHMoveGoal(target_pose = pose,
                                            move_velocity = userdata.pursuit_velocity,
                                            spin_velocity = userdata.spin_velocity,
                                            orient_at_target = True)
        userdata.pursuit_goal = goal

        #create return destination
        current_pose = util.get_current_robot_pose(self.tf_listener,
                                                   userdata.odometry_frame)
        current_pose.header.stamp = rospy.Time(0)
        goal = samplereturn_msg.VFHMoveGoal(target_pose = current_pose,
                                            move_velocity = userdata.pursuit_velocity,
                                            spin_velocity = userdata.spin_velocity,
                                            orient_at_target = False)
        userdata.return_goal = goal

        #if distance to sample approach point is small, use a simple move
        if distance < userdata.simple_pursuit_threshold:

            facing_yaw = euler_from_orientation(pose.pose.orientation)[-1]
            dyaw = util.unwind(facing_yaw - robot_yaw)

            userdata.approach_strafe = SimpleMoveGoal(type=SimpleMoveGoal.STRAFE,
                                                  angle = yaw,
                                                  distance = distance,
                                                  velocity = userdata.pursuit_velocity)
            userdata.face_sample = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                 angle = dyaw,
                                                 velocity = userdata.spin_velocity)

            return 'simple'

        #if further than threshold make a vfh move to approach point
        else:

            return 'goal'
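calculate_pursuit is referenced but not shown; a hedged sketch of what it appears to provide, judging by how point and pose are used above (an assumption, not the original helper):

import numpy as np

def calculate_pursuit(tf_listener, input_point, min_pursuit_distance, odometry_frame):
    #sample detection expressed in the odometry frame
    point = tf_listener.transformPoint(odometry_frame, input_point)
    #approach pose backed off from the sample toward the robot by
    #min_pursuit_distance, oriented to face the sample
    robot_pose = util.get_current_robot_pose(tf_listener, odometry_frame)
    yaw = util.pointing_yaw(robot_pose.pose.position, point.point)
    pose = geometry_msg.PoseStamped(header=point.header)
    pose.pose.position.x = point.point.x - min_pursuit_distance * np.cos(yaw)
    pose.pose.position.y = point.point.y - min_pursuit_distance * np.sin(yaw)
    pose.pose.orientation = util.pointing_quaternion_2d(pose.pose.position, point.point)
    return point, pose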