def execute(self, userdata):
    """Wait settle_time, then check that the sample detection is still present.

    Clears target_sample, sleeps, and polls for a fresh detection for up to
    settle_time.  On re-detection, transforms the sample point into the
    manipulator_arm frame and computes pointing yaw / 2d distance / robot yaw.

    Returns 'point_lost' if no fresh detection arrives in time, 'aborted' on
    tf failure.
    """
    # wait the settle time, then see if the sample is still there
    # timeout for this is also settle time
    rospy.sleep(userdata.settle_time)
    userdata.target_sample = None
    start_time = rospy.Time.now()
    while not rospy.core.is_shutdown_requested():
        rospy.sleep(0.1)
        if userdata.target_sample is not None:
            # sample is still there, approach
            break
        if (rospy.Time.now() - start_time) > userdata.settle_time:
            # sample disappeared, give up
            return 'point_lost'
    try:
        point = userdata.target_sample
        self.tf_listener.waitForTransform('manipulator_arm',
                                          point.header.frame_id,
                                          point.header.stamp,
                                          rospy.Duration(2.0))
        point_in_base = self.tf_listener.transformPoint('manipulator_arm',
                                                        point).point
        robot_origin = geometry_msg.Point(0, 0, 0)
        yaw = util.pointing_yaw(robot_origin, point_in_base)
        distance = util.point_distance_2d(robot_origin, point_in_base)
        robot_yaw = util.get_current_robot_yaw(self.tf_listener,
                                               userdata.odometry_frame)
    # fix: was Python-2-only "except tf.Exception, e:" — "as e" is valid in
    # both Python 2.6+ and Python 3
    except tf.Exception as e:
        # fix: message claimed "1.0 seconds" but waitForTransform above uses
        # a 2.0 second timeout
        rospy.logwarn("PURSUE_SAMPLE failed to get manipulator_arm -> %s "
                      "transform in 2.0 seconds: %s",
                      point.header.frame_id, e)
        return 'aborted'
    # NOTE(review): in this chunk the success path falls through and returns
    # None (yaw/distance/robot_yaw unused) — confirm against the full file
    # whether the function continues beyond this point.
def run_compute_angle_action(self, goal):
    """Spin the robot one full revolution while sampling camera images,
    then rotate back to the heading whose recorded minimum lightness was
    largest and report it as the action result.

    Fix: the image subscriber and the min_lightness/img_angles accumulators
    are now cleaned up on every exit path (the original leaked the
    subscription and kept stale samples when either move failed, corrupting
    the next run's argmax).
    """
    image_sub = rospy.Subscriber('image', Image, self.image_callback, None, 1)
    self.starting_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                   self.odom_frame)
    try:
        # spin all the way around, collecting images via image_callback
        self.turning = True
        self.spin_goal.angle = 2 * np.pi
        spin_ok = self.execute_move_goal(self.spin_goal)
        # stop gating callbacks before any further moves, as the original did
        self.turning = False
        if not spin_ok:
            return
        # heading whose darkest observed pixel was brightest
        best_angle_index = np.argmax(self.min_lightness)
        best_angle = self.img_angles[best_angle_index]
        # return to best angle
        self.spin_goal.angle = util.unwind(best_angle - self.starting_yaw)
        if not self.execute_move_goal(self.spin_goal):
            return
        # return the value of best angle in odometry_frame
        self.action_server.set_succeeded(ComputeAngleResult(best_angle))
    finally:
        # always release the image stream and clear per-run accumulators
        self.turning = False
        self.min_lightness = []
        self.img_angles = []
        image_sub.unregister()
def execute(self, userdata):
    """Beacon approach state (map-frame variant).

    Decides the next action based on whether the beacon is currently
    detected and how far the robot is from the beacon approach pose.

    Returns one of: 'preempted', 'spin', 'move', 'mount', 'aborted'.
    """
    if self.preempt_requested():
        self.service_preempt()
        return 'preempted'
    # ignore samples after this (applies to mount moves)
    # clear previous move_point_map
    userdata.stop_on_sample = False
    userdata.move_point_map = None
    map_header = std_msg.Header(0, rospy.Time(0), userdata.world_fixed_frame)
    # get our position in map
    current_pose = util.get_current_robot_pose(self.tf_listener,
                                               userdata.world_fixed_frame)
    # this is possibly a highly inaccurate number.  If we get to the approach
    # point and don't see the beacon, the map is probably messed up
    distance_to_approach_point = util.point_distance_2d(
            current_pose.pose.position,
            userdata.beacon_approach_pose.pose.position)
    # if we have been ignoring beacon detections prior to this,
    # we should clear them here, and wait for a fresh detection
    if not userdata.stop_on_beacon:
        userdata.beacon_point = None
        rospy.sleep(4.0)
    if userdata.beacon_point is None:  # beacon not in view
        # first hope, we can spin slowly and see the beacon
        if not self.tried_spin:
            self.announcer.say("Beacon not in view. Rotate ing")
            userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                  angle=math.pi * 2,
                                                  velocity=userdata.spin_velocity)
            userdata.stop_on_beacon = True
            self.tried_spin = True
            return 'spin'
        # already tried a spin: drive towards beacon_approach_point,
        # stopping on detection
        elif distance_to_approach_point > 5.0:
            # we think we're far from approach_point, so try to go there
            self.announcer.say("Beacon not in view. Search ing")
            userdata.move_point_map = geometry_msg.PointStamped(
                    map_header,
                    userdata.beacon_approach_pose.pose.position)
            goal = samplereturn_msg.VFHMoveGoal(
                    target_pose=userdata.beacon_approach_pose,
                    move_velocity=userdata.move_velocity,
                    spin_velocity=userdata.spin_velocity)
            userdata.move_goal = goal
            userdata.stop_on_beacon = True
            self.tried_spin = False
            return 'move'
        else:
            # we think we're near the beacon, but we don't see it.
            # heroic fallback: search the mirrored point on the other side
            # of the beacon that we don't know about
            self.announcer.say("Close to approach point in map. "
                               "Beacon not in view. Search ing")
            search_pose = deepcopy(userdata.beacon_approach_pose)
            # invert the approach_point, and try again
            search_pose.pose.position.x *= -1
            # save point
            userdata.move_point_map = geometry_msg.PointStamped(
                    map_header, search_pose.pose.position)
            # fix: the original passed userdata.search_pose (never set) as
            # target_pose; the local inverted search_pose is the intended
            # target (matches the sibling implementation of this state)
            goal = samplereturn_msg.VFHMoveGoal(
                    target_pose=search_pose,
                    move_velocity=userdata.move_velocity,
                    spin_velocity=userdata.spin_velocity)
            userdata.move_goal = goal
            userdata.stop_on_beacon = True
            self.tried_spin = False
            return 'move'
    else:  # beacon is in view
        # need to get to the other side of the beacon if viewing its back
        current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                 userdata.world_fixed_frame)
        yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                            userdata.platform_point.point)
        yaw_error = util.unwind(yaw_to_platform - current_yaw)
        userdata.stop_on_beacon = False
        self.tried_spin = False
        if current_pose.pose.position.x < 0:
            # on the back side
            front_pose = deepcopy(userdata.beacon_approach_pose)
            # try not to drive through the platform
            front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
            goal = samplereturn_msg.VFHMoveGoal(
                    target_pose=front_pose,
                    move_velocity=userdata.move_velocity,
                    spin_velocity=userdata.spin_velocity)
            userdata.move_goal = goal
            self.announcer.say("Back of beacon in view. Move ing to front")
            return 'move'
        elif distance_to_approach_point > 2.0:
            # on correct side of beacon, but far from approach point
            goal = samplereturn_msg.VFHMoveGoal(
                    target_pose=userdata.beacon_approach_pose,
                    move_velocity=userdata.move_velocity,
                    spin_velocity=userdata.spin_velocity,
                    orient_at_target=True)
            userdata.move_goal = goal
            self.announcer.say("Beacon in view. Move ing to approach point")
            return 'move'
        elif np.abs(yaw_error) > .2:
            # beacon in view and we are near the approach point,
            # but not pointed so well at the beacon
            self.announcer.say("At approach point. Rotate ing to beacon")
            userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                  angle=yaw_error,
                                                  velocity=userdata.spin_velocity)
            return 'spin'
        else:
            self.announcer.say("Initiate ing platform mount")
            userdata.stop_on_beacon = False
            return 'mount'
    return 'aborted'
def execute(self, userdata):
    """Beacon approach state (platform-frame variant).

    Enables beacon reporting, then decides the next action based on
    whether the beacon is currently detected and how far the robot is
    from the beacon approach pose.

    Returns one of: 'preempted', 'spin', 'move', 'mount', 'aborted'.
    """
    # set the move manager key for the move mux
    userdata.active_manager = userdata.manager_dict[self.label]
    userdata.report_sample = False
    userdata.report_beacon = True
    self.beacon_enable.publish(True)
    if self.preempt_requested():
        self.service_preempt()
        return 'preempted'
    # get our position in map
    current_pose = util.get_current_robot_pose(self.tf_listener,
                                               userdata.platform_frame)
    # hopeful distance to approach point
    distance_to_approach_point = util.point_distance_2d(
            current_pose.pose.position,
            userdata.beacon_approach_pose.pose.position)
    # if we have been ignoring beacon detections prior to this,
    # we should clear them here, and wait for a fresh detection
    if not userdata.stop_on_detection:
        self.announcer.say("Wait ing for beacon.")
        userdata.beacon_point = None
        start_time = rospy.Time.now()
        # poll until a detection arrives or the observation delay elapses
        while not rospy.core.is_shutdown_requested():
            rospy.sleep(0.5)
            if (userdata.beacon_point is not None) \
            or ((rospy.Time.now() - start_time) > userdata.beacon_observation_delay):
                break
    if userdata.beacon_point is None:
        # beacon not in view
        # first hope, we can spin slowly and see the beacon
        if not self.tried_spin:
            self.announcer.say("Beacon not in view. Rotate ing")
            userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                  angle=np.pi * 2,
                                                  velocity=userdata.spin_velocity)
            userdata.stop_on_detection = True
            self.tried_spin = True
            return 'spin'
        # already tried a spin: drive towards beacon_approach_point,
        # stopping on detection
        elif distance_to_approach_point > 5.0:
            # we think we're far from approach_point, so try to go there
            self.announcer.say("Beacon not in view. Moving to approach point.")
            userdata.stop_on_detection = True
            self.tried_spin = False
            userdata.move_target = userdata.beacon_approach_pose
            return 'move'
        else:
            # we think we're looking at the beacon, but we don't see it,
            # this is probably real bad
            self.announcer.say("Close to approach point in map. Beacon not in view. Search ing")
            search_pose = deepcopy(current_pose)
            # try a random position nearby-ish, ignore facing
            # (offsets drawn from {-30, -20, -10, 0, 10, 20} meters)
            search_pose.pose.position.x += random.randrange(-30, 30, 10)
            search_pose.pose.position.y += random.randrange(-30, 30, 10)
            userdata.stop_on_detection = True
            self.tried_spin = False
            # NOTE(review): this branch stores a PointStamped in move_target
            # while the other branches store poses — presumably the move
            # handler accepts both; confirm downstream.
            userdata.move_target = geometry_msg.PointStamped(search_pose.header,
                                                             search_pose.pose.position)
            return 'move'
    else:
        # beacon is in view
        current_yaw = util.get_current_robot_yaw(self.tf_listener,
                                                 userdata.platform_frame)
        yaw_to_platform = util.pointing_yaw(current_pose.pose.position,
                                            userdata.platform_point.point)
        yaw_error = util.unwind(yaw_to_platform - current_yaw)
        userdata.stop_on_detection = False
        self.tried_spin = False
        # on the back side
        if current_pose.pose.position.x < 0:
            # try not to drive through the platform
            self.announcer.say("Back of beacon in view. Move ing to front")
            front_pose = deepcopy(userdata.beacon_approach_pose)
            front_pose.pose.position.y = 5.0 * np.sign(current_pose.pose.position.y)
            # face the platform from the front pose
            pointing_quat = util.pointing_quaternion_2d(front_pose.pose.position,
                                                        userdata.platform_point.point)
            front_pose.pose.orientation = pointing_quat
            userdata.move_target = front_pose
            return 'move'
        elif distance_to_approach_point > 2.0:
            # on correct side of beacon, but far from approach point
            self.announcer.say("Beacon in view. Move ing to approach point")
            userdata.move_target = userdata.beacon_approach_pose
            return 'move'
        elif np.abs(yaw_error) > .2:
            # this means beacon is in view and we are within 1 meter of
            # approach point, but not pointed so well at beacon
            self.announcer.say("At approach point. Rotate ing to beacon")
            userdata.simple_move = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                                  angle=yaw_error,
                                                  velocity=userdata.spin_velocity)
            return 'spin'
        else:
            self.announcer.say("Measure ing beacon position.")
            userdata.stop_on_detection = False
            return 'mount'
    return 'aborted'
def update_yaw(self, msg):
    """Cache the robot's current yaw and publish it as action feedback,
    but only while a compute-angle spin is in progress."""
    if not self.turning:
        return
    yaw = util.get_current_robot_yaw(self.tf_listener, self.odom_frame)
    self.current_yaw = yaw
    self.action_server.publish_feedback(ComputeAngleFeedback(yaw))
def execute(self, userdata):
    """Initialize pursuit of a detected sample.

    Latches the detection's filter_id, computes an approach pose at
    min_pursuit_distance, builds the VFH pursuit and return goals, and
    selects simple-move vs VFH approach based on distance.

    Returns 'simple', 'goal', or 'aborted' (on tf failure).
    """
    result = samplereturn_msg.GeneralExecutiveResult()
    result.result_string = 'approach failed'
    result.result_int = samplereturn_msg.NamedPoint.NONE
    userdata.action_result = result
    # default velocity for all moves is in simple_mover!
    # initial approach pursuit done at pursuit_velocity
    userdata.lighting_optimized = True
    userdata.search_count = 0
    userdata.grab_count = 0
    rospy.loginfo("SAMPLE_PURSUIT input_point: %s" % (userdata.action_goal.input_point))
    # store the filter_id on entry; this instance of pursuit will only
    # try to pick up this hypothesis
    userdata.latched_filter_id = userdata.action_goal.input_point.filter_id
    userdata.filter_changed = False
    point, pose = calculate_pursuit(self.tf_listener,
                                    userdata.action_goal.input_point,
                                    userdata.min_pursuit_distance,
                                    userdata.odometry_frame)
    try:
        approach_point = geometry_msg.PointStamped(pose.header,
                                                   pose.pose.position)
        yaw, distance = util.get_robot_strafe(self.tf_listener, approach_point)
        robot_yaw = util.get_current_robot_yaw(self.tf_listener,
                                               userdata.odometry_frame)
    except tf.Exception:
        # fix: the original referenced an undefined name 'sample_frame'
        # here, so a tf failure raised NameError instead of logging and
        # aborting; use the frame of the pose we tried to transform
        rospy.logwarn("PURSUE_SAMPLE failed to get base_link -> %s "
                      "transform in 1.0 seconds",
                      pose.header.frame_id)
        return 'aborted'
    rospy.loginfo("INITIAL_PURSUIT calculated with distance: {:f}, yaw: {:f} ".format(distance, yaw))
    # this is the initial vfh goal pose
    goal = samplereturn_msg.VFHMoveGoal(target_pose=pose,
                                        move_velocity=userdata.pursuit_velocity,
                                        spin_velocity=userdata.spin_velocity,
                                        orient_at_target=True)
    userdata.pursuit_goal = goal
    # create return destination (current pose, timestamp zeroed so tf uses
    # the latest available transform later)
    current_pose = util.get_current_robot_pose(self.tf_listener,
                                               userdata.odometry_frame)
    current_pose.header.stamp = rospy.Time(0)
    goal = samplereturn_msg.VFHMoveGoal(target_pose=current_pose,
                                        move_velocity=userdata.pursuit_velocity,
                                        spin_velocity=userdata.spin_velocity,
                                        orient_at_target=False)
    userdata.return_goal = goal
    # if distance to sample approach point is small, use a simple move
    if distance < userdata.simple_pursuit_threshold:
        facing_yaw = euler_from_orientation(pose.pose.orientation)[-1]
        dyaw = util.unwind(facing_yaw - robot_yaw)
        userdata.approach_strafe = SimpleMoveGoal(type=SimpleMoveGoal.STRAFE,
                                                  angle=yaw,
                                                  distance=distance,
                                                  velocity=userdata.pursuit_velocity)
        userdata.face_sample = SimpleMoveGoal(type=SimpleMoveGoal.SPIN,
                                              angle=dyaw,
                                              velocity=userdata.spin_velocity)
        return 'simple'
    # if further than threshold, make a vfh move to the approach point
    else:
        return 'goal'