def look_at_point(self, vector_stamped, end_time=0, pan_vel=1.0, tilt_vel=0.8, timeout=0):
    """Make the head gaze at a given point by sending a head reference goal.

    :param vector_stamped: VectorStamped with the point to look at
    :param end_time: end time field forwarded to the head reference goal
    :param pan_vel: maximum pan velocity [rad/s]
    :param tilt_vel: maximum tilt velocity [rad/s]
    :param timeout: timeout forwarded to the head reference goal call
    """
    assert isinstance(vector_stamped, VectorStamped)
    target = kdl_vector_stamped_to_point_stamped(vector_stamped)
    self._setHeadReferenceGoal(0, pan_vel, tilt_vel, end_time, target, timeout=timeout)
def execute(self, userdata): """ :return: Failed: when taking too long, can't transform RGBD image to point or when preempt requested follow: When successfully recovered operator """ self._robot.head.look_at_standing_person() self._robot.speech.speak("%s, please look at me while I am looking for you" % self._operator_name, block=False) start_time = rospy.Time.now() look_distance = 2.0 look_angles = [0.0, math.pi / 6, math.pi / 4, math.pi / 2.3, 0.0, -math.pi / 6, -math.pi / 4, -math.pi / 2.3] head_goals = [kdl_conversions.VectorStamped(x=look_distance * math.cos(angle), y=look_distance * math.sin(angle), z=1.7, frame_id="/%s/base_link" % self._robot.robot_name) for angle in look_angles] i = 0 while (rospy.Time.now() - start_time).to_sec() < self._lost_timeout: if self.preempt_requested(): return 'Failed' self._robot.head.look_at_point(head_goals[i]) i += 1 if i == len(head_goals): i = 0 self._robot.head.wait_for_motion_done() raw_detections = self._robot.perception.detect_faces() best_detection = self._robot.perception.get_best_face_recognition(raw_detections, "operator", probability_threshold=3.0) rospy.loginfo("best_detection = {}".format(best_detection)) if best_detection: roi = best_detection.roi try: operator_pos_kdl = self._robot.perception.project_roi(roi=roi, frame_id="map") except Exception as e: rospy.logerr("head.project_roi failed: %s", e) return 'Failed' operator_pos_ros = kdl_conversions.kdl_vector_stamped_to_point_stamped(operator_pos_kdl) self._face_pos_pub.publish(operator_pos_ros) recovered_operator = self._robot.ed.get_closest_laser_entity(radius=self._lost_distance, center_point=operator_pos_kdl, ignore_z=True) if recovered_operator: print "Found one!" self._operator_id = recovered_operator.id print "Recovered operator id: %s" % self._operator_id self._operator = recovered_operator self._robot.speech.speak("There you are! 
Go ahead, I'll follow you again", block=False) self._robot.head.close() self._time_started = rospy.Time.now() userdata.recovered_operator = recovered_operator return 'follow' else: print "Could not find an entity {} meter near {}".format(self._lost_distance, operator_pos_kdl) self._robot.head.close() return 'Failed'
def _recover_operator(self):
    """Try to re-acquire the lost operator by scanning around with the head
    and running face recognition.

    :return: True when a laser entity is found near the recognised face
        position (operator bookkeeping is updated as a side effect), False on
        preempt, ROI projection failure or timeout.
    """
    rospy.loginfo("Trying to recover the operator")
    self._robot.head.look_at_standing_person()
    self._robot.speech.speak(
        "%s, please look at me while I am looking for you" % self._operator_name,
        block=False)

    # Wait for the operator and find his/her face
    operator_recovery_timeout = self._lost_timeout
    start_time = rospy.Time.now()
    recovered_operator = None

    # Gaze targets at eye height (z=1.7) on a circle around the robot's
    # base: straight ahead, then a left sweep, then a right sweep.
    look_distance = 2.0
    look_angles = [
        0.0, math.pi / 6, math.pi / 4, math.pi / 2.3, 0.0, -math.pi / 6,
        -math.pi / 4, -math.pi / 2.3
    ]
    head_goals = [
        kdl_conversions.VectorStamped(x=look_distance * math.cos(angle),
                                      y=look_distance * math.sin(angle),
                                      z=1.7,
                                      frame_id="/%s/base_link" % self._robot.robot_name)
        for angle in look_angles
    ]

    i = 0
    while (rospy.Time.now() - start_time).to_sec() < operator_recovery_timeout:
        if self.preempt_requested():
            return False

        # One gaze direction per iteration, wrapping around the list.
        self._robot.head.look_at_point(head_goals[i])
        i += 1
        if i == len(head_goals):
            i = 0
        self._robot.head.wait_for_motion_done()

        # raw_detections is a list of Recognitions
        # a recognition contains a CategoricalDistribution
        # a CategoricalDistribution is a list of CategoryProbabilities
        # a CategoryProbability has a label and a float
        raw_detections = self._robot.perception.detect_faces()
        best_detection = self._robot.perception.get_best_face_recognition(
            raw_detections, "operator")

        rospy.loginfo("best_detection = {}".format(best_detection))
        if best_detection:
            # print "Best detection: {}".format(best_detection)
            roi = best_detection.roi
            try:
                operator_pos_kdl = self._robot.perception.project_roi(
                    roi=roi, frame_id="map")
            except Exception as e:
                rospy.logerr("head.project_roi failed: %s", e)
                return False

            # Publish the projected face position for debugging/visualisation.
            operator_pos_ros = kdl_conversions.kdl_vector_stamped_to_point_stamped(
                operator_pos_kdl)
            self._face_pos_pub.publish(operator_pos_ros)

            # Match the recognised face to a nearby entity seen by the laser.
            recovered_operator = self._robot.ed.get_closest_laser_entity(
                radius=self._lost_distance, center_point=operator_pos_kdl)

            if recovered_operator:
                print "Found one!"
                self._operator_id = recovered_operator.id
                print "Recovered operator id: %s" % self._operator_id
                self._operator = recovered_operator
                self._robot.speech.speak(
                    "There you are! Go ahead, I'll follow you again", block=False)
                self._robot.head.close()
                self._time_started = rospy.Time.now()
                return True
            else:
                print "Could not find an entity {} meter near {}".format(
                    self._lost_distance, operator_pos_kdl)

    # Recovery timed out: close the head, reorient towards the last known
    # operator position and refresh navigation before reporting failure.
    # NOTE(review): placement of these lines after (not inside) the loop is
    # reconstructed from the collapsed source — confirm against upstream.
    self._robot.head.close()
    self._turn_towards_operator()
    self._update_navigation()
    rospy.sleep(2.0)
    return False
def _recover_operator(self): rospy.loginfo( "Trying to recover the operator") self._robot.head.look_at_standing_person() self._robot.speech.speak("%s, please look at me while I am looking for you" % self._operator_name, block=False) # Wait for the operator and find his/her face operator_recovery_timeout = self._lost_timeout start_time = rospy.Time.now() recovered_operator = None look_distance = 2.0 look_angles = [0.0, math.pi/6, math.pi/4, math.pi/2.3, 0.0, -math.pi/6, -math.pi/4, -math.pi/2.3] head_goals = [kdl_conversions.VectorStamped(x=look_distance*math.cos(angle), y=look_distance*math.sin(angle), z=1.7, frame_id="/%s/base_link" % self._robot.robot_name) for angle in look_angles] i = 0 while (rospy.Time.now() - start_time).to_sec() < operator_recovery_timeout: if self.preempt_requested(): return False self._robot.head.look_at_point(head_goals[i]) i += 1 if i == len(head_goals): i = 0 self._robot.head.wait_for_motion_done() # raw_detections is a list of Recognitions # a recognition contains a CategoricalDistribution # a CategoricalDistribution is a list of CategoryProbabilities # a CategoryProbability has a label and a float raw_detections = self._robot.perception.detect_faces() best_detection = self._robot.perception.get_best_face_recognition(raw_detections, "operator") rospy.loginfo("best_detection = {}".format(best_detection)) if best_detection: # print "Best detection: {}".format(best_detection) roi = best_detection.roi try: operator_pos_kdl = self._robot.perception.project_roi(roi=roi, frame_id="map") except Exception as e: rospy.logerr("head.project_roi failed: %s", e) return False operator_pos_ros = kdl_conversions.kdl_vector_stamped_to_point_stamped(operator_pos_kdl) self._face_pos_pub.publish(operator_pos_ros) recovered_operator = self._robot.ed.get_closest_laser_entity(radius=self._lost_distance, center_point=operator_pos_kdl) if recovered_operator: print "Found one!" 
self._operator_id = recovered_operator.id print "Recovered operator id: %s" % self._operator_id self._operator = recovered_operator self._robot.speech.speak("There you are! Go ahead, I'll follow you again",block=False) self._robot.head.close() self._time_started = rospy.Time.now() return True else: print "Could not find an entity {} meter near {}".format(self._lost_distance, operator_pos_kdl) self._robot.head.close() self._turn_towards_operator() self._update_navigation() rospy.sleep(2.0) return False
def execute(self, userdata=None):
    """Look around for a person labelled ``self._person_label``.

    Sweeps the head along a set of gaze directions while running face
    detection/recognition. A recognised face is projected to the map frame
    and matched to a nearby laser entity; optionally the match is rejected
    when it lies outside the configured room. On success the position is
    stored in ED as a waypoint and written to the found-entity designator.

    :param userdata: unused, kept for the state-machine interface
    :return: 'found' on success; 'failed' on preempt, ROI projection failure
        or timeout.
    """
    rospy.loginfo("Trying to find {}".format(self._person_label))
    self._robot.head.look_at_standing_person()
    self._robot.speech.speak("{}, please look at me while I am looking for you".format(self._person_label),
                             block=False)
    start_time = rospy.Time.now()

    look_distance = 2.0  # magic number 4
    look_angles = [f * math.pi / d if d != 0 else 0.0 for f in [-1, 1] for d in [0, 6, 4, 2.3]]  # Magic numbers
    head_goals = [kdl_conversions.VectorStamped(x=look_distance * math.cos(angle),
                                                y=look_distance * math.sin(angle),
                                                z=1.3,
                                                frame_id="/%s/base_link" % self._robot.robot_name)
                  for angle in look_angles]

    i = 0
    while (rospy.Time.now() - start_time).to_sec() < self._search_timeout:
        if self.preempt_requested():
            return 'failed'

        # One gaze direction per iteration, wrapping around the list.
        self._robot.head.look_at_point(head_goals[i])
        i += 1
        if i == len(head_goals):
            i = 0
        self._robot.head.wait_for_motion_done()
        raw_detections = self._robot.perception.detect_faces()

        if self._discard_other_labels:
            best_detection = self._robot.perception.get_best_face_recognition(
                raw_detections, self._person_label, probability_threshold=self._probability_threshold)
        else:
            if raw_detections:
                # Take the biggest ROI
                best_detection = max(raw_detections, key=lambda r: r.roi.height)
            else:
                best_detection = None

        rospy.loginfo("best_detection = {}".format(best_detection))
        if not best_detection:
            continue

        roi = best_detection.roi
        try:
            person_pos_kdl = self._robot.perception.project_roi(roi=roi, frame_id="map")
        except Exception as e:
            rospy.logerr("head.project_roi failed: %s", e)
            return 'failed'

        # Publish the projected face position for debugging/visualisation.
        person_pos_ros = kdl_conversions.kdl_vector_stamped_to_point_stamped(person_pos_kdl)
        self._face_pos_pub.publish(person_pos_ros)

        # Match the recognised face to a nearby entity seen by the laser.
        found_person = self._robot.ed.get_closest_laser_entity(radius=self._look_distance,
                                                               center_point=person_pos_kdl)

        # Reject matches outside the requested room. Fix: guard on
        # found_person as well — the original dereferenced found_person.pose
        # even when no laser entity was found, raising AttributeError.
        if found_person and self._room:
            room_entity = self._robot.ed.get_entity(id=self._room)
            if not room_entity.in_volume(found_person.pose.extractVectorStamped(), 'in'):
                found_person = None

        if found_person:
            # Fix: the original passed block=False to str.format (where it is
            # silently ignored) instead of to speech.speak, making the speak
            # call block.
            rospy.loginfo("I found {} at {}".format(self._person_label,
                                                    found_person.pose.extractVectorStamped()))
            self._robot.speech.speak("I found {}.".format(self._person_label), block=False)
            self._robot.head.close()

            # Remember where the person is as a waypoint in ED.
            self._robot.ed.update_entity(
                id=self._person_label,
                frame_stamped=kdl_conversions.FrameStamped(kdl.Frame(person_pos_kdl.vector), "/map"),
                type="waypoint")

            if self._found_entity_designator:
                self._found_entity_designator.write(found_person)

            return 'found'
        else:
            rospy.logwarn("Could not find {}".format(self._person_label))

    # Timed out without finding the person.
    self._robot.head.close()
    rospy.sleep(2.0)
    return 'failed'
def execute(self, userdata=None):
    """Detect what entity a nearby person is pointing at.

    Waits for a face in range, ray-traces the person's pointing gesture
    (with a dummy fallback when the real ray trace fails), then resolves the
    hit to an ED entity of the desired super type and stores its id in the
    designator.

    :param userdata: unused, kept for the state-machine interface
    :return: "succeeded" when a suitable entity id was written to the
        designator, "failed" when no entity of the super type exists.
    """
    # Point head in the right direction (look at the ground 100 m in front of the robot
    self._robot.head.look_at_ground_in_front_of_robot(distance=100.0)

    # Wait until a face has been detected near the robot
    # NOTE(review): rate is created but never slept on, so this loop spins
    # as fast as _face_within_range returns — confirm whether rate.sleep()
    # was intended.
    rate = rospy.Rate(1.0)
    while not rospy.is_shutdown():
        in_range, face_pos = self._face_within_range(threshold=2.0)
        if in_range:
            break
        else:
            rospy.loginfo("PointingDetector: waiting for someone to come into view")

    self._robot.speech.speak("Hi there", block=True)
    self._robot.lights.set_color(r=1.0, g=0.0, b=0.0, a=1.0)

    # Get RayTraceResult: retry the real ray trace for up to 10 seconds.
    start = rospy.Time.now()
    result = None
    while not rospy.is_shutdown() and (rospy.Time.now() - start).to_sec() < 10.0:
        try:
            result = get_ray_trace_from_closest_person(robot=self._robot)
            if result is not None:
                break
        except Exception as e:
            rospy.logerr("Could not get ray trace from closest person: {}".format(e))

    # If result is None: fallback scenario
    # result = RayTraceResponse()
    # result.entity_id = self._default_entity_id
    # NOTE(review): reconstructed from a collapsed source line — the extent
    # of this `if result is None` block (in particular whether the dummy ray
    # trace below runs only as fallback or always) should be confirmed
    # against the original file.
    if result is None:
        result = RayTraceResponse()
        result.entity_id = self._default_entity_id

        # Build the operator (face) and furniture positions the dummy
        # ray trace needs, both expressed in the map frame.
        face_pos_map = face_pos.projectToFrame("map", self._robot.tf_listener)
        face_pos_msg = kdl_conversions.kdl_vector_stamped_to_point_stamped(face_pos_map)
        e = self._robot.ed.get_entity(id=self._default_entity_id)
        from geometry_msgs.msg import PointStamped
        e_pos_msg = PointStamped()
        e_pos_msg.header.frame_id = "map"
        e_pos_msg.header.stamp = rospy.Time.now()
        e_pos_msg.point.x = e._pose.p.x()
        e_pos_msg.point.y = e._pose.p.y()
        e_pos_msg.point.z = e._pose.p.z()
        result = get_ray_trace_from_closest_person_dummy(robot=self._robot,
                                                         arm_norm_threshold=0.1,
                                                         upper_arm_norm_threshold=0.7,
                                                         entity_id=self._default_entity_id,
                                                         operator_pos=face_pos_msg,
                                                         furniture_pos=e_pos_msg)

    self._robot.lights.set_color(r=0.0, g=0.0, b=1.0, a=1.0)

    # Query the entity from ED
    entity = self._robot.ed.get_entity(id=result.entity_id)

    # If the result is of the desired super type, we can try to do something with it
    if entity.is_a(self._super_type):
        self._designator.set_id(identifier=entity.id)
        rospy.loginfo("Object pointed at: {}".format(entity.id))
        self._robot.speech.speak("Okay, here we go")
        return "succeeded"

    # Otherwise, try to get the closest one
    # NOTE(review): `self.ed` is inconsistent with `self._robot.ed` used
    # everywhere else in this method — likely a typo; verify.
    entities = self.ed.get_entities()
    entities = [e for e in entities if e.is_a(self._super_type)]
    if not entities:
        rospy.logerr("ED doesn't contain any entities of super type {}".format(self._super_type))
        return "failed"

    # If we have entities, sort them according to distance
    assert result.intersection_point.header.frame_id in ["/map", "map"]
    raypos = kdl.Vector(result.intersection_point.point.x,
                        result.intersection_point.point.y,
                        0.0)
    entities.sort(key=lambda e: e.distance_to_2d(raypos))

    self._designator.set_id(identifier=entities[0].id)
    rospy.loginfo("Object pointed at: {}".format(entities[0].id))
    self._robot.speech.speak("Okay, here we go")
    return "succeeded"
def execute(self, userdata=None):
    """Look around for the person labelled ``self._person_label``.

    Sweeps the head along a set of gaze directions while running face
    recognition; when a recognised face can be matched to a nearby laser
    entity, that entity's position is stored in ED as a waypoint.

    :param userdata: unused, kept for the state-machine interface
    :return: 'found' on success; 'failed' on preempt, ROI projection failure
        or timeout.
    """
    rospy.loginfo("Trying to find {}".format(self._person_label))
    self._robot.head.look_at_standing_person()
    self._robot.speech.speak("{}, please look at me while I am looking for you".format(self._person_label),
                             block=False)
    t_start = rospy.Time.now()

    look_distance = 2.0  # magic number 4
    # Straight ahead, then a right sweep, then a left sweep (same values the
    # comprehension over f in [-1, 1] and d in [0, 6, 4, 2.3] produced).
    look_angles = [0.0, -math.pi / 6, -math.pi / 4, -math.pi / 2.3,
                   0.0, math.pi / 6, math.pi / 4, math.pi / 2.3]  # Magic numbers
    head_goals = [kdl_conversions.VectorStamped(x=look_distance * math.cos(a),
                                                y=look_distance * math.sin(a),
                                                z=1.7,
                                                frame_id="/%s/base_link" % self._robot.robot_name)
                  for a in look_angles]

    goal_idx = 0
    while (rospy.Time.now() - t_start).to_sec() < self._timeout:
        if self.preempt_requested():
            return 'failed'

        self._robot.head.look_at_point(head_goals[goal_idx])
        goal_idx = (goal_idx + 1) % len(head_goals)
        self._robot.head.wait_for_motion_done()

        raw_detections = self._robot.perception.detect_faces()
        best_detection = self._robot.perception.get_best_face_recognition(
            raw_detections, self._person_label, probability_threshold=self._probability_threshold)
        rospy.loginfo("best_detection = {}".format(best_detection))
        if not best_detection:
            continue

        try:
            person_pos_kdl = self._robot.perception.project_roi(roi=best_detection.roi, frame_id="map")
        except Exception as e:
            rospy.logerr("head.project_roi failed: %s", e)
            return 'failed'

        # Publish the projected face position for debugging/visualisation.
        person_pos_ros = kdl_conversions.kdl_vector_stamped_to_point_stamped(person_pos_kdl)
        self._face_pos_pub.publish(person_pos_ros)

        # Match the recognised face to a nearby entity seen by the laser.
        found_person = self._robot.ed.get_closest_laser_entity(radius=self._look_distance,
                                                               center_point=person_pos_kdl)
        if found_person:
            self._robot.speech.speak("I found {}".format(self._person_label), block=False)
            self._robot.head.close()
            # Remember where the person is as a waypoint in ED.
            self._robot.ed.update_entity(
                id=self._person_label,
                frame_stamped=kdl_conversions.FrameStamped(kdl.Frame(person_pos_kdl.vector), "/map"),
                type="waypoint")
            return 'found'

        # NOTE(review): `self.area` (no underscore) looks inconsistent with
        # the `self._...` attributes used elsewhere — verify it exists.
        rospy.logwarn("Could not find {} in the {}".format(self._person_label, self.area))

    # Timed out without finding the person.
    self._robot.head.close()
    rospy.sleep(2.0)
    return 'failed'