def database_query(self, object_name=None, raise_exception=True, **kwargs):
    if object_name is not None:
        kwargs['name'] = object_name
        res = yield self._database_query(ObjectDBQueryRequest(**kwargs))

        if not res.found and raise_exception:
            raise MissingPerceptionObject(kwargs['name'])

        defer.returnValue(res)

    res = yield self._database_query(ObjectDBQueryRequest(**kwargs))
    defer.returnValue(res)
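# --- Usage sketch (not from the original source) ---
# Illustrates how database_query might be driven from a mission coroutine.
# The decorator (a txros-style util.cancellableInlineCallbacks) and the object
# name 'orange_totem' are assumptions for illustration only.
@util.cancellableInlineCallbacks
def example_query(self):
    # Raises MissingPerceptionObject if no 'orange_totem' is in the database yet
    res = yield self.database_query('orange_totem')
    defer.returnValue(res.objects[0])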
def start_search_in_cone(self, start_point, ray, angle_tol=30, distance_tol=10,
                         speed=0.5, clear=False, c_func=None):
    if clear:
        print 'SONAR_OBJECTS: clearing pointcloud'
        self._clear_pcl(TriggerRequest())
        yield self.sub.nh.sleep(1)

    for pose in self.pattern:
        yield pose.go(speed=speed, blind=True)
        # Give the sub a moment to settle before querying
        yield self.sub.nh.sleep(0.1)

        # Break out of the loop if we find something satisfying the function
        res = yield self._objects_service(ObjectDBQueryRequest())
        g_obj = self._get_objects_within_cone(res.objects, start_point, ray,
                                              angle_tol, distance_tol)
        g_obj = self._sort_by_angle(g_obj, ray, start_point)

        if c_func is not None:
            out = c_func(g_obj, ray)
            print 'SONAR_OBJECTS: ' + str(out)
            if out is not None or out is True:
                print 'SONAR_OBJECTS: found objects satisfying function'
                break

    res = yield self._objects_service(ObjectDBQueryRequest())
    g_obj = self._get_objects_within_cone(res.objects, start_point, ray,
                                          angle_tol, distance_tol)
    g_obj = self._sort_by_angle(g_obj, ray, start_point)
    res.objects = g_obj
    defer.returnValue(res)
def start_search_in_cone(self, start_point, ray, angle_tol=30, distance_tol=10,
                         speed=0.5, clear=False):
    if clear:
        print 'SONAR_OBJECTS: clearing pointcloud'
        self._clear_pcl(TriggerRequest())

    for pose in self.pattern:
        yield pose.go(speed=speed, blind=True)

    res = yield self._objects_service(ObjectDBQueryRequest())
    g_obj = self._get_objects_within_cone(
        res.objects, start_point, ray, angle_tol, distance_tol)
    g_obj = self._sort_by_angle(g_obj, ray, start_point)
    res.objects = g_obj
    defer.returnValue(res)
def update_object(self, object_msg, class_probabilities):
    object_id = object_msg.id

    # Update the total class probabilities
    if object_id in self.object_map:
        self.object_map[object_id] += class_probabilities
    else:
        self.object_map[object_id] = class_probabilities
    total_probabilities = self.object_map[object_id]

    # Guess the type of object based on the accumulated probabilities
    most_likely_index = np.argmax(total_probabilities)
    most_likely_name = self.classifier.CLASSES[most_likely_index]

    # Unfortunately this doesn't really work
    if most_likely_name in self.BLACK_OBJECT_CLASSES:
        object_scale = rosmsg_to_numpy(object_msg.scale)
        object_volume = object_scale.dot(object_scale)
        object_area = object_scale[:2].dot(object_scale[:2])
        height = object_scale[2]
        if object_id in self.volume_means:
            self.volume_means[object_id].add_value(object_volume)
            self.area_means[object_id].add_value(object_area)
        else:
            self.volume_means[object_id] = RunningMean(object_volume)
            self.area_means[object_id] = RunningMean(object_area)
        running_mean_volume = self.volume_means[object_id].mean
        running_mean_area = self.area_means[object_id].mean

        if height > self.TOTEM_MIN_HEIGHT:
            black_guess = 'black_totem'
        else:
            black_guess_index = np.argmin(
                np.abs(self.BLACK_OBJECT_VOLUMES - running_mean_volume))
            black_guess = self.POSSIBLE_BLACK_OBJECTS[black_guess_index]
        most_likely_name = black_guess
        rospy.loginfo(
            '{} current/running volume={}/{} area={}/{} height={} -> {}'.format(
                object_id, object_volume, running_mean_volume, object_area,
                running_mean_area, height, black_guess))

    obj_title = object_msg.labeled_classification
    probability = class_probabilities[most_likely_index]
    rospy.loginfo('Object {} {} classified as {} ({}%)'.format(
        object_id, object_msg.labeled_classification, most_likely_name,
        probability * 100.))
    if obj_title != most_likely_name:
        cmd = '{}={}'.format(object_id, most_likely_name)
        rospy.loginfo('Updating object {} to {}'.format(
            object_id, most_likely_name))
        if not self.is_training:
            self.database_client(ObjectDBQueryRequest(cmd=cmd))
    return most_likely_name
def start_search(self, speed=0.5, clear=False):
    """
    Do a search and return all objects

    Parameters:
        speed: how fast the sub should move
        clear: clear the pointcloud beforehand
    """
    if clear:
        print 'SONAR_OBJECTS: clearing pointcloud'
        self._clear_pcl(TriggerRequest())
    print 'SONAR_OBJECTS: running pattern'
    yield self._run_pattern(speed)
    print 'SONAR_OBJECTS: requesting objects'
    res = yield self._objects_service(ObjectDBQueryRequest())
    defer.returnValue(res)
def start_until_found_in_cone(self, start_point, speed=0.5, clear=False,
                              object_count=0, ray=np.array([0, 1, 0]),
                              angle_tol=30, distance_tol=12):
    """
    Search until objects are found within a cone-shaped range

    Parameters:
        start_point: numpy array for the starting point of the direction vector
        speed: how fast the sub should move
        clear: should the pointcloud be cleared beforehand
        object_count: how many objects we are looking for
        ray: the direction vector
        angle_tol: how far off the direction vector should be allowed
        distance_tol: how far away we are willing to accept

    Returns:
        ObjectDBQuery: with the objects field filled by good objects
    """
    if clear:
        print 'SONAR_OBJECTS: clearing pointcloud'
        self._clear_pcl(TriggerRequest())
    count = -1
    while count < object_count:
        for pose in self.pattern:
            yield pose.go(speed=speed, blind=True)
            res = yield self._objects_service(ObjectDBQueryRequest())
            g_obj = self._get_objects_within_cone(res.objects, start_point, ray,
                                                  angle_tol, distance_tol)
            if g_obj is None:
                continue
            count = len(g_obj)
            print 'SONAR OBJECTS: found {} that satisfy cone'.format(count)
            if count >= object_count:
                g_obj = self._sort_by_angle(g_obj, ray, start_point)
                res.objects = g_obj
                defer.returnValue(res)
    defer.returnValue(None)
def start_until_found_x(self, speed=0.5, clear=False, object_count=0):
    """
    Search until a number of objects are found

    Parameters:
        speed: how fast the sub should move
        clear: clear the pointcloud beforehand
        object_count: how many objects we want
    """
    if clear:
        print 'SONAR_OBJECTS: clearing pointcloud'
        self._clear_pcl(TriggerRequest())
    count = -1
    while count < object_count:
        for pose in self.pattern:
            yield pose.go(speed=speed)
            res = yield self._objects_service(ObjectDBQueryRequest())
            count = len(res.objects)
            if count >= object_count:
                defer.returnValue(res)
    defer.returnValue(None)
def pcodar_label(cls, idx, name):
    cmd = '%d=%s' % (idx, name)
    yield cls._database_query(ObjectDBQueryRequest(name='', cmd=cmd))
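# --- Usage note (not from the original source) ---
# pcodar_label builds a PCODAR relabel command of the form '<id>=<name>', the
# same format used by update_object and process_boxes in this section. The id
# and label below are made up for illustration:
# yield cls.pcodar_label(12, 'dock')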
def process_boxes(self, msg):
    if not self.enabled:
        return
    if self.camera_model is None:
        return
    if self.last_objects is None or len(self.last_objects.objects) == 0:
        return
    now = rospy.Time.now()
    if now - self.last_update_time < self.update_period:
        return
    self.last_update_time = now

    # Get the transform from ENU to the optical frame at the time of this image
    transform = self.tf_buffer.lookup_transform(
        self.sub.last_image_header.frame_id,
        "enu",
        self.sub.last_image_header.stamp,
        timeout=rospy.Duration(1))
    translation = rosmsg_to_numpy(transform.transform.translation)
    rotation = rosmsg_to_numpy(transform.transform.rotation)
    rotation_mat = quaternion_matrix(rotation)[:3, :3]

    # Transform the center of each object into the optical frame
    positions_camera = [
        translation + rotation_mat.dot(rosmsg_to_numpy(obj.pose.position))
        for obj in self.last_objects.objects
    ]
    pixel_centers = [
        self.camera_model.project3dToPixel(point) for point in positions_camera
    ]
    distances = np.linalg.norm(positions_camera, axis=1)
    CUTOFF_METERS = 30
    if self.is_perception_task:
        CUTOFF_METERS = 100

    # Get a list of indices of objects that are sufficiently close
    # and can be seen by the camera
    met_criteria = []
    for i in xrange(len(self.last_objects.objects)):
        distance = distances[i]
        if self.in_frame(pixel_centers[i]) and distance < CUTOFF_METERS \
                and positions_camera[i][2] > 0:
            met_criteria.append(i)
    # print 'Keeping {} of {}'.format(len(met_criteria), len(self.last_objects.objects))

    classified = set()
    # For each bounding box, check which buoy is closest to the boat
    # within the pixel range of the bounding box
    for a in msg.bounding_boxes:
        buoys = []
        for i in met_criteria:
            if self.in_rect(pixel_centers[i], a):
                buoys.append(i)
        if len(buoys) > 0:
            closest_to_box = buoys[0]
            closest_to_boat = buoys[0]
            for i in buoys[1:]:
                if distances[i] < distances[closest_to_boat]:
                    closest_to_box = i
                    closest_to_boat = i
            classified.add(self.last_objects.objects[closest_to_box].id)
            print('Object {} classified as {}'.format(
                self.last_objects.objects[closest_to_box].id, a.Class))
            cmd = '{}={}'.format(
                self.last_objects.objects[closest_to_box].id, a.Class)
            self.database_client(ObjectDBQueryRequest(cmd=cmd))

    if not self.is_perception_task:
        return
    for a in met_criteria:
        if self.last_objects.objects[a].id in classified:
            continue
        height = self.last_objects.objects[a].scale.z
        # if pixel_centers[i][0] > 1280 or pixel_centers[i][0] > 720:
        #     return
        if height > 0.45:
            print('Reclassified as white')
            print('Object {} classified as {}'.format(
                self.last_objects.objects[a].id, "mb_marker_buoy_white"))
            cmd = '{}={}'.format(self.last_objects.objects[a].id,
                                 "mb_marker_buoy_white")
            self.database_client(ObjectDBQueryRequest(cmd=cmd))
        else:
            print('Object {} classified as {}'.format(
                self.last_objects.objects[a].id, "mb_round_buoy_black"))
            cmd = '{}={}'.format(self.last_objects.objects[a].id,
                                 "mb_round_buoy_black")
            self.database_client(ObjectDBQueryRequest(cmd=cmd))