def main(navigator):
    '''
    Once we see the gate, move to a point 10 m in front of it. When we get
    there, make another observation of the gate to make sure we are lined up,
    and then go through.
    '''
    navigator.change_wrench("autonomous")

    # Only try to get the point 5 times
    for _try in range(5):
        print "Waiting for points..."
        m = yield navigator.vision_request("start_gate")
        print m

        if not m.success:
            continue

        p, q = navigator_tools.pose_to_numpy(m.target.pose)

        # Move backwards a bit from the waypoint
        target_r = tf.transformations.quaternion_matrix(q)
        back_up = target_r.dot(np.array([-10, 0, 0, 1]))  # Homogeneous
        target_p = p + back_up[:3]

        yield navigator.move.set_orientation(target_r).go()
        yield navigator.move.set_position(target_p).go()
        break

    if not m.success:
        print "No objects found. Exiting."
        # Raise an alarm
        defer.returnValue(False)

    for _try in range(5):
        print "Waiting for points..."
        m = yield navigator.vision_request("start_gate")
        print m

        if not m.success:
            continue

        p, q = navigator_tools.pose_to_numpy(m.target.pose)

        # Average the two rotations and positions together to get a "better" guess
        weight_factor = .8  # Weight for the newer data over the old stuff
        target_r = weight_factor * tf.transformations.quaternion_matrix(q) + \
            (1 - weight_factor) * target_r
        target_p = weight_factor * p + (1 - weight_factor) * target_p

        # Push the goal 10 m forward along the gate normal so we drive through it
        move_through = target_r.dot([10, 0, 0, 1])
        target_p += move_through[:3]

        yield navigator.move.set_orientation(target_r).go()
        yield navigator.move.set_position(target_p).go()
        break
def to_pose(self, pose):
    ''' Takes a Pose or PoseStamped '''
    if isinstance(pose, PoseStamped):
        pose = pose.pose

    position, orientation = navigator_tools.pose_to_numpy(pose)
    return PoseEditor2(self.nav, [position, orientation])
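# A minimal usage sketch for to_pose above, assuming `editor_owner` is an
# instance of the class that defines it (the instance name and the pose values
# are hypothetical; PoseEditor2 comes from the snippet itself). It shows that
# a PoseStamped is unwrapped to its inner Pose, so both calls are equivalent.
from geometry_msgs.msg import Point, Pose, PoseStamped, Quaternion

stamped = PoseStamped()
stamped.pose = Pose(position=Point(1.0, 2.0, 0.0),
                    orientation=Quaternion(0.0, 0.0, 0.0, 1.0))

editor_a = editor_owner.to_pose(stamped)       # PoseStamped -> uses stamped.pose
editor_b = editor_owner.to_pose(stamped.pose)  # a bare Pose works the same way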
def state_cb(self, msg):
    if rospy.Time.now() - self.last_state_sub_time < self.state_sub_max_prd:
        return
    self.last_state_sub_time = rospy.Time.now()

    if self.target in msg.name:
        target_index = msg.name.index(self.target)
        twist = msg.twist[target_index]
        enu_pose = navigator_tools.pose_to_numpy(msg.pose[target_index])
        self.last_ecef = self.enu_to_ecef(enu_pose[0])
        self.last_enu = enu_pose

        self.last_odom = Odometry(
            header=navigator_tools.make_header(frame='/enu'),
            child_frame_id='/measurement',
            pose=PoseWithCovariance(
                pose=navigator_tools.numpy_quat_pair_to_pose(*self.last_enu)),
            twist=TwistWithCovariance(twist=twist))

        self.last_absodom = Odometry(
            header=navigator_tools.make_header(frame='/ecef'),
            child_frame_id='/measurement',
            pose=PoseWithCovariance(
                pose=navigator_tools.numpy_quat_pair_to_pose(
                    self.last_ecef, self.last_enu[1])),
            twist=TwistWithCovariance(twist=twist))
def __init__(self, frame_id='enu', map_size=500, resolution=0.3, rate=1):
    self.frame_id = frame_id
    self.ogrids = {}
    self.odom = None

    # Some default values
    self.plow = True
    self.plow_factor = 0
    self.ogrid_min_value = -1
    self.draw_bounds = False
    self.resolution = resolution
    self.enforce_bounds = False
    self.enu_bounds = [[0, 0, 1], [0, 0, 1], [0, 0, 1], [0, 0, 1]]

    # Default to centering the ogrid
    position = np.array([-(map_size * resolution) / 2,
                         -(map_size * resolution) / 2, 0])
    quaternion = np.array([0, 0, 0, 1])
    self.origin = navigator_tools.numpy_quat_pair_to_pose(position, quaternion)

    self.global_ogrid = self.create_grid((map_size, map_size))

    set_odom = lambda msg: setattr(self, 'odom',
                                   navigator_tools.pose_to_numpy(msg.pose.pose))
    rospy.Subscriber('/odom', Odometry, set_odom)

    self.publisher = rospy.Publisher('/ogrid_master', OccupancyGrid, queue_size=1)

    Server(OgridConfig, self.dynamic_cb)
    dynam_client = Client("bounds_server", config_callback=self.bounds_cb)

    rospy.Timer(rospy.Duration(1.0 / rate), self.publish)
def state_cb(self, msg):
    if self.target in msg.name:
        target_index = msg.name.index(self.target)
        twist = msg.twist[target_index]
        enu_pose = navigator_tools.pose_to_numpy(msg.pose[target_index])
        self.last_ecef = self.enu_to_ecef(enu_pose[0])
        self.last_enu = enu_pose

        self.last_odom = Odometry(
            header=navigator_tools.make_header(frame='/enu'),
            child_frame_id='/base_link',
            pose=PoseWithCovariance(
                pose=navigator_tools.numpy_quat_pair_to_pose(*self.last_enu)),
            twist=TwistWithCovariance(twist=twist))

        self.last_absodom = Odometry(
            header=navigator_tools.make_header(frame='/ecef'),
            child_frame_id='/base_link',
            pose=PoseWithCovariance(
                pose=navigator_tools.numpy_quat_pair_to_pose(
                    self.last_ecef, self.last_enu[1])),
            twist=TwistWithCovariance(twist=twist))
def is_feasible(self, x, u):
    """
    Given a state x and effort u, returns a bool that is only True if
    that (x, u) is feasible.
    """
    # If there's no ogrid yet or it's a blind move, anywhere is valid
    if self.ogrid is None or self.blind:
        return True

    # Body to world
    c, s = np.cos(x[2]), np.sin(x[2])
    R = np.array([[c, -s],
                  [s, c]])

    # Vehicle points in world frame
    points = x[:2] + R.dot(params.vps).T

    # Check for collision
    cpm = 1 / self.ogrid.info.resolution
    origin = navigator_tools.pose_to_numpy(self.ogrid.info.origin)[0][:2]
    indicies = (cpm * (points - origin)).astype(np.int64)

    try:
        # OccupancyGrid data is row-major, so reshape to (height, width)
        data = np.array(self.ogrid.data).reshape(self.ogrid.info.height,
                                                 self.ogrid.info.width)
        grid_values = data[indicies[:, 1], indicies[:, 0]]
    except IndexError:
        return False

    # Greater than threshold is a hit
    return np.all(grid_values < 90)
def _lidar_view(self, pose):
    radius = 30  # m
    position, quat = pose
    yaw = tf.transformations.euler_from_quaternion(
        [quat[0], quat[1], quat[2], quat[3]])[2]

    # Need to get the position on the ogrid to put the shape
    cpm = 1 / self.map_metadata.resolution  # cells per meter
    origin = navigator_tools.pose_to_numpy(self.map_metadata.origin)[0]

    # Convert from meters to grid measurements
    center_grid = (cpm * (position - origin - radius)).astype(np.int64)[:2]
    radius_grid = int(radius * cpm)

    # Draw the shape on its own canvas, then combine
    shape = np.zeros((2 * radius_grid, 2 * radius_grid))  # Blank place to put the cone
    cv2.circle(shape, (radius_grid, radius_grid), radius_grid, 1, -1)

    # Get rid of that triangle of unseen space
    pts = np.array([[radius_grid, radius_grid], shape.shape, [0, shape.shape[1]]])
    pts = pts.reshape((-1, 1, 2)).astype(np.int32)
    cv2.fillPoly(shape, [pts], 0)

    # Then rotate so that we are oriented the same as the boat
    M = cv2.getRotationMatrix2D((radius_grid, radius_grid), np.degrees(yaw) - 90, 1.0)
    shape = cv2.warpAffine(shape, M, shape.shape[:2][::-1])

    # Add the shape to the grid (you have to do some funky flipping here)
    self.grid[center_grid[1]:center_grid[1] + int(2 * radius_grid),
              center_grid[0]:center_grid[0] + int(2 * radius_grid)] += np.flipud(shape)
def make_ogrid_transform(ogrid):
    """
    Returns a matrix that transforms a point in ENU to this ogrid.
    Invert the result to get ogrid -> ENU.
    """
    resolution = ogrid.info.resolution
    origin = navigator_tools.pose_to_numpy(ogrid.info.origin)[0]

    # Transforms points from ENU to ogrid frame coordinates
    t = np.array([[1 / resolution, 0, -origin[0] / resolution],
                  [0, 1 / resolution, -origin[1] / resolution],
                  [0, 0, 1]])
    return t
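# A short usage sketch for make_ogrid_transform, assuming a nav_msgs/OccupancyGrid
# message named `ogrid` is already in scope (the point coordinates below are made
# up for illustration). The returned 3x3 matrix maps homogeneous ENU (x, y, 1)
# coordinates to fractional grid cells; inverting it maps grid cells back to ENU.
import numpy as np

t = make_ogrid_transform(ogrid)
enu_point = np.array([12.3, -4.5, 1.0])        # homogeneous ENU point (x, y, 1)
cell = t.dot(enu_point)                        # (column, row, 1) in grid units
col, row = cell[:2].astype(np.int64)           # integer cell indices into ogrid.data
back_to_enu = np.linalg.inv(t).dot(cell)[:2]   # recovers the original ENU x, y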
def _circle(self, pose):
    ''' ENU center and meter radius '''
    radius = 50  # m
    position = pose[0]

    cpm = 1 / self.map_metadata.resolution  # cells per meter
    origin = navigator_tools.pose_to_numpy(self.map_metadata.origin)[0]

    # Convert from meters to grid measurements
    center_grid = (cpm * (position - origin)).astype(np.int64)
    radius_grid = int(radius * cpm)

    # Draw using the radius in grid cells, not meters
    cv2.circle(self.grid, tuple(center_grid[:2]), radius_grid, 1, -1)
def set_odom(self, msg):
    self.odom = navigator_tools.pose_to_numpy(msg.pose.pose)
    self.odom_pub.publish(msg)
def __init__(self):
    info_topic = camera_root + "/camera_info"
    image_topic = camera_root + "/image_rect_color"

    self.tf_listener = tf.TransformListener()
    self.status_pub = rospy.Publisher("/database_color_status", ColoramaDebug, queue_size=1)

    self.odom = None
    set_odom = lambda msg: setattr(self, "odom", navigator_tools.pose_to_numpy(msg.pose.pose))
    rospy.Subscriber("/odom", Odometry, set_odom)
    fprint("Waiting for odom...")
    while self.odom is None and not rospy.is_shutdown():
        rospy.sleep(1)
    fprint("Odom found!", msg_color='green')

    db_request = rospy.ServiceProxy("/database/requests", ObjectDBQuery)
    self.make_request = lambda **kwargs: db_request(ObjectDBQueryRequest(**kwargs))

    self.image_history = ImageHistory(image_topic)

    # Wait for camera info, and exit if not found
    fprint("Waiting for camera info on: '{}'".format(info_topic))
    while not rospy.is_shutdown():
        try:
            camera_info_msg = rospy.wait_for_message(info_topic, CameraInfo, timeout=3)
        except rospy.exceptions.ROSException:
            rospy.sleep(1)
            continue
        break

    fprint("Camera info found!", msg_color="green")
    self.camera_model = PinholeCameraModel()
    self.camera_model.fromCameraInfo(camera_info_msg)

    self.debug = DebugImage("/colorama/debug", prd=.5)

    # These are color mappings from Hue values [0, 360]
    self.color_map = {'red': np.radians(0), 'yellow': np.radians(60),
                      'green': np.radians(120), 'blue': np.radians(200)}

    # RGB map for database setting
    self.database_color_map = {'red': (255, 0, 0), 'yellow': (255, 255, 0),
                               'green': (0, 255, 0), 'blue': (0, 0, 255)}

    # ========= Some tunable parameters ===================================
    self.update_time = 1  # s

    # Observation parameters
    self.saturation_reject = 20  # Reject color obs below this saturation
    self.value_reject = 50       # Reject color obs below this value
    self.height_remove = 0.4     # Remove points that are this percent down on the object (%)
                                 # 1 keeps all, .4 removes the bottom 40%

    # Update parameters
    self.history_length = 100  # How many of each color to keep
    self.min_obs = 5           # Need at least this many observations before making a determination
    self.conf_reject = .5      # When to reject an observation based on its confidence

    # Confidence weights
    self.v_factor = 0.6     # Favor values close to the nominal value
    self.v_u = 200          # Mean of norm for variance error
    self.v_sig = 60         # Sigma of norm for variance error
    self.dist_factor = 0.3  # Favor being closer to the totem
    self.dist_sig = 30      # Sigma of distance (m)
    self.q_factor = 0       # Favor not looking into the sun
    self.q_sig = 1.2        # Sigma of norm for quaternion error (rads)

    # Maps id => observations
    self.colored = {}

    rospy.Timer(ros_t(self.update_time), self.do_observe)