def odom_cb(self, msg):
    '''HACK: Intermediate hack until we have tf set up'''
    pose, twist, _, _ = mil_ros_tools.odometry_to_numpy(msg)
    position, orientation = pose
    self.world_to_body = self.transformer.fromTranslationRotation(position, orientation)[:3, :3]
    self.cur_position = position
    cur_orientation = tf.transformations.quaternion_matrix(orientation)
    self.cur_orientation = cur_orientation[:3, :3]
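A minimal usage sketch for the attributes cached above; the helper name and its argument are illustrative, and the note on rotation direction follows the usual ROS odometry convention rather than anything stated in the original.

import numpy as np


def body_to_world(self, vec_body):
    # Hypothetical helper: the odometry orientation normally encodes the
    # body->world rotation, so applying the cached 3x3 matrix maps a
    # body-frame vector (e.g. a velocity) into the world frame.
    return self.world_to_body.dot(np.asarray(vec_body))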
def odom_cb(self, msg):
    # Throttle how often odometry is sampled into the state history
    time_of = rospy.Time.now()
    if (time_of - self.last_sample) < self._sampling_period:
        return
    else:
        self.last_sample = time_of
    pose, twist, _, _ = mil_ros_tools.odometry_to_numpy(msg)
    position, orientation = pose
    linear, angular = twist
    self.cur_state_history.append(np.hstack((position, linear)))
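A sketch of the state this throttled callback assumes; the sampling rate, history length, and queue size below are assumptions, not values from the original.

from collections import deque

import rospy
from nav_msgs.msg import Odometry


def __init__(self):
    # Assumed setup for the throttled odometry callback above
    self._sampling_period = rospy.Duration(0.1)  # accept samples at ~10 Hz (illustrative)
    self.last_sample = rospy.Time(0)             # so the first message is always accepted
    self.cur_state_history = deque(maxlen=100)   # rolling [position, linear velocity] rows
    self.odom_sub = rospy.Subscriber('/odom', Odometry, self.odom_cb, queue_size=1)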
def init_(self, cam):
    image_sub = "/stereo/right/image_rect_color"
    cam_info = "/stereo/right/camera_info"
    yield self.nh.subscribe(image_sub, Image, self._img_cb)
    self._database = yield self.nh.get_service_client('/database/requests', ObjectDBQuery)
    self._odom_sub = yield self.nh.subscribe(
        '/odom', Odometry,
        lambda odom: setattr(self, 'pose', odometry_to_numpy(odom)[0]))
    self.cam_info_sub = yield self.nh.subscribe(cam_info, CameraInfo, self._info_cb)
    self.tf_listener = tf.TransformListener(self.nh)
    defer.returnValue(self)
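A hedged sketch of what the _info_cb referenced above might do, using image_geometry's PinholeCameraModel; the original implementation is not shown, so this is an assumption.

from image_geometry import PinholeCameraModel


def _info_cb(self, msg):
    # Cache a camera model built from the latest CameraInfo so later code can
    # project between pixels and 3D rays (assumed behavior, not from the original)
    self.camera_model = PinholeCameraModel()
    self.camera_model.fromCameraInfo(msg)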
def odom_cb(self, msg):
    '''
    Message structure defined in the ROS specification [1]
    [1] http://docs.ros.org/jade/api/nav_msgs/html/msg/Odometry.html
    '''
    pose, twist, pose_cov, twist_cov = mil_ros_tools.odometry_to_numpy(msg)
    (position, orientation_q), (linear_vel, angular_vel) = pose, twist
    self.current_state = {
        'position': position,
        'linear_vel': linear_vel,
        'orientation_q': orientation_q,
        'angular_vel': angular_vel,
    }

    # Using rospy.Time.now() so we can use simulated accelerated time (not CPU time)
    time_of = rospy.Time.now()
    if (time_of - self.last_sample) < self.sampling_period:
        return
    else:
        self.last_sample = time_of

    # For now, we'll ignore pose and twist covariance (implementing for controls final project)
    # Don't include states that are "too old"
    for state_variable in self.state_variables:
        if len(self.state_history[state_variable]) > self.history_length:
            self.state_history[state_variable].popleft()

    self.state_history['position'].append(position)
    self.state_history['linear_vel'].append(linear_vel)
    self.state_history['orientation_q'].append(orientation_q)
    self.state_history['angular_vel'].append(angular_vel)

    # Hold at last target state
    if self.target_trajectory is None:
        return

    if len(self.target_trajectory['position']) > 1:
        p_error = np.linalg.norm(position - self.target_trajectory['position'][0])
        v_error = np.linalg.norm(linear_vel - self.target_trajectory['linear_vel'][0])
        if p_error + v_error < self.waypoint_epsilon:
            rospy.logwarn("Waypoint achieved!")
            for state in self.state_variables:
                self.target_trajectory[state].popleft()  # Remove a target state!
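A sketch of how the history and trajectory containers used above might be initialized; state_variables matches the keys appended in the callback, while the history_length, sampling_period, and waypoint_epsilon values are illustrative assumptions.

from collections import deque

import rospy


def __init__(self):
    # Assumed container setup for the callback above
    self.state_variables = ['position', 'linear_vel', 'orientation_q', 'angular_vel']
    self.state_history = {state: deque() for state in self.state_variables}
    self.history_length = 100                    # max samples kept per state variable (illustrative)
    self.sampling_period = rospy.Duration(0.05)  # illustrative sampling period
    self.last_sample = rospy.Time(0)
    self.waypoint_epsilon = 0.1                  # combined position + velocity error threshold (assumed)
    self.target_trajectory = None                # later: dict of deques keyed by state variable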