class AUVTransformPublisher(Node):
    """ROS 2 node that republishes the AUV state as TF and a Path.

    Subscribes to '/triton/state' (PoseStamped), broadcasts a transform
    from the state's frame to 'base_link', and publishes the accumulated
    pose history on '/triton/path'.
    """

    def __init__(self):
        super().__init__('auv_transform_publisher')

        qos_profile = QoSProfile(depth=10)
        self.state_subscriber = self.create_subscription(
            PoseStamped, '/triton/state', self.state_callback, 10)
        self.path_publisher = self.create_publisher(Path, '/triton/path',
                                                    qos_profile)
        # Full pose history. NOTE(review): grows without bound for the
        # lifetime of the node — confirm this is acceptable for long runs.
        self.pose_array = []
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.get_logger().info("AUVTransformPublisher successfully started!")

    def state_callback(self, msg):
        """Broadcast the latest state as TF and republish the pose history."""
        self.pose_array.append(msg)
        stamp = self.get_clock().now().to_msg()

        position = msg.pose.position
        transform = TransformStamped()
        transform.header.frame_id = msg.header.frame_id
        transform.header.stamp = stamp
        transform.child_frame_id = 'base_link'
        transform.transform.translation.x = position.x
        transform.transform.translation.y = position.y
        transform.transform.translation.z = position.z
        transform.transform.rotation = msg.pose.orientation
        self.broadcaster.sendTransform(transform)

        path_msg = Path()
        path_msg.header.frame_id = msg.header.frame_id
        path_msg.header.stamp = stamp
        path_msg.poses = self.pose_array
        self.path_publisher.publish(path_msg)
def publish_odometry(rate_hz=30, vx=0.0):
    """Continuously publish a fixed odometry message and its TF transform.

    Broadcasts an identity 'odom' -> 'base_link' transform and publishes an
    Odometry message on 'adabot/odometry' at *rate_hz* until rospy shuts down.

    Args:
        rate_hz: publishing frequency in Hz (previously hard-coded to 30;
            now configurable, resolving the old "make this configurable" TODO).
        vx: forward linear velocity reported in the odometry twist. The
            original body referenced an undefined name ``vx``, which raised
            NameError at runtime; it is now an explicit parameter.
    """
    rate = rospy.Rate(rate_hz)
    odom_publisher = rospy.Publisher('adabot/odometry', Odometry, queue_size=10)
    odom_broadcaster = TransformBroadcaster()

    while not rospy.is_shutdown():

        current_time = rospy.get_rostime()

        # Publish the transform over tf (identity translation/rotation).
        odom_transform = TransformStamped()
        odom_transform.header.stamp = current_time
        odom_transform.header.frame_id = 'odom'
        odom_transform.child_frame_id = 'base_link'
        odom_transform.transform.rotation.w = 1.0
        odom_broadcaster.sendTransform(odom_transform)

        # Publish the odometry message over ROS; pose is left at its
        # zero-initialized default.
        odom = Odometry()
        odom.header.stamp = current_time
        odom.header.frame_id = 'odom'
        odom.child_frame_id = 'base_link'
        odom.twist.twist.linear.x = vx
        odom_publisher.publish(odom)

        rate.sleep()
Example #3
0
    def _broadcastTransform(self, Pose, parentName, childName):
        """Broadcast *Pose* to /tf as the transform parentName -> childName.

        The broadcaster is created once and cached on the instance: the
        original built a fresh TransformBroadcaster on every call, whose
        underlying publisher may not have established connections before
        sendTransform runs, silently dropping transforms.
        """
        if not hasattr(self, '_tf_broadcaster'):
            self._tf_broadcaster = TransformBroadcaster()

        tr = TransformStamped()
        tr.header.stamp = Time.now()
        tr.header.frame_id = parentName
        tr.child_frame_id = childName
        # NOTE(review): assigns a geometry_msgs Point into a Vector3 field;
        # genpy serializes by attribute so this works, but it is fragile.
        tr.transform.translation = Pose.position
        tr.transform.rotation = Pose.orientation

        self._tf_broadcaster.sendTransform(tr)
Example #4
0
def callback(msg):
    """Re-broadcast an incoming pose as the 'odom' -> 'base_link' transform.

    The z translation is forced to 0.0 (planar robot); orientation is copied
    through unchanged.
    """
    broadcaster = TransformBroadcaster()
    transform = geometry_msgs.msg.TransformStamped()

    transform.header.stamp = rospy.Time.now()
    transform.header.frame_id = "odom"
    transform.child_frame_id = "base_link"

    position = msg.pose.pose.position
    transform.transform.translation.x = position.x
    transform.transform.translation.y = position.y
    transform.transform.translation.z = 0.0

    orientation = msg.pose.pose.orientation
    transform.transform.rotation.x = orientation.x
    transform.transform.rotation.y = orientation.y
    transform.transform.rotation.z = orientation.z
    transform.transform.rotation.w = orientation.w

    broadcaster.sendTransform(transform)
Example #5
0
class TfWrapper(object):
    """Convenience wrapper around tf2: pose lookups plus periodic
    re-broadcasting of user-registered frames."""

    def __init__(self, buffer_size=2):
        self.tfBuffer = Buffer(rospy.Duration(buffer_size))
        self.tf_listener = TransformListener(self.tfBuffer)
        # self.tf_static = StaticTransformBroadcaster()
        self.tf_frequency = rospy.Duration(0.05)
        self.broadcasting_frames = {}
        self.broadcasting_frames_lock = Lock()
        # Give the listener a moment to start filling the buffer.
        rospy.sleep(0.1)

    def transform_pose(self, target_frame, pose):
        """Transform *pose* into *target_frame*.

        Returns the transformed PoseStamped, or None (after logging a
        warning) when the lookup hits an extrapolation error.
        """
        try:
            lookup = self.tfBuffer.lookup_transform(
                target_frame,
                pose.header.frame_id,  # source frame
                pose.header.stamp,
                rospy.Duration(2.0))
            return do_transform_pose(pose, lookup)
        except ExtrapolationException as e:
            rospy.logwarn(e)
            return None

    def lookup_transform(self, target_frame, source_frame):
        """Return source_frame's origin expressed in target_frame (or None)."""
        identity = PoseStamped()
        identity.header.frame_id = source_frame
        identity.pose.orientation.w = 1.0
        return self.transform_pose(target_frame, identity)

    def set_frame_from_pose(self, name, pose_stamped):
        """Register (or replace) a frame *name* to broadcast from *pose_stamped*."""
        frame = TransformStamped()
        frame.header = pose_stamped.header
        frame.child_frame_id = name
        frame.transform.translation = pose_stamped.pose.position
        frame.transform.rotation = pose_stamped.pose.orientation
        with self.broadcasting_frames_lock:
            self.broadcasting_frames[name] = frame

    def start_frame_broadcasting(self):
        """Create the broadcaster and start the periodic broadcast timer."""
        self.tf_broadcaster = TransformBroadcaster()
        self.tf_timer = rospy.Timer(self.tf_frequency, self.broadcasting_cb)

    def broadcasting_cb(self, _):
        """Timer callback: restamp and send every registered frame."""
        with self.broadcasting_frames_lock:
            for frame in self.broadcasting_frames.values():
                frame.header.stamp = rospy.get_rostime()
                self.tf_broadcaster.sendTransform(frame)
Example #6
0
class TfBridge(object):
    """ Utility class to interface with /tf2 """

    def __init__(self):
        """Create the tf2 buffer, listener and broadcaster."""
        self.tf_buffer = Buffer()
        self.tf_listener = TransformListener(self.tf_buffer)
        self.tf_broadcaster = TransformBroadcaster()

    def publish_pose_to_tf(self, pose, source_frame, target_frame, time=None):
        """ Publish the given pose to /tf2 """
        msg = pose.to_msg()
        transform = TransformStamped()
        # Fall back to the current time when no explicit stamp is given.
        transform.header.stamp = rospy.Time.now() if time is None else time
        transform.header.frame_id = source_frame
        transform.child_frame_id = target_frame
        transform.transform.translation = msg.position
        transform.transform.rotation = msg.orientation
        self.tf_broadcaster.sendTransform(transform)

    def get_pose_from_tf(self, source_frame, target_frame, time=None):
        """ Get the pose from /tf2 """
        # rospy.Time(0) asks the buffer for the latest available transform.
        lookup_time = rospy.Time(0) if time is None else time
        try:
            trans = self.tf_buffer.lookup_transform(
                source_frame, target_frame, lookup_time)
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logwarn("[perception] Exception occured: {}".format(e))
            return False, None

        translation = trans.transform.translation
        rotation = trans.transform.rotation
        pose = Vector6D(x=translation.x,
                        y=translation.y,
                        z=translation.z).from_quaternion(rotation.x,
                                                         rotation.y,
                                                         rotation.z,
                                                         rotation.w)
        return True, pose
Example #7
0
def handle_turtle_pose(msg, turtlename):
    """Broadcast a turtle's 2D pose as the 'world' -> *turtlename* transform."""
    broadcaster = TransformBroadcaster()
    transform = TransformStamped()

    transform.header.stamp = rospy.Time.now()
    transform.header.frame_id = 'world'
    transform.child_frame_id = turtlename

    transform.transform.translation.x = msg.x
    transform.transform.translation.y = msg.y
    transform.transform.translation.z = 0.0

    # Planar robot: heading only, converted to a quaternion about z.
    qx, qy, qz, qw = tf_conversions.transformations.quaternion_from_euler(
        0, 0, msg.theta)
    transform.transform.rotation.x = qx
    transform.transform.rotation.y = qy
    transform.transform.rotation.z = qz
    transform.transform.rotation.w = qw

    broadcaster.sendTransform(transform)
Example #8
0
class Pose2Tf():
    """Republishes PoseStamped messages from '/dji_sdk/pose' as /tf
    transforms from 'world' to 'drone'."""

    def __init__(self):
        self.tf_broadcaster = TransformBroadcaster()

        # Reusable message; only the stamp and transform fields are
        # rewritten in each callback.
        self.ts = TransformStamped()
        self.ts.header.frame_id = 'world'
        self.ts.child_frame_id = 'drone'

        # Keep the subscriber as an instance attribute: the original bound
        # it to a local that was discarded when __init__ returned, leaving
        # no handle for later unregistering (and nothing to silence
        # unused-variable warnings).
        self.pose_sub = Subscriber('/dji_sdk/pose', PoseStamped,
                                   self.pose_callback)

    def pose_callback(self, msg):
        """Copy the incoming pose into the cached transform and broadcast it."""
        self.ts.header.stamp = Time.now()
        self.ts.transform.translation.x = msg.pose.position.x
        self.ts.transform.translation.y = msg.pose.position.y
        self.ts.transform.translation.z = msg.pose.position.z
        self.ts.transform.rotation.x = msg.pose.orientation.x
        self.ts.transform.rotation.y = msg.pose.orientation.y
        self.ts.transform.rotation.z = msg.pose.orientation.z
        self.ts.transform.rotation.w = msg.pose.orientation.w
        self.tf_broadcaster.sendTransform(self.ts)
Example #9
0
class WheelOdom(Node):
    """ROS 2 node that periodically broadcasts a fixed set of transforms
    (odom->base_link, map->odom, base_link->camera_link) as fake wheel
    odometry for testing."""

    def __init__(self):
        super().__init__('wheel_odom')
        self.create_timer(0.1, self.cb)  # broadcast at 10 Hz
        self.odom_broadcaster = TransformBroadcaster(self)
        print('fake odom')

    def _make_transform(self, stamp, parent, child, x, y, z):
        """Build a TransformStamped from *parent* to *child* with the given
        translation and an identity rotation."""
        t = TransformStamped()
        t.header.stamp = stamp
        t.header.frame_id = parent
        t.child_frame_id = child
        t.transform.translation.x = x
        t.transform.translation.y = y
        t.transform.translation.z = z
        t.transform.rotation.x = 0.0
        t.transform.rotation.y = 0.0
        t.transform.rotation.z = 0.0
        t.transform.rotation.w = 1.0
        return t

    def cb(self):
        """Timer callback: broadcast the three fixed transforms.

        The original built each transform inline with ~20 duplicated lines
        per frame; the construction is factored into _make_transform.
        """
        stamp = self.get_clock().now().to_msg()
        self.odom_broadcaster.sendTransform(
            self._make_transform(stamp, 'odom', 'base_link', 0.0, 0.0, 0.0))
        self.odom_broadcaster.sendTransform(
            self._make_transform(stamp, 'map', 'odom', 0.0, 0.0, 0.0))
        # Camera mounted 0.4 m above base_link.
        self.odom_broadcaster.sendTransform(
            self._make_transform(stamp, 'base_link', 'camera_link',
                                 0.0, 0.0, 0.4))
Example #10
0
class TabletopPerception(object):
    """Tabletop perception pipeline node.

    Subscribes to an RGB (and optionally depth) camera, runs object, face
    and person detection, multi-object tracking, and pose / shape / facial
    feature estimation, then publishes the resulting scene changes on
    'tracks', an annotated debug image on 'tracks_image', and (optionally)
    per-track poses to /tf.

    Detection work is interleaved across frames via a modulo-n_frame
    counter: frame 0 runs object + foreground detection, frame 1 runs face
    detection, remaining frames only update the trackers.
    """

    def __init__(self):
        # tf2 plumbing: buffer + listener for lookups, broadcaster for output.
        self.tf_buffer = Buffer()
        self.tf_listener = TransformListener(self.tf_buffer)
        self.tf_broadcaster = TransformBroadcaster()

        self.rgb_image_topic = rospy.get_param("~rgb_image_topic",
                                               "/camera/rgb/image_raw")
        self.rgb_camera_info_topic = rospy.get_param(
            "~rgb_camera_info_topic", "/camera/rgb/camera_info")

        self.depth_image_topic = rospy.get_param("~depth_image_topic",
                                                 "/camera/depth/image_raw")
        self.depth_camera_info_topic = rospy.get_param(
            "~depth_camera_info_topic", "/camera/depth/camera_info")

        self.base_frame_id = rospy.get_param("~base_frame_id", "base_link")
        self.global_frame_id = rospy.get_param("~global_frame_id", "odom")

        self.bridge = CvBridge()

        # NOTE(review): this log reports the *depth* camera info topic, but
        # the subscription below uses the *RGB* one — confirm which is
        # intended.
        rospy.loginfo(
            "[tabletop_perception] Subscribing to '/{}' topic...".format(
                self.depth_camera_info_topic))
        self.camera_info = None
        self.camera_frame_id = None
        self.camera_info_subscriber = rospy.Subscriber(
            self.rgb_camera_info_topic, CameraInfo, self.camera_info_callback)

        # Model/weights/config file names for the two SSD detectors; all
        # default to "" when the parameters are unset.
        detector_model_filename = rospy.get_param("~detector_model_filename",
                                                  "")
        detector_weights_filename = rospy.get_param(
            "~detector_weights_filename", "")
        detector_config_filename = rospy.get_param("~detector_config_filename",
                                                   "")

        face_detector_model_filename = rospy.get_param(
            "~face_detector_model_filename", "")
        face_detector_weights_filename = rospy.get_param(
            "~face_detector_weights_filename", "")
        face_detector_config_filename = rospy.get_param(
            "~face_detector_config_filename", "")

        self.detector = SSDDetector(detector_model_filename,
                                    detector_weights_filename,
                                    detector_config_filename, 300)

        self.face_detector = SSDDetector(face_detector_model_filename,
                                         face_detector_weights_filename,
                                         face_detector_config_filename, 300)

        self.foreground_detector = ForegroundDetector(interactive_mode=False)

        # Facial landmark / head pose / facial feature estimators.
        shape_predictor_config_filename = rospy.get_param(
            "~shape_predictor_config_filename", "")
        self.facial_landmarks_estimator = FacialLandmarksEstimator(
            shape_predictor_config_filename)
        face_3d_model_filename = rospy.get_param("~face_3d_model_filename", "")
        self.head_pose_estimator = HeadPoseEstimator(face_3d_model_filename)

        facial_features_model_filename = rospy.get_param(
            "~facial_features_model_filename", "")
        self.facial_features_estimator = FacialFeaturesEstimator(
            face_3d_model_filename, facial_features_model_filename)

        self.color_features_estimator = ColorFeaturesEstimator()

        # Frame-scheduling: work is spread over a cycle of n_frame frames.
        self.n_frame = rospy.get_param("~n_frame", 4)
        self.frame_count = 0

        self.publish_tf = rospy.get_param("~publish_tf", True)
        self.publish_visualization_image = rospy.get_param(
            "~publish_visualization_image", True)

        # Tracker association thresholds and lifetimes.
        self.n_init = rospy.get_param("~n_init", 1)
        self.max_iou_distance = rospy.get_param("~max_iou_distance", 0.8)
        self.max_color_distance = rospy.get_param("~max_color_distance", 0.8)
        self.max_face_distance = rospy.get_param("~max_face_distance", 0.8)
        self.max_centroid_distance = rospy.get_param("~max_centroid_distance",
                                                     0.8)
        self.max_disappeared = rospy.get_param("~max_disappeared", 7)
        self.max_age = rospy.get_param("~max_age", 10)

        # NOTE(review): the object tracker hard-codes iou distance 0.7,
        # n_init 10 and max_disappeared 5 instead of using the parameters
        # read above — confirm this asymmetry is intentional.
        self.object_tracker = MultiObjectTracker(iou_cost,
                                                 color_cost,
                                                 0.7,
                                                 self.max_color_distance,
                                                 10,
                                                 5,
                                                 self.max_age,
                                                 use_appearance_tracker=True)

        self.face_tracker = MultiObjectTracker(iou_cost,
                                               centroid_cost,
                                               self.max_iou_distance,
                                               None,
                                               self.n_init,
                                               self.max_disappeared,
                                               self.max_age,
                                               use_appearance_tracker=False)

        self.person_tracker = MultiObjectTracker(iou_cost,
                                                 color_cost,
                                                 self.max_iou_distance,
                                                 self.max_color_distance,
                                                 self.n_init,
                                                 self.max_disappeared,
                                                 self.max_age,
                                                 use_appearance_tracker=False)

        self.shape_estimator = ShapeEstimator()

        self.object_pose_estimator = ObjectPoseEstimator()

        self.tracks_publisher = rospy.Publisher("tracks",
                                                SceneChangesStamped,
                                                queue_size=1)

        self.visualization_publisher = rospy.Publisher("tracks_image",
                                                       Image,
                                                       queue_size=1)

        # With depth enabled, RGB and depth images are approximately
        # time-synchronized; otherwise only the RGB stream is used.
        self.use_depth = rospy.get_param("~use_depth", False)
        if self.use_depth is True:
            rospy.loginfo(
                "[tabletop_perception] Subscribing to '/{}' topic...".format(
                    self.rgb_image_topic))
            self.rgb_image_sub = message_filters.Subscriber(
                self.rgb_image_topic, Image)
            rospy.loginfo(
                "[tabletop_perception] Subscribing to '/{}' topic...".format(
                    self.depth_image_topic))
            self.depth_image_sub = message_filters.Subscriber(
                self.depth_image_topic, Image)

            self.sync = message_filters.ApproximateTimeSynchronizer(
                [self.rgb_image_sub, self.depth_image_sub],
                10,
                0.2,
                allow_headerless=True)
            self.sync.registerCallback(self.observation_callback)
        else:
            rospy.loginfo(
                "[tabletop_perception] Subscribing to '/{}' topic...".format(
                    self.rgb_image_topic))
            self.rgb_image_sub = rospy.Subscriber(self.rgb_image_topic,
                                                  Image,
                                                  self.observation_callback,
                                                  queue_size=1)

    def camera_info_callback(self, msg):
        """Cache the camera intrinsics and frame id from a CameraInfo message."""
        if self.camera_info is None:
            rospy.loginfo("[tabletop_perception] Camera info received !")
        self.camera_info = msg
        self.camera_frame_id = msg.header.frame_id
        self.camera_matrix = np.array(msg.K).reshape((3, 3))
        self.dist_coeffs = np.array(msg.D)

    def observation_callback(self, bgr_image_msg, depth_image_msg=None):
        """Run one perception cycle on an incoming RGB (+ optional depth) image.

        Does nothing until camera info has been received. Detects, tracks,
        estimates poses/shapes/features, then publishes scene changes, the
        debug image, and (optionally) per-track /tf poses.
        """
        if self.camera_info is not None:
            perception_timer = cv2.getTickCount()
            bgr_image = self.bridge.imgmsg_to_cv2(bgr_image_msg, "bgr8")
            rgb_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB)
            if depth_image_msg is not None:
                depth_image = self.bridge.imgmsg_to_cv2(depth_image_msg)
            else:
                depth_image = None
            # NOTE(review): viz_frame is only bound when
            # publish_visualization_image is True, yet the cv2.rectangle /
            # cv2.putText calls below run unconditionally — with the
            # parameter set to False this raises NameError. Confirm.
            if self.publish_visualization_image is True:
                viz_frame = bgr_image

            _, image_height, image_width = bgr_image.shape

            # Camera pose in the global frame, needed to project tracks.
            success, view_pose = self.get_pose_from_tf2(
                self.global_frame_id, self.camera_frame_id)

            if success is not True:
                rospy.logwarn(
                    "[tabletop_perception] The camera sensor is not localized in world space (frame '{}'), please check if the sensor frame is published in /tf"
                    .format(self.global_frame_id))
            else:
                view_matrix = view_pose.transform()

                self.frame_count %= self.n_frame
                ######################################################
                # Detection
                ######################################################

                # Frame 0: objects + foreground; frame 1: faces; else none.
                detections = []
                if self.frame_count == 0:
                    detections = self.detector.detect(rgb_image,
                                                      depth_image=depth_image)
                    object_detections = self.foreground_detector.detect(
                        rgb_image,
                        depth_image=depth_image,
                        prior_detections=detections)
                    detections += object_detections
                elif self.frame_count == 1:
                    detections = self.face_detector.detect(
                        rgb_image, depth_image=depth_image)
                else:
                    detections = []

                ####################################################################
                # Features estimation
                ####################################################################

                self.color_features_estimator.estimate(rgb_image, detections)

                ######################################################
                # Tracking
                ######################################################

                # Each tracker is updated every frame; only the tracker
                # matching this frame's detection type receives detections.
                if self.frame_count == 0:
                    object_detections = [
                        d for d in detections if d.label != "person"
                    ]
                    person_detections = [
                        d for d in detections if d.label == "person"
                    ]
                    face_tracks = self.face_tracker.update(rgb_image, [])
                    object_tracks = self.object_tracker.update(
                        rgb_image, object_detections)
                    person_tracks = self.person_tracker.update(
                        rgb_image, person_detections)
                elif self.frame_count == 1:
                    face_tracks = self.face_tracker.update(
                        rgb_image, detections)
                    object_tracks = self.object_tracker.update(rgb_image, [])
                    person_tracks = self.person_tracker.update(rgb_image, [])
                else:
                    face_tracks = self.face_tracker.update(rgb_image, [])
                    object_tracks = self.object_tracker.update(rgb_image, [])
                    person_tracks = self.person_tracker.update(rgb_image, [])

                tracks = face_tracks + object_tracks + person_tracks

                ########################################################
                # Pose & Shape estimation
                ########################################################

                self.facial_landmarks_estimator.estimate(
                    rgb_image, face_tracks)

                self.head_pose_estimator.estimate(face_tracks, view_matrix,
                                                  self.camera_matrix,
                                                  self.dist_coeffs)
                self.object_pose_estimator.estimate(
                    object_tracks + person_tracks, view_matrix,
                    self.camera_matrix, self.dist_coeffs)

                self.shape_estimator.estimate(rgb_image, tracks,
                                              self.camera_matrix,
                                              self.dist_coeffs)

                ########################################################
                # Recognition
                ########################################################

                self.facial_features_estimator.estimate(rgb_image, face_tracks)

                self.frame_count += 1
                ######################################################
                # Visualization of debug image
                ######################################################

                perception_fps = cv2.getTickFrequency() / (cv2.getTickCount() -
                                                           perception_timer)

                cv2.rectangle(viz_frame, (0, 0), (250, 40), (200, 200, 200),
                              -1)
                perception_fps_str = "Perception fps : {:0.1f}hz".format(
                    perception_fps)
                cv2.putText(
                    viz_frame, "Nb detections/tracks : {}/{}".format(
                        len(detections), len(tracks)), (5, 15),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1)
                cv2.putText(viz_frame, perception_fps_str, (5, 30),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1)

                scene_changes = SceneChangesStamped()
                scene_changes.world = "tracks"

                # Stamp the scene changes with the image time, expressed in
                # the global frame.
                header = bgr_image_msg.header
                header.frame_id = self.global_frame_id
                scene_changes.header = header

                for track in tracks:
                    if self.publish_visualization_image is True:
                        track.draw(viz_frame, (230, 0, 120, 125), 1,
                                   view_matrix, self.camera_matrix,
                                   self.dist_coeffs)
                    if track.is_confirmed():
                        scene_node = track.to_msg(header)
                        if self.publish_tf is True:
                            if track.is_located() is True:
                                self.publish_tf_pose(scene_node.pose_stamped,
                                                     self.global_frame_id,
                                                     scene_node.id)
                        scene_changes.changes.nodes.append(scene_node)
                if self.publish_visualization_image is True:
                    viz_img_msg = self.bridge.cv2_to_imgmsg(viz_frame)
                    self.visualization_publisher.publish(viz_img_msg)

                self.tracks_publisher.publish(scene_changes)

    def get_pose_from_tf2(self, source_frame, target_frame, time=None):
        """Look up target_frame relative to source_frame in tf2.

        Returns (True, Vector6D pose) on success, (False, None) after
        logging a warning on any tf2 lookup failure. With time=None the
        latest available transform (rospy.Time(0)) is used.
        """
        try:
            if time is not None:
                trans = self.tf_buffer.lookup_transform(
                    source_frame, target_frame, time)
            else:
                trans = self.tf_buffer.lookup_transform(
                    source_frame, target_frame, rospy.Time(0))
            x = trans.transform.translation.x
            y = trans.transform.translation.y
            z = trans.transform.translation.z

            rx = trans.transform.rotation.x
            ry = trans.transform.rotation.y
            rz = trans.transform.rotation.z
            rw = trans.transform.rotation.w
            pose = Vector6D(x=x, y=y, z=z).from_quaternion(rx, ry, rz, rw)
            return True, pose
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException) as e:
            rospy.logwarn("[perception] Exception occured: {}".format(e))
            return False, None

    def publish_tf_pose(self, pose_stamped, source_frame, target_frame):
        """Broadcast *pose_stamped* as the transform source_frame -> target_frame."""
        transform = TransformStamped()
        transform.child_frame_id = target_frame
        transform.header.frame_id = source_frame
        transform.header.stamp = pose_stamped.header.stamp
        transform.transform.translation = pose_stamped.pose.pose.position
        transform.transform.rotation = pose_stamped.pose.pose.orientation
        self.tf_broadcaster.sendTransform(transform)
Example #11
0
    rospy.loginfo('Software version:   {0}'.format(sw))
    rospy.loginfo('Bootloader version: {0}'.format(bl))
    rospy.loginfo('Accelerometer ID:   0x{0:02X}'.format(accel))
    rospy.loginfo('Magnetometer ID:    0x{0:02X}'.format(mag))
    rospy.loginfo('Gyroscope ID:       0x{0:02X}\n'.format(gyro))

    br = TransformBroadcaster()
    quat_pub = rospy.Publisher('/imu_quat', Quaternion, queue_size=1)

    rate = rospy.Rate(50.0)
    while not rospy.is_shutdown():
        sys, gyro, accel, mag = bno.get_calibration_status()
        x, y, z, w = bno.read_quaternion()
        norm = sqrt(x**2 + y**2 + z**2 + w**2)

        q = Quaternion(x=x / norm, y=y / norm, z=z / norm, w=w / norm)
        quat_pub.publish(q)

        tf = TransformStamped()
        tf.header.stamp = rospy.get_rostime()
        tf.header.frame_id = "imu_footprint"
        tf.child_frame_id = "imu"
        tf.transform.rotation = q
        br.sendTransform(tf)

        rospy.logdebug_throttle(
            1,
            'Q(xyzw)=({0:0.3F}, {1:0.3F}, {2:0.3F}, {3:0.3F}\tSys_cal={4} Gyro_cal={5} Accel_cal={6} Mag_cal={7}'
            .format(x, y, z, w, sys, gyro, accel, mag))
        rate.sleep()
Example #12
0
        marker.scale = Vector3(z=0.5)
        marker.color = ColorRGBA(r=1.0, g=1.0, b=1.0, a=1.0)
        marker.ns = k + "_text"
        marker.id = id
        marker.pose.position.x = x
        marker.pose.position.y = y
        marker.pose.position.z = marker.scale.z * 1.4
        marker.pose.orientation.w = 1.0
        marker.text = k
        markers.markers.append(marker)

        id += 1

        if k == "WP3":
            map_tf.header.frame_id = "world"
            map_tf.child_frame_id = "map"
            map_tf.transform.translation.x = x
            map_tf.transform.translation.y = y
            map_tf.transform.rotation.w = 1.0

    rate = rospy.Rate(1.0)
    while not rospy.is_shutdown():
        t = rospy.Time.now()
        for m in markers.markers:
            m.header.stamp = t
        map_tf.header.stamp = t

        pub_markers.publish(markers)
        br.sendTransform(map_tf)
        rate.sleep()
class RunSimulationViz:
    """Two-player racecar simulation ROS node.

    Builds a ``RacecarSimulatorTwoPlayer`` for cars "one" and "two",
    loads and binarizes the static map, and on a fixed-rate timer steps
    the simulation and publishes per-car TF transforms, poses, odometry
    and laser scans.  Drive commands for each car arrive on separate
    ``AckermannDriveStamped`` topics.
    """

    def __init__(self, visualize=True, verbose=True):

        # NOTE(review): `visualize` is stored but never read inside this
        # class -- presumably consumed elsewhere; confirm before removing.
        self.visualize = visualize
        self.verbose = verbose

        # Timer period in seconds, passed to rospy.Duration below.
        self.update_pose_rate = rospy.get_param("~update_pose_rate")

        # read parameters
        # parameters for car/s
        self.car_config = {
            "scan_beams": rospy.get_param("~scan_beams"),
            "scan_fov": rospy.get_param("~scan_fov"),
            "scan_std": rospy.get_param("~scan_std"),
            "free_thresh": rospy.get_param("~free_thresh"),
            "scan_dist_to_base": rospy.get_param("~scan_dist_to_base"),
            "max_speed": rospy.get_param("~max_speed"),
            "max_accel": rospy.get_param("~max_accel"),
            "max_decel": rospy.get_param("~max_decel"),
            "max_steer_ang": rospy.get_param("~max_steer_ang"),
            "max_steer_vel": rospy.get_param("~max_steer_vel"),
            #"col_thresh": rospy.get_param("~coll_threshold"),
            "ttc_thresh": rospy.get_param("~ttc_thresh"),
            "width": rospy.get_param("~width"),
            "length": rospy.get_param("~length"),
            "scan_max_range": rospy.get_param("~scan_max_range"),
            "update_pose_rate": rospy.get_param("~update_pose_rate"),
            "wb": rospy.get_param("~wheelbase"),
            "fc": rospy.get_param("~friction_coeff"),
            "h_cg": rospy.get_param("~height_cg"),
            "l_r": rospy.get_param("~l_cg2rear"),
            "l_f": rospy.get_param("~l_cg2front"),
            "cs_f": rospy.get_param("~C_S_front"),
            "cs_r": rospy.get_param("~C_S_rear"),
            "I_z": rospy.get_param("~moment_inertia"),
            "mass": rospy.get_param("~mass"),
            "batch_size": rospy.get_param("~batch_size")
        }

        self.cars = ["one", "two"]
        # create simulation with two players, offset laterally by +/- 0.5
        self.rcs = RacecarSimulatorTwoPlayer(self.car_config,
                                             [('one', (0, 0.5)),
                                              ('two', (0, -0.5))])
        # set Maps: fetch the static map once via the map service and
        # binarize it (occupied -> 255, free -> 0) for the simulator
        map_service_name = rospy.get_param("~static_map", "static_map")
        rospy.wait_for_service(map_service_name)
        self.map_msg = rospy.ServiceProxy(map_service_name, GetMap)().map
        new_map = []
        for i in xrange(len(self.map_msg.data)):
            if self.map_msg.data[i] > 0:
                new_map.append(255)
            else:
                new_map.append(0)
        self.map_msg.data = tuple(new_map)
        self.rcs.setMap(self.map_msg)

        # driving action topics for player one and two (base name + suffix)
        self.drive_topic_one = rospy.get_param("~drive_topic") + "one"
        self.drive_topic_two = rospy.get_param("~drive_topic") + "two"

        # simulation doesnt need this?
        self.map_topic = rospy.get_param("~map_topic")

        # we dont need to display scan in rviz, but agents need it
        # to make actions
        self.scan_topic_one = rospy.get_param("~scan_topic") + "one"
        self.scan_topic_two = rospy.get_param("~scan_topic") + "two"

        # we dont need pose updates
        self.pose_topic_one = rospy.get_param("~pose_topic") + "one"
        self.pose_topic_two = rospy.get_param("~pose_topic") + "two"

        # this should publish "gt" for both p1 and p2
        self.odom_topic_one = rospy.get_param("~odom_topic") + "one"
        self.odom_topic_two = rospy.get_param("~odom_topic") + "two"

        # frame names used when broadcasting transforms below
        self.map_frame = rospy.get_param("~map_frame")
        self.base_frame = rospy.get_param("~base_frame")
        self.scan_frame = rospy.get_param("~scan_frame")

        # transform broadcaster, for gt_poses only
        self.br = TransformBroadcaster()

        ## publishers
        # publishers for scans, no tf used
        self.scan_pub_one = rospy.Publisher(self.scan_topic_one,
                                            LaserScan,
                                            queue_size=1)
        self.scan_pub_two = rospy.Publisher(self.scan_topic_two,
                                            LaserScan,
                                            queue_size=1)

        # odom for rviz, rviz takes poses
        self.pose_pub_one = rospy.Publisher(self.pose_topic_one,
                                            PoseStamped,
                                            queue_size=1)
        self.pose_pub_two = rospy.Publisher(self.pose_topic_two,
                                            PoseStamped,
                                            queue_size=1)

        self.odom_pub_one = rospy.Publisher(self.odom_topic_one,
                                            Odometry,
                                            queue_size=1)
        self.odom_pub_two = rospy.Publisher(self.odom_topic_two,
                                            Odometry,
                                            queue_size=1)
        """
        # dont need this prob
        self.map_pub = rospy.Publisher(self.map_topic, OccupancyGrid, queue_size=1)
        self.pose_pub = rospy.Publisher(self.gt_pose_topic, PoseStamped, queue_size=1)
        """

        # periodic simulation-update timer (period = update_pose_rate sec)
        self.timer = rospy.Timer(rospy.Duration(self.update_pose_rate),
                                 self.updateSimulationCallback)

        # subscribers: one drive-command topic per car
        self.drive_sub_one = rospy.Subscriber(self.drive_topic_one,
                                              AckermannDriveStamped,
                                              self.driveCallbackOne,
                                              queue_size=1)
        self.drive_sub_two = rospy.Subscriber(self.drive_topic_two,
                                              AckermannDriveStamped,
                                              self.driveCallbackTwo,
                                              queue_size=1)

        # dont need this prob
        """
        self.map_sub = rospy.Subscriber(self.map_topic, OccupancyGrid, self.mapCallback)
        self.pose_sub = rospy.Subscriber(self.pose_topic, PoseStamped, self.poseCallback)
        self.pose_rviz_sub = rospy.Subscriber(self.pose_rviz_topic,
                PoseWithCovarianceStamped,  self.poseRvizCallback)
        """

        if self.verbose:
            print "Simulation constructed"

    def updateSimulationCallback(self, event):
        """
            Timer callback: advance the simulation one step.

            Updates both cars' poses, republishes their TF/odometry for
            rviz, then simulates and publishes a laser scan per car.
        """
        # single timestamp shared by every message published this tick
        timestamp = rospy.get_rostime()
        """ update player one """
        # update simulation
        for car in self.cars:
            self.rcs.updatePose(car)
        # write to rviz
        for car in self.cars:
            # pub pose as transform
            self.poseTransformPub(timestamp, car)

            # publish steering ang
            self.steerAngTransformPub(timestamp, car)

            # publish odom
            self.odomPub(timestamp, car)

        # write to agents
        # sim lidar
        # NOTE(review): lidarPub() itself calls rcs.runScan(player), so the
        # explicit runScan calls below make each scan run twice per tick --
        # confirm whether runScan has side effects that require this.
        t1 = time.time()
        self.rcs.runScan(self.cars[1])
        self.lidarPub(timestamp, self.cars[0])

        self.rcs.runScan(self.cars[0])
        self.lidarPub(timestamp, self.cars[1])

        print "wtf is this time, %f" % (time.time() - t1)

        #if self.rcs.checkCollision() > 0:
        # do other things here too
        #self.rcs.stop()

    def steerAngTransformPub(self, timestamp, player):
        """
            Publish steering transforms; left and right wheels get the
            same rotation (from state[2]) under the player's TF prefix.
        """

        if player == "one":
            prefix = "r1/"
        else:
            prefix = "r2/"

        state = self.rcs.getState(player)

        ts_msg = TransformStamped()
        ts_msg.header.stamp = timestamp

        # state[2] is used as the steering/heading angle (yaw-only quaternion)
        quat = quaternion_from_euler(0.0, 0.0, state[2])
        ts_msg.transform.rotation.x = quat[0]
        ts_msg.transform.rotation.y = quat[1]
        ts_msg.transform.rotation.z = quat[2]
        ts_msg.transform.rotation.w = quat[3]

        # publish for right and left steering
        ts_msg.header.frame_id = prefix + "front_left_hinge"
        ts_msg.child_frame_id = prefix + "front_left_wheel"

        self.br.sendTransform(ts_msg)

        ts_msg.header.frame_id = prefix + "front_right_hinge"
        ts_msg.child_frame_id = prefix + "front_right_wheel"
        self.br.sendTransform(ts_msg)

    def odomPub(self, timestamp, player):
        """
            Publish simulation odometry for the given player, built from
            state = (x, y, yaw, v, ...) as used below.
        """

        state = self.rcs.getState(player)

        od_msg = Odometry()
        od_msg.header.stamp = timestamp
        od_msg.header.frame_id = self.map_frame
        od_msg.child_frame_id = self.base_frame
        quat = quaternion_from_euler(0.0, 0.0, state[2])
        od_msg.pose.pose.orientation.x = quat[0]
        od_msg.pose.pose.orientation.y = quat[1]
        od_msg.pose.pose.orientation.z = quat[2]
        od_msg.pose.pose.orientation.w = quat[3]

        od_msg.pose.pose.position.x = state[0]
        od_msg.pose.pose.position.y = state[1]
        od_msg.twist.twist.linear.x = state[3]
        # NOTE(review): state[4] is written to twist.linear.z; for a ground
        # vehicle this would normally be twist.angular.z -- confirm.
        od_msg.twist.twist.linear.z = state[4]

        if player == "one":
            self.odom_pub_one.publish(od_msg)
        else:
            self.odom_pub_two.publish(od_msg)

    def lidarPub(self, timestamp, player):
        """
            Simulate and publish a lidar scan for the given player.
        """

        scan = self.rcs.runScan(player)

        scan_msg = LaserScan()
        scan_msg.header.stamp = timestamp
        scan_msg.header.frame_id = self.scan_frame
        # scan_fov is treated as a full angular span centered on zero;
        # presumably radians -- confirm against parameter file
        scan_msg.angle_min = -self.car_config["scan_fov"] / 2.0
        scan_msg.angle_max = self.car_config["scan_fov"] / 2.0
        scan_msg.angle_increment = self.car_config[
            "scan_fov"] / self.car_config["scan_beams"]
        scan_msg.range_max = self.car_config["scan_max_range"]
        scan_msg.ranges = scan
        # intensities are just a copy of ranges (placeholder data)
        scan_msg.intensities = scan

        if player == "one":
            self.scan_pub_one.publish(scan_msg)

        else:
            self.scan_pub_two.publish(scan_msg)

    def driveCallbackOne(self, msg):
        """
            Forward player one's drive command (speed, steering angle)
            to the simulator.
        """

        self.rcs.drive("one", msg.drive.speed, msg.drive.steering_angle)

    def driveCallbackTwo(self, msg):
        """
            Forward player two's drive command (speed, steering angle)
            to the simulator.
        """

        self.rcs.drive("two", msg.drive.speed, msg.drive.steering_angle)

    def poseTransformPub(self, timestamp, player):
        """
            Broadcast the map -> <prefix>/base_frame transform for the
            player's pose and publish the same pose as a PoseStamped.
        """

        if player == "one":
            prefix = "r1/"
        else:
            prefix = "r2/"

        # get state information
        state = self.rcs.getState(player)

        # create the message
        pt_msg = Transform()
        pt_msg.translation.x = state[0]
        pt_msg.translation.y = state[1]
        quat = quaternion_from_euler(0.0, 0.0, state[2])
        pt_msg.rotation.x = quat[0]
        pt_msg.rotation.y = quat[1]
        pt_msg.rotation.z = quat[2]
        pt_msg.rotation.w = quat[3]

        # ground truth
        ps = PoseStamped()
        # NOTE(review): frame_id is set to the map *topic* name rather than
        # self.map_frame as used elsewhere -- looks like a bug; confirm.
        ps.header.frame_id = self.map_topic
        ps.pose.position.x = state[0]
        ps.pose.position.y = state[1]
        ps.pose.orientation.x = quat[0]
        ps.pose.orientation.y = quat[1]
        ps.pose.orientation.z = quat[2]
        ps.pose.orientation.w = quat[3]

        # add a header
        ts = TransformStamped()
        ts.header.stamp = timestamp
        ts.header.frame_id = self.map_frame
        ts.child_frame_id = prefix + self.base_frame
        ts.transform = pt_msg

        self.br.sendTransform(ts)

        if player == "one":
            self.pose_pub_one.publish(ps)
        else:
            self.pose_pub_two.publish(ps)
Example #14
0
            elif key in key_map:
                cmd[key_map[key][0]] += key_map[key][1]

                printCmd(cmd)

                ct = CompactTransform.fromXYZRPY(cmd['X'], cmd['Y'], cmd['Z'],
                                                 cmd['C'], cmd['B'], cmd['A'])

                redundancy_pub.publish(Float64(cmd['R']))

                pose_pub.publish(
                    PoseStamped(header=Header(
                        frame_id='{}_link_0_horizontal'.format(robot_name)),
                                pose=ct.toPose()))

                tf_broadcaster.sendTransform(
                    TransformStamped(
                        child_frame_id='{}/teleop'.format(robot_name),
                        header=Header(
                            frame_id='{}_link_0_horizontal'.format(robot_name),
                            stamp=Time.now()),
                        transform=ct.toTransform()))
            else:
                printCmd(cmd)

    except Exception as e:
        print(e)

    finally:
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
Example #15
0
class LandmarkMethodROS(LandmarkMethodBase):
    """ROS wrapper around LandmarkMethodBase for head-pose estimation.

    Subscribes to a camera image topic, detects and tracks faces,
    estimates each subject's head pose with solvePnP, stabilizes the
    pose with per-subject Kalman filters, and publishes the result as
    TF transforms plus subject-image and visualisation topics.
    """

    def __init__(self, img_proc=None):
        # Device for face detection comes from the parameter server.
        super(LandmarkMethodROS,
              self).__init__(device_id_facedetection=rospy.get_param(
                  "~device_id_facedetection", default="cuda:0"))
        # Choose between identity-based and simple sequential tracking.
        self.subject_tracker = FaceEncodingTracker() if rospy.get_param(
            "~use_face_encoding_tracker",
            default=True) else SequentialTracker()
        self.bridge = CvBridge()
        self.__subject_bridge = SubjectListBridge()

        self.camera_frame = rospy.get_param("~camera_frame", "kinect2_link")
        self.ros_tf_frame = rospy.get_param("~ros_tf_frame",
                                            "kinect2_ros_frame")

        self.tf2_broadcaster = TransformBroadcaster()
        self.tf2_buffer = Buffer()
        self.tf2_listener = TransformListener(self.tf2_buffer)
        self.tf_prefix = rospy.get_param("~tf_prefix", default="gaze")
        self.visualise_headpose = rospy.get_param("~visualise_headpose",
                                                  default=True)

        self.pose_stabilizers = {}  # Introduce scalar stabilizers for pose.

        try:
            # Either use the supplied camera model or build one from the
            # first CameraInfo message on /camera_info (blocks until seen).
            if img_proc is None:
                tqdm.write("Wait for camera message")
                cam_info = rospy.wait_for_message("/camera_info",
                                                  CameraInfo,
                                                  timeout=None)
                self.img_proc = PinholeCameraModel()
                # noinspection PyTypeChecker
                self.img_proc.fromCameraInfo(cam_info)
            else:
                self.img_proc = img_proc

            # An all-zero intrinsic matrix means the camera was never
            # calibrated; solvePnP would be meaningless, so fail fast.
            if np.array_equal(
                    self.img_proc.intrinsicMatrix().A,
                    np.array([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]])):
                raise Exception(
                    'Camera matrix is zero-matrix. Did you calibrate'
                    'the camera and linked to the yaml file in the launch file?'
                )
            tqdm.write("Camera message received")
        except rospy.ROSException:
            raise Exception("Could not get camera info")

        # multiple person images publication
        self.subject_pub = rospy.Publisher("/subjects/images",
                                           MSG_SubjectImagesList,
                                           queue_size=3)
        # multiple person faces publication for visualisation
        self.subject_faces_pub = rospy.Publisher("/subjects/faces",
                                                 Image,
                                                 queue_size=3)

        # Dynamic-reconfigure server for head-model size parameters.
        Server(ModelSizeConfig, self._dyn_reconfig_callback)

        # have the subscriber the last thing that's run to avoid conflicts
        self.color_sub = rospy.Subscriber("/image",
                                          Image,
                                          self.process_image,
                                          buff_size=2**24,
                                          queue_size=3)

    def _dyn_reconfig_callback(self, config, _):
        """Dynamic-reconfigure callback: rescale the 3D head-model points.

        Divides out the *old* scale factors before reading the new ones
        from `config`, then reapplies -- order matters here.
        """
        self.model_points /= (self.model_size_rescale *
                              self.interpupillary_distance)
        self.model_size_rescale = config["model_size"]
        self.interpupillary_distance = config["interpupillary_distance"]
        self.model_points *= (self.model_size_rescale *
                              self.interpupillary_distance)
        self.head_pitch = config["head_pitch"]
        return config

    def process_image(self, color_msg):
        """Image callback: track subjects and publish their head poses.

        For every tracked subject with both eye crops available, estimate
        the head pose, publish it, and (optionally) build a visualisation
        image of all head poses side by side.
        """
        color_img = gaze_tools.convert_image(color_msg, "bgr8")
        # Keep the source image timestamp so all outputs stay in sync.
        timestamp = color_msg.header.stamp

        self.update_subject_tracker(color_img)

        if not self.subject_tracker.get_tracked_elements():
            tqdm.write("\033[2K\033[1;31mNo face found\033[0m", end="\r")
            return

        self.subject_tracker.update_eye_images(self.eye_image_size)

        final_head_pose_images = []
        for subject_id, subject in self.subject_tracker.get_tracked_elements(
        ).items():
            if subject.left_eye_color is None or subject.right_eye_color is None:
                continue
            # Lazily create six scalar Kalman stabilizers per subject
            # (3 rotation + 3 translation components).
            if subject_id not in self.pose_stabilizers:
                self.pose_stabilizers[subject_id] = [
                    Stabilizer(state_num=2,
                               measure_num=1,
                               cov_process=0.1,
                               cov_measure=0.1) for _ in range(6)
                ]

            success, head_rpy, translation_vector = self.get_head_pose(
                subject.marks, subject_id)

            if success:
                # Publish all the data
                self.publish_pose(timestamp, translation_vector, head_rpy,
                                  subject_id)

                if self.visualise_headpose:
                    # Look the just-published transform back up in the
                    # target frame to get the visualised orientation.
                    trans_msg = self.tf2_buffer.lookup_transform(
                        self.ros_tf_frame, self.tf_prefix +
                        "/head_pose_estimated" + str(subject_id), timestamp)
                    rotation = trans_msg.transform.rotation
                    euler_angles_head = list(
                        transformations.euler_from_quaternion(
                            [rotation.x, rotation.y, rotation.z, rotation.w]))
                    euler_angles_head = gaze_tools.limit_yaw(euler_angles_head)

                    face_image_resized = cv2.resize(
                        subject.face_color,
                        dsize=(224, 224),
                        interpolation=cv2.INTER_CUBIC)

                    final_head_pose_images.append(
                        LandmarkMethodROS.visualize_headpose_result(
                            face_image_resized,
                            gaze_tools.get_phi_theta_from_euler(
                                euler_angles_head)))

        if len(self.subject_tracker.get_tracked_elements().items()) > 0:
            self.publish_subject_list(
                timestamp, self.subject_tracker.get_tracked_elements())

        if len(final_head_pose_images) > 0:
            # Stack all per-subject visualisations horizontally.
            headpose_image_ros = self.bridge.cv2_to_imgmsg(
                np.hstack(final_head_pose_images), "bgr8")
            headpose_image_ros.header.stamp = timestamp
            self.subject_faces_pub.publish(headpose_image_ros)

    def get_head_pose(self, landmarks, subject_id):
        """
        We are given a set of 2D points in the form of landmarks. The basic idea is that we assume a generic 3D head
        model, and try to fit the 2D points to the 3D model based on the Levenberg-Marquardt optimization. We can use
        OpenCV's implementation of SolvePnP for this.
        This method is inspired by http://www.learnopencv.com/head-pose-estimation-using-opencv-and-dlib/
        We are planning to replace this with our own head pose estimator.
        :param landmarks: Landmark positions to be used to determine head pose
        :param subject_id: ID of the subject that corresponds to the given landmarks
        :return: success - Whether the pose was successfully extracted
                 rpy_rotation - roll/pitch/yaw Euler angles (after Kalman
                 stabilisation and axis remapping) that, with the
                 translation vector, bring points from the model coordinate
                 system to the camera coordinate system
                 translation_vector - stabilised translation in meters
        """

        camera_matrix = self.img_proc.intrinsicMatrix()
        dist_coeffs = self.img_proc.distortionCoeffs()

        try:
            success, rodrigues_rotation, translation_vector, _ = cv2.solvePnPRansac(
                self.model_points,
                landmarks.reshape(len(self.model_points), 1, 2),
                cameraMatrix=camera_matrix,
                distCoeffs=dist_coeffs,
                flags=cv2.SOLVEPNP_DLS)

        except cv2.error as e:
            tqdm.write(
                '\033[2K\033[1;31mCould not estimate head pose: {}\033[0m'.
                format(e),
                end="\r")
            return False, None, None

        if not success:
            tqdm.write(
                '\033[2K\033[1;31mUnsuccessful in solvingPnPRanscan\033[0m',
                end="\r")
            return False, None, None

        # this is generic point stabiliser, the underlying representation doesn't matter
        # (translation is converted from millimeters to meters here)
        rotation_vector, translation_vector = self.apply_kalman_filter_head_pose(
            subject_id, rodrigues_rotation, translation_vector / 1000.0)

        # Apply the configured pitch offset from dynamic reconfigure.
        rotation_vector[0] += self.head_pitch

        _rotation_matrix, _ = cv2.Rodrigues(rotation_vector)
        # Remap axes from the OpenCV camera convention before extracting
        # Euler angles from the homogeneous matrix.
        _rotation_matrix = np.matmul(
            _rotation_matrix, np.array([[0, 1, 0], [0, 0, -1], [-1, 0, 0]]))
        _m = np.zeros((4, 4))
        _m[:3, :3] = _rotation_matrix
        _m[3, 3] = 1
        _rpy_rotation = np.array(transformations.euler_from_matrix(_m))

        return success, _rpy_rotation, translation_vector

    def apply_kalman_filter_head_pose(self, subject_id,
                                      rotation_vector_unstable,
                                      translation_vector_unstable):
        """Stabilise a (rotation, translation) pair with the subject's
        six scalar Kalman filters; returns (rotation_vector,
        translation_vector) as two length-3 arrays."""
        stable_pose = []
        pose_np = np.array(
            (rotation_vector_unstable, translation_vector_unstable)).flatten()
        for value, ps_stb in zip(pose_np, self.pose_stabilizers[subject_id]):
            ps_stb.update([value])
            stable_pose.append(ps_stb.state[0])
        # Row 0 = rotation components, row 1 = translation components.
        stable_pose = np.reshape(stable_pose, (-1, 3))
        rotation_vector = stable_pose[0]
        translation_vector = stable_pose[1]
        return rotation_vector, translation_vector

    def publish_subject_list(self, timestamp, subjects):
        """Publish the tracked subjects' images as a single list message."""
        assert (subjects is not None)

        subject_list_message = self.__subject_bridge.images_to_msg(
            subjects, timestamp)

        self.subject_pub.publish(subject_list_message)

    def publish_pose(self, timestamp, nose_center_3d_tf, head_rpy, subject_id):
        """Broadcast the estimated head pose as a TF transform
        (camera_frame -> <tf_prefix>/head_pose_estimated<subject_id>)
        and insert it into the local tf2 buffer for immediate lookup."""
        t = TransformStamped()
        t.header.frame_id = self.camera_frame
        t.header.stamp = timestamp
        t.child_frame_id = self.tf_prefix + "/head_pose_estimated" + str(
            subject_id)
        t.transform.translation.x = nose_center_3d_tf[0]
        t.transform.translation.y = nose_center_3d_tf[1]
        t.transform.translation.z = nose_center_3d_tf[2]

        rotation = transformations.quaternion_from_euler(*head_rpy)
        t.transform.rotation.x = rotation[0]
        t.transform.rotation.y = rotation[1]
        t.transform.rotation.z = rotation[2]
        t.transform.rotation.w = rotation[3]

        try:
            self.tf2_broadcaster.sendTransform([t])
        except rospy.ROSException as exc:
            # Ignore the benign race where the node is shutting down and
            # the TF topic is already closed; re-raise anything else.
            if str(exc) == "publish() to a closed topic":
                pass
            else:
                raise exc

        # Make the transform available to lookup_transform() in
        # process_image without waiting for it to round-trip over /tf.
        self.tf2_buffer.set_transform(t, 'extract_landmarks')

    def update_subject_tracker(self, color_img):
        """Detect faces in `color_img` and update the subject tracker;
        clears all tracked elements when no face is found."""
        faceboxes = self.get_face_bb(color_img)
        if len(faceboxes) == 0:
            self.subject_tracker.clear_elements()
            return

        tracked_subjects = self.get_subjects_from_faceboxes(
            color_img, faceboxes)

        # track the new faceboxes according to the previous ones
        self.subject_tracker.track(tracked_subjects)
Example #16
0
class DeadReckoningNode(DTROS):
    """Performs deadreckoning.
    The node performs deadreckoning to estimate odometry
    based upon wheel encoder values.

    Args:
        node_name (:obj:`str`): a unique, descriptive name for the node that ROS will use

    Configuration:
        ~veh (:obj:`str`): Robot name
        ~publish_hz (:obj:`float`): Frequency at which to publish odometry
        ~encoder_stale_dt (:obj:`float`): Time in seconds after encoders are considered stale
        ~wheelbase (:obj:`float`): Lateral distance between the center of wheels (in meters)
        ~ticks_per_meter (:obj:`int`): Total encoder ticks associated with one meter of travel
        ~debug (:obj: `bool`): Enable/disable debug output

    Publisher:
        ~odom (:obj:`Odometry`): The computed odometry

    Subscribers:
        ~left_wheel_encoder_node/tick (:obj:`WheelEncoderStamped`): Encoder ticks for left wheel
        ~right_wheel_encoder_node/tick (:obj:`WheelEncoderStamped`): Encoder ticks for right wheel
    """
    def __init__(self, node_name):
        super(DeadReckoningNode,
              self).__init__(node_name=node_name,
                             node_type=NodeType.LOCALIZATION)
        self.node_name = node_name

        self.veh = rospy.get_param("~veh")
        self.publish_hz = rospy.get_param("~publish_hz")
        self.encoder_stale_dt = rospy.get_param("~encoder_stale_dt")
        self.ticks_per_meter = rospy.get_param("~ticks_per_meter")
        self.wheelbase = rospy.get_param("~wheelbase")
        # Frame names may contain "~" as a placeholder for the robot name.
        self.origin_frame = rospy.get_param("~origin_frame").replace(
            "~", self.veh)
        self.target_frame = rospy.get_param("~target_frame").replace(
            "~", self.veh)
        self.debug = rospy.get_param("~debug", False)

        # Last-seen encoder messages and their timestamps (encoder clock
        # and local clock), used to compute per-update deltas.
        self.left_encoder_last = None
        self.right_encoder_last = None
        self.encoders_timestamp_last = None
        self.encoders_timestamp_last_local = None

        # Current pose, forward velocity, and angular rate
        self.timestamp = None
        self.x = 0.0
        self.y = 0.0
        self.z = 0.0
        self.yaw = 0.0
        self.q = [0.0, 0.0, 0.0, 1.0]
        self.tv = 0.0
        self.rv = 0.0

        # Used for debugging
        self.x_trajectory = []
        self.y_trajectory = []
        self.yaw_trajectory = []
        self.time = []

        self.total_dist = 0

        # Setup subscribers
        self.sub_encoder_left = message_filters.Subscriber(
            "~left_wheel", WheelEncoderStamped)

        self.sub_encoder_right = message_filters.Subscriber(
            "~right_wheel", WheelEncoderStamped)

        # Setup the time synchronizer: pair left/right ticks whose stamps
        # are within 0.5 s of each other (queue size 10).
        self.ts_encoders = message_filters.ApproximateTimeSynchronizer(
            [self.sub_encoder_left, self.sub_encoder_right], 10, 0.5)
        self.ts_encoders.registerCallback(self.cb_ts_encoders)

        # Setup publishers
        self.pub = rospy.Publisher("~odom", Odometry, queue_size=10)

        # Setup timer
        self.timer = rospy.Timer(rospy.Duration(1 / self.publish_hz),
                                 self.cb_timer)
        # Throttle the "no encoder messages" warning to once per interval.
        self._print_time = 0
        self._print_every_sec = 30
        # tf broadcaster for odometry TF
        self._tf_broadcaster = TransformBroadcaster()

        self.loginfo("Initialized")

    def cb_ts_encoders(self, left_encoder, right_encoder):
        """Synchronized-encoder callback: integrate a differential-drive
        kinematics step and update pose (x, y, yaw) and velocities.

        Args:
            left_encoder (WheelEncoderStamped): left wheel tick message
            right_encoder (WheelEncoderStamped): right wheel tick message
        """
        timestamp_now = rospy.get_time()

        # Use the average of the two encoder times as the timestamp
        left_encoder_timestamp = left_encoder.header.stamp.to_sec()
        right_encoder_timestamp = right_encoder.header.stamp.to_sec()
        timestamp = (left_encoder_timestamp + right_encoder_timestamp) / 2

        # First message only seeds the "last seen" state; nothing to
        # integrate yet.
        if self.left_encoder_last is None:
            self.left_encoder_last = left_encoder
            self.right_encoder_last = right_encoder
            self.encoders_timestamp_last = timestamp
            self.encoders_timestamp_last_local = timestamp_now
            return

        # Skip this message if the time synchronizer gave us an older message
        dtl = left_encoder.header.stamp - self.left_encoder_last.header.stamp
        dtr = right_encoder.header.stamp - self.right_encoder_last.header.stamp
        if dtl.to_sec() < 0 or dtr.to_sec() < 0:
            self.loginfo("Ignoring stale encoder message")
            return

        left_dticks = left_encoder.data - self.left_encoder_last.data
        right_dticks = right_encoder.data - self.right_encoder_last.data

        left_distance = left_dticks * 1.0 / self.ticks_per_meter
        right_distance = right_dticks * 1.0 / self.ticks_per_meter

        # Displacement in body-relative x-direction
        distance = (left_distance + right_distance) / 2

        # Change in heading (differential-drive kinematics)
        dyaw = (right_distance - left_distance) / self.wheelbase

        dt = timestamp - self.encoders_timestamp_last

        # Guard against division by a near-zero interval below.
        if dt < 1e-6:
            self.logwarn(
                "Time since last encoder message (%f) is too small. Ignoring" %
                dt)
            return

        self.tv = distance / dt
        self.rv = dyaw / dt

        if self.debug:
            self.loginfo(
                "Left wheel:\t Time = %.4f\t Ticks = %d\t Distance = %.4f m" %
                (left_encoder.header.stamp.to_sec(), left_encoder.data,
                 left_distance))

            self.loginfo(
                "Right wheel:\t Time = %.4f\t Ticks = %d\t Distance = %.4f m" %
                (right_encoder.header.stamp.to_sec(), right_encoder.data,
                 right_distance))

            self.loginfo("TV = %.2f m/s\t RV = %.2f deg/s\t DT = %.4f" %
                         (self.tv, self.rv * 180 / math.pi, dt))

        dist = self.tv * dt
        dyaw = self.rv * dt

        # Integrate pose: heading first, then advance along the new heading.
        self.yaw = self.angle_clamp(self.yaw + dyaw)
        self.x = self.x + dist * math.cos(self.yaw)
        self.y = self.y + dist * math.sin(self.yaw)
        self.q = tr.quaternion_from_euler(0, 0, self.yaw)
        self.timestamp = timestamp

        self.left_encoder_last = left_encoder
        self.right_encoder_last = right_encoder
        self.encoders_timestamp_last = timestamp
        self.encoders_timestamp_last_local = timestamp_now

    def cb_timer(self, _):
        """Periodic publisher: zero velocities when encoders go stale,
        then publish the current odometry estimate."""
        need_print = time.time() - self._print_time > self._print_every_sec
        if self.encoders_timestamp_last:
            dt = rospy.get_time() - self.encoders_timestamp_last_local
            if abs(dt) > self.encoder_stale_dt:
                if need_print:
                    self.logwarn(
                        "No encoder messages received for %.2f seconds. "
                        "Setting translational and rotational velocities to zero"
                        % dt)
                self.rv = 0.0
                self.tv = 0.0
        else:
            if need_print:
                self.logwarn(
                    "No encoder messages received. "
                    "Setting translational and rotational velocities to zero")
            self.rv = 0.0
            self.tv = 0.0

        # Publish the odometry message
        self.publish_odometry()
        if need_print:
            self._print_time = time.time()

    def publish_odometry(self):
        """Publish the current pose/velocity as an Odometry message and
        broadcast the matching origin_frame -> target_frame transform."""
        odom = Odometry()
        odom.header.stamp = rospy.Time.now()  # Ideally, should be encoder time
        odom.header.frame_id = self.origin_frame
        odom.pose.pose = Pose(Point(self.x, self.y, self.z),
                              Quaternion(*self.q))
        odom.child_frame_id = self.target_frame
        odom.twist.twist = Twist(Vector3(self.tv, 0.0, 0.0),
                                 Vector3(0.0, 0.0, self.rv))

        self.pub.publish(odom)

        self._tf_broadcaster.sendTransform(
            TransformStamped(
                header=odom.header,
                child_frame_id=self.target_frame,
                transform=Transform(translation=Vector3(
                    self.x, self.y, self.z),
                                    rotation=Quaternion(*self.q)),
            ))

    @staticmethod
    def angle_clamp(theta):
        """Wrap `theta` (radians) into the open interval (-2*pi, 2*pi).

        Fix: the previous version added/subtracted a single full turn,
        so inputs with |theta| > 4*pi were returned un-clamped. Looping
        handles any magnitude and returns identical results for the
        previously supported range.
        """
        while theta > 2 * math.pi:
            theta -= 2 * math.pi
        while theta < -2 * math.pi:
            theta += 2 * math.pi
        return theta
Example #17
0
class ChassisNode(Node):
    """Differential-drive chassis driver node.

    Subscribes to ``cmd_vel`` and forwards normalized speed/steering
    commands to the chassis hardware; integrates wheel feedback into a
    planar pose and publishes ``odom_dirty`` plus three TF transforms
    (odom->base_link, map->odom identity, base_link->camera_link).
    """

    CameraElevationMeters = 0.4    # camera height above base_link
    WheelBaselineMeters = 0.430    # distance between left and right wheels
    WheelRadiusMeters = 0.115
    ReportIntervalSec = 0.02       # minimum period between odometry reports
    SecInNsec = 10**-9
    RadPerSecInWheelFeedback = 1.  # scale raw wheel feedback -> rad/s

    MaxSpeedMetersSec = 1.0  # linear velocity from vel_cmd is scaled and bounded to this
    MaxYawRateRadSec = 1.0  # yaw rate from vel_cmd is scaled and bounded to this. todo : set to actual max yaw rate of chassis

    def __init__(self):
        super().__init__('chassis')

        self.odom_pub = self.create_publisher(Odometry, "odom_dirty", 10)
        self.odom_broadcaster = TransformBroadcaster(self)
        # Integrated planar pose and velocities; floats because ROS 2
        # message fields are float64.
        self.x = 0.0
        self.y = 0.0
        self.yaw = 0.0
        self.Vx = 0.0
        self.yawRate = 0.0

        # Last normalized commands sent to the chassis (range [-1, 1]).
        self.cmdSpeed = 0.0
        self.cmdSteering = 0.0

        self.tsFeedback = self.get_clock().now().nanoseconds
        self.chassis = ChassisInterface()
        self.chassis.setWheelCallback(self.processWheelFeedback)

        self.sub = self.create_subscription(Twist, 'cmd_vel', self.processCmd,
                                            10)
        logging.info('chassis node initialized')

    def processWheelFeedback(self, DeviceIdIgnored, respTuple):
        """Integrate wheel feedback into the pose and publish odometry + TF.

        :param DeviceIdIgnored: hardware device id (unused)
        :param respTuple: (left, right) wheel speeds in feedback units
        """
        now = self.get_clock().now()
        deltaTimeSec = (now.nanoseconds -
                        self.tsFeedback) * ChassisNode.SecInNsec

        # Throttle: publish at most once per ReportIntervalSec.
        if deltaTimeSec < ChassisNode.ReportIntervalSec:
            return

        self.tsFeedback = now.nanoseconds
        wheelLRadSec = respTuple[0] * ChassisNode.RadPerSecInWheelFeedback
        wheelRRadSec = respTuple[1] * ChassisNode.RadPerSecInWheelFeedback
        # NOTE(review): the arguments are passed (right, left) into a
        # (Lvel, Rvel) signature — the names appear swapped; together with
        # the negative factor in calculatePose the sign may be intentional.
        # Verify against hardware.
        self.calculatePose(deltaTimeSec, wheelRRadSec, wheelLRadSec)

        logging.debug(
            'chassis wheel feedback : {0} {1} speed {2} yawRate {3} pos [{4} {5}] yaw {6}'
            .format(wheelLRadSec, wheelRRadSec, self.Vx, self.yawRate,
                    self.x, self.y, self.yaw))

        # Yaw-only quaternion from the integrated heading.
        quaternion = Quaternion()
        quaternion.x = 0.0
        quaternion.y = 0.0
        quaternion.z = sin(self.yaw / 2)
        quaternion.w = cos(self.yaw / 2)

        odom = Odometry()
        odom.header.stamp = now.to_msg()
        odom.header.frame_id = 'odom'
        odom.child_frame_id = 'base_link'
        odom.pose.pose.position.x = self.x
        odom.pose.pose.position.y = self.y
        odom.pose.pose.position.z = 0.0
        odom.pose.covariance[0] = 0.1
        odom.pose.covariance[7] = 0.1
        odom.pose.covariance[35] = 0.1
        odom.pose.pose.orientation = quaternion
        odom.twist.twist.linear.x = self.Vx
        odom.twist.twist.linear.y = 0.0
        odom.twist.twist.angular.z = self.yawRate
        odom.twist.covariance[0] = 0.01
        odom.twist.covariance[7] = 0.01
        odom.twist.covariance[35] = 0.01
        self.odom_pub.publish(odom)

        # BUG FIX: the original shared a single Vector3 instance between all
        # three transforms and then mutated its x/y/z fields, so the
        # map->odom "identity" transform and the camera transform ended up
        # carrying the odometry translation.  Each transform now owns its
        # translation (message fields default to zeros / identity rotation).

        # odom -> base_link: integrated pose.
        transform_stamped_msg = TransformStamped()
        transform_stamped_msg.header.stamp = now.to_msg()
        transform_stamped_msg.header.frame_id = 'odom'
        transform_stamped_msg.child_frame_id = 'base_link'
        transform_stamped_msg.transform.translation.x = self.x
        transform_stamped_msg.transform.translation.y = self.y
        transform_stamped_msg.transform.rotation = quaternion
        self.odom_broadcaster.sendTransform(transform_stamped_msg)

        # map -> odom: identity (no global localization available here).
        transform_stamped_msg = TransformStamped()
        transform_stamped_msg.header.stamp = now.to_msg()
        transform_stamped_msg.header.frame_id = 'map'
        transform_stamped_msg.child_frame_id = 'odom'
        self.odom_broadcaster.sendTransform(transform_stamped_msg)

        # base_link -> camera_link: fixed camera mount offset.
        transform_stamped_msg = TransformStamped()
        transform_stamped_msg.header.stamp = now.to_msg()
        transform_stamped_msg.header.frame_id = 'base_link'
        transform_stamped_msg.child_frame_id = 'camera_link'
        transform_stamped_msg.transform.translation.z = ChassisNode.CameraElevationMeters
        self.odom_broadcaster.sendTransform(transform_stamped_msg)

    def processCmd(self, data):
        """Normalize an incoming Twist and forward it to the chassis.

        BUG FIX: the original referenced ``MaxSpeedMetersSec``,
        ``MaxYawRateRadSec`` and ``cmdSteering`` as bare names, raising
        ``NameError`` on every message; they are class/instance attributes.
        """
        #todo : add deadzone for Vx and yaw rate
        #todo : reset chassis control to zeroes if cmd_vel has expired
        cmdSpeedBounded = max(
            min(data.linear.x, ChassisNode.MaxSpeedMetersSec),
            -ChassisNode.MaxSpeedMetersSec)
        self.cmdSpeed = cmdSpeedBounded / ChassisNode.MaxSpeedMetersSec
        self.chassis.setSpeed(self.cmdSpeed)

        cmdYawRateBounded = max(
            min(data.angular.z, ChassisNode.MaxYawRateRadSec),
            -ChassisNode.MaxYawRateRadSec)
        self.cmdSteering = cmdYawRateBounded / ChassisNode.MaxYawRateRadSec
        self.chassis.setSteering(self.cmdSteering)
        logging.debug('chassis cmd speed {0}/{1} steering {2}/{3}'.format(
            data.linear.x, self.cmdSpeed, data.angular.z, self.cmdSteering))

    #dtSec : integration interval
    #Lvel, Rvel : wheels angular speed, rad/sec
    def calculatePose(self, dtSec, Lvel, Rvel):
        """Integrate differential-drive kinematics over ``dtSec``."""
        self.Vx = ChassisNode.WheelRadiusMeters * (Rvel + Lvel) / 2
        # NOTE(review): -1.6 looks like an empirical sign/slip calibration
        # factor for the yaw rate — confirm against hardware.
        self.yawRate = -1.6 * ChassisNode.WheelRadiusMeters * (
            Rvel - Lvel) / ChassisNode.WheelBaselineMeters
        self.yaw += dtSec * self.yawRate
        self.y += dtSec * sin(self.yaw) * self.Vx
        self.x += dtSec * cos(self.yaw) * self.Vx
class StatePublisher(Node):
    """Demo state publisher: cycles the 'swivel', 'tilt' and 'periscope'
    joints through a canned motion and moves an 'odom'->'axis' transform
    around a circle of radius 2.

    NOTE: the constructor never returns under normal operation — it calls
    ``rclpy.init()`` itself and then loops publishing at ~30 Hz until
    shutdown or Ctrl-C.
    """

    def __init__(self):
        rclpy.init()  # unusual: the node bootstraps rclpy itself
        super().__init__('state_publisher')

        qos_profile = QoSProfile(depth=10)
        self.joint_pub = self.create_publisher(JointState, 'joint_states', qos_profile)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.nodeName = self.get_name()
        self.get_logger().info("{0} started".format(self.nodeName))

        degree = pi / 180.0
        loop_rate = self.create_rate(30)  # target publish rate, Hz

        # robot state
        tilt = 0.
        tinc = degree  # tilt step per cycle; sign flips at the limits below
        swivel = 0.
        angle = 0.     # angular position on the circle, radians
        height = 0.
        hinc = 0.005   # periscope height step per cycle; sign flips at limits

        # message declarations (reused across iterations)
        odom_trans = TransformStamped()
        odom_trans.header.frame_id = 'odom'
        odom_trans.child_frame_id = 'axis'
        joint_state = JointState()

        try:
            while rclpy.ok():
                rclpy.spin_once(self)

                # update joint_state
                now = self.get_clock().now()
                joint_state.header.stamp = now.to_msg()
                joint_state.name = ['swivel', 'tilt', 'periscope']
                joint_state.position = [swivel, tilt, height]

                # update transform
                # (moving in a circle with radius=2)
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = cos(angle)*2
                odom_trans.transform.translation.y = sin(angle)*2
                odom_trans.transform.translation.z = 0.7
                odom_trans.transform.rotation = \
                    euler_to_quaternion(0, 0, angle + pi/2) # roll,pitch,yaw

                # send the joint state and transform
                self.joint_pub.publish(joint_state)
                self.broadcaster.sendTransform(odom_trans)

                # Create new robot state
                tilt += tinc
                if tilt < -0.5 or tilt > 0.0:
                    tinc *= -1
                height += hinc
                if height > 0.2 or height < 0.0:
                    hinc *= -1
                swivel += degree
                angle += degree/4

                # This will adjust as needed per iteration
                loop_rate.sleep()

        except KeyboardInterrupt:
            pass
Example #19
0
class ServiceNodeVelocity(WebotsNode):
    """Webots slave node for a 4-wheel differential robot.

    Publishes three IR distance sensors and a lidar scan, drives the four
    wheel motors from ``cmd_vel``, and integrates wheel-encoder readings
    into odometry published on ``odom`` plus an odom->base_link transform.
    """

    def __init__(self, args):
        super().__init__('slave_node', args)

        # Sensor sampling period, milliseconds.
        self.service_node_vel_timestep = 32

        # Sensor section
        self.sensor_timer = self.create_timer(
            0.001 * self.service_node_vel_timestep, self.sensor_callback)

        self.right_sensor = self.robot.getDistanceSensor(
            'distance_sensor_right')
        self.right_sensor.enable(self.service_node_vel_timestep)
        self.sensor_publisher_right = self.create_publisher(
            Float64, 'right_IR', 1)

        self.mid_sensor = self.robot.getDistanceSensor('distance_sensor_mid')
        self.mid_sensor.enable(self.service_node_vel_timestep)
        self.sensor_publisher_mid = self.create_publisher(Float64, 'mid_IR', 1)

        self.left_sensor = self.robot.getDistanceSensor('distance_sensor_left')
        self.left_sensor.enable(self.service_node_vel_timestep)
        self.sensor_publisher_left = self.create_publisher(
            Float64, 'left_IR', 1)

        # Front wheels: velocity-controlled (position set to infinity).
        self.left_motor_front = self.robot.getMotor('left_front_wheel')
        self.left_motor_front.setPosition(float('inf'))
        self.left_motor_front.setVelocity(0)

        self.right_motor_front = self.robot.getMotor('right_front_wheel')
        self.right_motor_front.setPosition(float('inf'))
        self.right_motor_front.setVelocity(0)

        # Rear wheels
        self.left_motor_rear = self.robot.getMotor('left_rear_wheel')
        self.left_motor_rear.setPosition(float('inf'))
        self.left_motor_rear.setVelocity(0)

        self.right_motor_rear = self.robot.getMotor('right_rear_wheel')
        self.right_motor_rear.setPosition(float('inf'))
        self.right_motor_rear.setVelocity(0)

        # Wheel position (encoder) sensors on the rear axle.
        self.left_wheel_sensor = self.robot.getPositionSensor(
            'left_rear_position')
        self.right_wheel_sensor = self.robot.getPositionSensor(
            'right_rear_position')
        self.left_wheel_sensor.enable(self.timestep)
        self.right_wheel_sensor.enable(self.timestep)
        self.motor_max_speed = self.left_motor_rear.getMaxVelocity()

        # Create Subscriber
        self.cmd_vel_subscriber = self.create_subscription(
            Twist, 'cmd_vel', self.cmdVel_callback, 1)

        # Lidar sensor and scan publisher.
        self.lidar_sensor = self.robot.getLidar('lidar_sensor')
        self.lidar_sensor.enable(self.service_node_vel_timestep)
        self.laser_publisher = self.create_publisher(LaserScan, '/scan', 1)

        # Odometry state: planar pose plus last commanded velocities.
        self.x = 0.0
        self.y = 0.0
        self.th = 0.0
        self.vx = 0.0
        self.vy = 0.0
        self.vth = 0.0
        self.time_step = 0.032
        self.left_omega = 0.0
        self.right_omega = 0.0
        self.odom_pub = self.create_publisher(Odometry, "odom", 1)
        # BUG FIX: create the TF broadcaster once here — the original
        # constructed a new TransformBroadcaster on every publish_odom call.
        self.odom_broadcaster = TransformBroadcaster(self)
        self.odom_timer = self.create_timer(self.time_step, self.odom_callback)
        self.get_logger().info('Sensor enabled')
        self.prev_angle = 0.0
        self.prev_left_wheel_ticks = 0.0
        self.prev_right_wheel_ticks = 0.0
        self.last_time = 0.0
        self.wheel_gap = 0.12  # in meter
        self.wheel_radius = 0.04  # in meter
        self.front_back = 0.1  # in meter

    def odom_callback(self):
        """Periodic timer hook: publish odometry."""
        self.publish_odom()

    def publish_odom(self):
        """Integrate wheel encoder readings into a planar pose and publish
        nav_msgs/Odometry plus the odom->base_link transform."""
        stamp = Time(seconds=self.robot.getTime()).to_msg()

        time_diff_s = self.robot.getTime() - self.last_time

        left_wheel_ticks = self.left_wheel_sensor.getValue()
        right_wheel_ticks = self.right_wheel_sensor.getValue()

        if time_diff_s == 0.0:
            return

        # Wheel angular velocities (rad/s) from encoder deltas, then
        # surface velocities (m/s).
        v_left_rad = (left_wheel_ticks -
                      self.prev_left_wheel_ticks) / time_diff_s
        v_right_rad = (right_wheel_ticks -
                       self.prev_right_wheel_ticks) / time_diff_s
        v_left = v_left_rad * self.wheel_radius
        v_right = v_right_rad * self.wheel_radius
        v = (v_left + v_right) / 2
        # BUG FIX: the original wrote (v_right - v_left) / 2 * 2 * wheel_gap,
        # which MULTIPLIES by the wheel gap due to operator precedence.  The
        # inverse of the kinematics used in cmdVel_callback is
        # omega = (v_right - v_left) / wheel_gap.
        omega = (v_right - v_left) / self.wheel_gap

        self.x += v * cos(self.prev_angle) * time_diff_s
        self.y += v * sin(self.prev_angle) * time_diff_s
        # BUG FIX: omega is a rate (rad/s); integrate it over the elapsed
        # time instead of adding it directly.
        self.th += omega * time_diff_s

        # Reset section
        self.prev_angle = self.th
        self.prev_left_wheel_ticks = left_wheel_ticks
        self.prev_right_wheel_ticks = right_wheel_ticks
        self.last_time = self.robot.getTime()

        # since all odometry is 6DOF we'll need a quaternion created from yaw
        odom_quat = euler_to_quaternion(0.0, 0.0, self.th)

        # first, we'll publish the transform over tf
        odom_transform = TransformStamped()
        odom_transform.header.stamp = stamp
        odom_transform.header.frame_id = 'odom'
        odom_transform.child_frame_id = 'base_link'
        odom_transform.transform.rotation = odom_quat
        odom_transform.transform.translation.x = self.x
        odom_transform.transform.translation.y = self.y
        odom_transform.transform.translation.z = 0.0

        self.odom_broadcaster.sendTransform(odom_transform)

        odom = Odometry()
        odom.header.stamp = stamp
        odom.header.frame_id = "odom"
        odom.child_frame_id = "base_link"
        # set the position
        odom.pose.pose.position.x = self.x
        odom.pose.pose.position.y = self.y
        odom.pose.pose.orientation = odom_quat
        # NOTE(review): the twist is filled from the *commanded* velocities
        # (cmd_vel), not the measured v/omega — confirm this is intended.
        odom.twist.twist.linear.x = self.vx
        odom.twist.twist.angular.z = self.vth

        # publish the message
        self.odom_pub.publish(odom)

    def cmdVel_callback(self, msg):
        """Convert a Twist into clamped per-wheel angular velocities and
        apply them to all four motors."""
        self.vx = msg.linear.x
        self.vth = msg.angular.z
        # Differential-drive inverse kinematics: wheel speed in rad/s.
        left_speed = ((2.0 * msg.linear.x - msg.angular.z * self.wheel_gap) /
                      (2.0 * self.wheel_radius))
        right_speed = ((2.0 * msg.linear.x + msg.angular.z * self.wheel_gap) /
                       (2.0 * self.wheel_radius))
        left_speed = min(self.motor_max_speed,
                         max(-self.motor_max_speed, left_speed))
        right_speed = min(self.motor_max_speed,
                          max(-self.motor_max_speed, right_speed))

        # NOTE(review): left_speed is already an angular velocity, so the
        # extra division by the radius looks dimensionally off; the fields
        # are not read elsewhere in this class, preserved as-is.
        self.left_omega = left_speed / (self.wheel_radius)
        self.right_omega = right_speed / (self.wheel_radius)
        self.left_motor_front.setVelocity(left_speed)
        self.right_motor_front.setVelocity(right_speed)
        self.left_motor_rear.setVelocity(left_speed)
        self.right_motor_rear.setVelocity(right_speed)

    def sensor_callback(self):
        """Publish the three IR distance readings, then the lidar scan."""
        # Publish distance sensor value
        msg_right = Float64()
        msg_right.data = self.right_sensor.getValue()
        self.sensor_publisher_right.publish(msg_right)

        msg_mid = Float64()
        msg_mid.data = self.mid_sensor.getValue()
        self.sensor_publisher_mid.publish(msg_mid)

        msg_left = Float64()
        msg_left.data = self.left_sensor.getValue()
        self.sensor_publisher_left.publish(msg_left)

        self.laser_pub()

    def laser_pub(self):
        """Publish the lidar range image as a LaserScan on /scan."""
        msg_lidar = LaserScan()
        msg_lidar.header.frame_id = 'base_link'
        msg_lidar.header.stamp = Time(seconds=self.robot.getTime()).to_msg()
        msg_lidar.angle_min = 0.0
        msg_lidar.angle_max = 2 * 22 / 7  # ~2*pi (original 22/7 approximation kept)
        msg_lidar.angle_increment = (0.25 * 22) / (180 * 7)  # ~0.25 degrees
        msg_lidar.range_min = 0.12
        msg_lidar.range_max = 2.0
        msg_lidar.scan_time = 0.032
        msg_lidar.ranges = self.lidar_sensor.getRangeImage()

        self.laser_publisher.publish(msg_lidar)
Example #20
0
class PlayerROSInterface:
    """ROS interface around a RacecarSimulator instance for one player.

    Wires up drive/pose subscribers and scan/odom publishers, and offers
    helpers for broadcasting the pose, steering-hinge and lidar transforms
    and for publishing odometry and lidar data.
    """

    def __init__(self, config, visualize=True, verbose=True, name="one"):
        """Read per-player topics/frames from the parameter server and
        construct the simulator, publishers and subscribers."""
        self.visualize = visualize
        self.verbose = verbose

        # parameters for simulation
        self.drive_topic = rospy.get_param("~drive_topic_%s" % name)
        self.scan_topic = rospy.get_param("~scan_topic_%s" % name)
        self.pose_topic = rospy.get_param("~pose_topic_%s" % name)
        self.odom_topic = rospy.get_param("~odom_topic_%s" % name)

        self.map_frame = rospy.get_param("~map_frame")
        self.base_frame = rospy.get_param("~base_frame")
        self.scan_frame = rospy.get_param("~scan_frame")
        self.update_pose_rate = rospy.get_param("~update_pose_rate")

        # BUG FIX: these flags were read by poseTransformPub() but never
        # defined anywhere, raising AttributeError.  Defaults keep the
        # previous effective behavior (transform broadcast on, ground-truth
        # pose publishing off).
        self.broadcast_transform = rospy.get_param("~broadcast_transform", True)
        self.pub_gt_pose = rospy.get_param("~publish_ground_truth_pose", False)

        self.config = config

        # racecar object
        self.rcs = RacecarSimulator(config)

        # transform broadcaster
        self.br = TransformBroadcaster()

        # publishers (explicit queue_size avoids rospy's deprecation warning)
        self.scan_pub = rospy.Publisher(self.scan_topic, LaserScan,
                                        queue_size=1)
        self.odom_pub = rospy.Publisher(self.odom_topic, Odometry,
                                        queue_size=1)
        # BUG FIX: poseTransformPub() published on self.pose_pub, which was
        # never created.  NOTE(review): the ground-truth topic name is an
        # assumption — confirm against the launch files.
        self.pose_pub = rospy.Publisher(self.pose_topic + "/ground_truth",
                                        PoseStamped, queue_size=1)

        # subscribers
        self.drive_sub = rospy.Subscriber(self.drive_topic,
                                          AckermannDriveStamped,
                                          self.driveCallback,
                                          queue_size=1)
        self.pose_sub = rospy.Subscriber(self.pose_topic, PoseStamped,
                                         self.poseCallback)

        if self.verbose:
            print("Player %s construted!" % name)

    def driveCallback(self, msg):
        """
            Pass actions for driving
        """

        self.rcs.drive(msg.drive.speed, msg.drive.steering_angle)

    def poseCallback(self, msg):
        """
            Handle an externally commanded pose.

            BUG FIX: the subscriber in __init__ referenced this callback but
            it was never defined (AttributeError at construction).
            NOTE(review): the RacecarSimulator API for setting the pose is
            not visible from this file; hook the simulator up here once the
            method name is confirmed.
        """
        pass

    def poseTransformPub(self, timestamp):
        """
            Publish the transform for pose
        """

        # get state information
        state = self.rcs.getState()

        # create the message
        pt_msg = Transform()
        pt_msg.translation.x = state[0]
        pt_msg.translation.y = state[1]
        quat = quaternion_from_euler(0.0, 0.0, state[2])
        pt_msg.rotation.x = quat[0]
        pt_msg.rotation.y = quat[1]
        pt_msg.rotation.z = quat[2]
        pt_msg.rotation.w = quat[3]

        # ground truth
        ps = PoseStamped()
        # BUG FIX: the original used self.map_topic, which does not exist;
        # the frame parameter read in __init__ is self.map_frame.
        ps.header.frame_id = self.map_frame
        ps.pose.position.x = state[0]
        ps.pose.position.y = state[1]
        ps.pose.orientation.x = quat[0]
        ps.pose.orientation.y = quat[1]
        ps.pose.orientation.z = quat[2]
        ps.pose.orientation.w = quat[3]

        # add a header
        ts = TransformStamped()
        ts.header.stamp = timestamp
        ts.header.frame_id = self.map_frame
        ts.child_frame_id = self.base_frame
        ts.transform = pt_msg

        if self.broadcast_transform:
            self.br.sendTransform(ts)

        if self.pub_gt_pose:
            self.pose_pub.publish(ps)

    def steerAngTransformPub(self, timestamp):
        """
            Publish steering transforms, left and right steer the same
        """

        state = self.rcs.getState()

        ts_msg = TransformStamped()
        ts_msg.header.stamp = timestamp

        quat = quaternion_from_euler(0.0, 0.0, state[2])
        ts_msg.transform.rotation.x = quat[0]
        ts_msg.transform.rotation.y = quat[1]
        ts_msg.transform.rotation.z = quat[2]
        ts_msg.transform.rotation.w = quat[3]

        # publish for right and left steering
        ts_msg.header.frame_id = "front_left_hinge"
        ts_msg.child_frame_id = "front_left_wheel"
        self.br.sendTransform(ts_msg)

        ts_msg.header.frame_id = "front_right_hinge"
        ts_msg.child_frame_id = "front_right_wheel"
        self.br.sendTransform(ts_msg)

    def laserLinkTransformPub(self, timestamp):
        """
            Publish the lidar transform, from base
        """

        ts_msg = TransformStamped()
        ts_msg.header.stamp = timestamp
        ts_msg.header.frame_id = self.base_frame
        ts_msg.child_frame_id = self.scan_frame
        # BUG FIX: the config dict is stored as self.config in __init__;
        # self.car_config never existed.
        ts_msg.transform.translation.x = self.config["scan_dist_to_base"]
        ts_msg.transform.rotation.w = 1
        self.br.sendTransform(ts_msg)

    def odomPub(self, timestamp):
        """
            Publish simulation odometry
        """

        state = self.rcs.getState()

        od_msg = Odometry()
        od_msg.header.stamp = timestamp
        od_msg.header.frame_id = self.map_frame
        od_msg.child_frame_id = self.base_frame
        quat = quaternion_from_euler(0.0, 0.0, state[2])
        od_msg.pose.pose.orientation.x = quat[0]
        od_msg.pose.pose.orientation.y = quat[1]
        od_msg.pose.pose.orientation.z = quat[2]
        od_msg.pose.pose.orientation.w = quat[3]

        od_msg.pose.pose.position.x = state[0]
        od_msg.pose.pose.position.y = state[1]
        od_msg.twist.twist.linear.x = state[3]
        # NOTE(review): state[4] on linear.z looks like it may belong on
        # angular.z — preserved as-is, confirm against the simulator state
        # layout.
        od_msg.twist.twist.linear.z = state[4]
        self.odom_pub.publish(od_msg)

    def lidarPub(self, timestamp):
        """
            Publish lidar
        """

        scan = self.rcs.getScan()

        scan_msg = LaserScan()
        scan_msg.header.stamp = timestamp
        scan_msg.header.frame_id = self.scan_frame
        # BUG FIX: self.car_config -> self.config (see laserLinkTransformPub).
        scan_msg.angle_min = -self.config["scan_fov"] / 2.0
        scan_msg.angle_max = self.config["scan_fov"] / 2.0
        scan_msg.angle_increment = self.config[
            "scan_fov"] / self.config["scan_beams"]
        scan_msg.range_max = self.config["scan_max_range"]
        scan_msg.ranges = scan
        scan_msg.intensities = scan
        self.scan_pub.publish(scan_msg)
class WaterlinkedGPS():
    def __init__(self):
        """Set up the Waterlinked GPS bridge node.

        Reads connection/frame parameters, builds the slow/fast API URL
        lists, wires up publishers and periodic HTTP polling timers, and
        then blocks in rospy.spin() — all further work happens in the
        timer/subscriber callbacks.
        """
        rospy.loginfo('Waterlinked GPS object created...\n')
        ip = rospy.get_param("~ip", "192.168.2.94")
        port = rospy.get_param("~port", "80")
        """
        Default TF Behaviour: If no datum point given, then waterlinked node sends utm->map transform referenced to the
        master GPS position and a utm->waterlinked transform referenced to the master GPS position and heading. Pose 
        data is referenced to waterlinked frame.
        Datum TF Behaviour: Datum [latitude, longitude] overrides default behaviour. utm->map transform referenced to the
        datum point given, ENU alignment. utm->waterlinked transform referenced to the master GPS position and heading. 
        Pose data is referenced to waterlinked frame.
        """

        self._map_frame_id = rospy.get_param("~map_frame_id", "map")
        self._waterlinked_frame_id = rospy.get_param("~waterlinked_frame_id",
                                                     "waterlinked")
        self._send_tf = rospy.get_param("~send_tf", False)
        self._datum = rospy.get_param("~datum", None)  # if no datum specified
        self._master_gps_ns = rospy.get_param("~master_gps_ns", None)  # None
        self._master_orientation_topic = rospy.get_param(
            "~master_imu_topic", None)

        self._tf_buffer = Buffer()
        self._tf_bcast = TransformBroadcaster()

        # The base URL is the one specified through: http://192.168.2.2:2770/waterlinked
        self._base_url = 'http://' + ip + ':' + port + '/api/v1'
        # self._base_url = 'http://192.168.2.94:80/api/v1'

        # The complete API can be found here: http://192.168.2.94/swagger/
        # Divide the messages into a slow and a fast group.
        self._api_endpoints_slow = [
            '/about', '/about/status', '/about/temperature', '/config/generic',
            '/config/receivers'
        ]

        self._api_endpoints_fast = [
            '/external/orientation', '/position/acoustic/filtered',
            '/position/acoustic/raw', '/position/global', '/position/master'
        ]

        # Create lists of the full APIs (base URL + endpoint)
        self._urls_slow = [
            self._base_url + api_endpoint
            for api_endpoint in self._api_endpoints_slow
        ]
        self._urls_fast = [
            self._base_url + api_endpoint
            for api_endpoint in self._api_endpoints_fast
        ]

        # Specify the frequencies for the two groups
        self._rate_slow_hz = 0.25
        self._rate_fast_hz = 4.0

        # Print the URLs and their specified frequencies
        self.print_urls()

        # HTTP request session
        self._session = FuturesSession(max_workers=10)

        # If a master gps topic has been specified, then forward that to waterlinked for use
        if self._master_gps_ns is not None:
            # Template payload for the /external/master endpoint; the
            # subscriber callbacks below fill in lat/lon/hdop and sog/cog.
            self._gps_msg = {
                "cog": 0,
                "fix_quality": 1,
                "hdop": 0,
                "lat": 0.0,
                "lon": 0.0,
                "numsats": 11,
                "orientation": 0.0,
                "sog": 0.0
            }
            self._gps_url = self._base_url + "/external/master"
            rospy.Subscriber(self._master_gps_ns + "/fix", NavSatFix,
                             self._handle_master_gps)
            rospy.Subscriber(self._master_gps_ns + "/vel", TwistStamped,
                             self._handle_master_vel)
            # Push the latest fix to the Waterlinked box once per second.
            rospy.Timer(rospy.Duration.from_sec(1.0),
                        self._forward_master_position)

        # Configure the slow and fast timer callbacks
        rospy.Timer(rospy.Duration.from_sec(1.0 / self._rate_slow_hz),
                    self.slow_callback)
        rospy.Timer(rospy.Duration.from_sec(1.0 / self._rate_fast_hz),
                    self.fast_callback)

        # Time logging variables
        self._show_loop_timing = False
        self._is_first_slow_loop = True
        self._is_first_fast_loop = True

        # Slow publishers
        self._pub_about = rospy.Publisher('waterlinked/about',
                                          About,
                                          queue_size=5)
        self._pub_about_status = rospy.Publisher('waterlinked/about/status',
                                                 AboutStatus,
                                                 queue_size=5)
        self._pub_about_temperature = rospy.Publisher(
            'waterlinked/about/temperature', AboutTemperature, queue_size=5)
        self._pub_config_generic = rospy.Publisher(
            'waterlinked/config/generic', ConfigGeneric, queue_size=5)
        self._pub_config_receivers = rospy.Publisher(
            'waterlinked/config/receivers', ConfigReceivers, queue_size=5)

        # Fast publishers
        self._pub_external_orientation = rospy.Publisher(
            'waterlinked/external/orientation',
            ExternalOrientation,
            queue_size=5)
        self._pub_position_acoustic_filtered = rospy.Publisher(
            'waterlinked/position/acoustic/filtered',
            PositionAcousticFiltered,
            queue_size=5)
        self._pub_position_acoustic_raw = rospy.Publisher(
            'waterlinked/position/acoustic/raw',
            PositionAcousticRaw,
            queue_size=5)
        self._pub_position_global = rospy.Publisher(
            'waterlinked/position/global', PositionGlobal, queue_size=5)
        self._pub_position_master = rospy.Publisher(
            'waterlinked/position/master', PositionMaster, queue_size=5)

        # Prepare sensor data for the robot_localization package
        self._pub_pos_with_covariance_stamped = rospy.Publisher(
            'waterlinked/pose_with_cov_stamped',
            PoseWithCovarianceStamped,
            queue_size=5)

        # Enter infinite spinning
        rospy.spin()

    def connection_error(self):
        """Log (throttled to once per 10 s) that the Waterlinked GPS could
        not be reached at the configured base URL."""
        text = "{} | Unable to connect to Waterlinked GPS on: {}".format(
            rospy.get_name(), self._base_url)
        rospy.logerr_throttle(10, text)

    def print_urls(self):
        """Log all Waterlinked API endpoints that will be polled, grouped
        by their request frequency (slow / fast)."""
        parts = [
            'Waterlinked APIs to be requested (see https://demo.waterlinked.com/swagger/#/):\n'
        ]
        parts.append('Slow (f = ' + str(self._rate_slow_hz) + ' Hz)\n')
        parts.extend('- ' + url_str + '\n' for url_str in self._urls_slow)
        parts.append('Fast (f = ' + str(self._rate_fast_hz) + ' Hz)\n')
        parts.extend('- ' + url_str + '\n' for url_str in self._urls_fast)
        rospy.loginfo('{} | {}'.format(rospy.get_name(), ''.join(parts)))

    def _forward_master_position(self, event):
        """Timer callback: PUT the latest external master GPS fix
        (``self._gps_msg``) to the Waterlinked ``/external/master`` endpoint.

        Logs a throttled error and returns on any request failure, and a
        non-throttled error when the server answers with a non-200 status.

        BUG FIX: the original formatted ``e.message``, which does not exist
        on Python 3 exceptions (and on many Python 2 ones), turning every
        connection failure into an AttributeError; format the exception
        itself instead.
        """
        try:
            r = self._session.put(self._gps_url, json=self._gps_msg, timeout=2)
            r = r.result(10)
        except Exception as e:
            rospy.logerr_throttle(
                10.0, "{} | {}".format(rospy.get_name(), e))
            return
        if r.status_code != 200:
            rospy.logerr("Error setting master position: {} {}".format(
                r.status_code, r.text))

    def _handle_master_gps(self, msg):
        """Fill in GPS information for message"""
        self._gps_msg["lat"] = msg.latitude
        self._gps_msg["lon"] = msg.longitude
        self._gps_msg["hdop"] = msg.position_covariance[0]

    def _handle_master_vel(self, msg):
        """Fill in GPS cog/sog for message"""
        self._gps_msg["sog"] = sqrt(msg.twist.linear.x**2 +
                                    msg.twist.linear.y**2)
        val = -1 * (
            atan2(msg.twist.linear.y, msg.twist.linear.x) * 180.0 / pi - 90)
        val = 360 + val if val < 0 else val
        self._gps_msg["cog"] = val

    def slow_callback(self, event):
        """ Callback function that requests Waterlinked status and config
        settings at a low rate.

        Fires all HTTP GETs in ``self._urls_slow`` concurrently, then
        republishes each successful JSON response as the matching ROS
        message, all stamped with one common timestamp.  A ConnectionError
        on any request is delegated to ``self.connection_error()``.
        """
        # Request current time and use it for all messages
        tnow = rospy.Time.now()
        # Bookkeeping for an average callback-rate figure (debug log only).
        if self._is_first_slow_loop:
            self._is_first_slow_loop = False
            self.f_cum_slow = 0
            self.n_slow = 0
            self._slow_t0 = tnow.to_sec()
        else:
            # NOTE(review): _slow_t0 is never advanced after the first loop,
            # so f is 1 / (time since the first iteration), not the
            # per-iteration frequency — confirm this is intended.
            f = 1 / (tnow.to_sec() - self._slow_t0)
            self.f_cum_slow += f
            self.n_slow += 1
            f_avg = self.f_cum_slow / self.n_slow
            rospy.logdebug("slow loop (n = %d): f_avg = %.3f Hz" %
                           (self.n_slow, f_avg))

        # Initiate HTTP request to all URLs
        future_list = [
            self._session.get(url, timeout=2.0) for url in self._urls_slow
        ]

        try:
            # waterlinked/about
            res_about = future_list[0].result()

            if res_about.ok:
                data = res_about.json()
                msg_about = About()
                msg_about.header.stamp = tnow
                msg_about.chipid = data['chipid']
                msg_about.version = data['version']
                self._pub_about.publish(msg_about)

            # waterlinked/about/status
            res_about_status = future_list[1].result()
            if res_about_status.ok:
                data = res_about_status.json()
                msg_about_status = AboutStatus()
                msg_about_status.header.stamp = tnow
                msg_about_status.gps = data['gps']
                msg_about_status.imu = data['imu']
                self._pub_about_status.publish(msg_about_status)

            # waterlinked/about/temperature
            res_about_temperature = future_list[2].result()
            if res_about_temperature.ok:
                data = res_about_temperature.json()
                msg_about_temperature = AboutTemperature()
                msg_about_temperature.header.stamp = tnow
                msg_about_temperature.board = data['board']
                self._pub_about_temperature.publish(msg_about_temperature)

            # waterlinked/config/generic
            res_config_generic = future_list[3].result()
            # NOTE(review): truthiness of a requests Response mirrors .ok,
            # but the sibling branches test .ok explicitly — consider
            # unifying the style.
            if res_config_generic:
                data = res_config_generic.json()
                msg_config_generic = ConfigGeneric()
                msg_config_generic.header.stamp = tnow
                #msg_config_generic.carrier_frequency = data['carrier_frequency']
                # NOTE(review): .encode() returns bytes on Python 3; these
                # assignments look written for Python 2 str fields — confirm.
                msg_config_generic.compass = data['compass'].encode(
                    'ascii', 'ignore')
                msg_config_generic.gps = data['gps'].encode('ascii', 'ignore')
                msg_config_generic.range_max_x = data['range_max_x']
                msg_config_generic.range_max_y = data['range_max_y']
                msg_config_generic.range_max_z = data['range_max_z']
                msg_config_generic.range_min_x = data['range_min_x']
                msg_config_generic.range_min_y = data['range_min_y']
                msg_config_generic.static_lat = data['static_lat']
                msg_config_generic.static_lon = data['static_lon']
                msg_config_generic.static_orientation = data[
                    'static_orientation']
                #msg_config_generic.use_external_depth = data['use_external_depth']
                self._pub_config_generic.publish(msg_config_generic)

            # waterlinked/config/receivers
            res_config_receivers = future_list[4].result()
            if res_config_receivers.ok:
                data = res_config_receivers.json()
                msg_config_receivers = ConfigReceivers()
                msg_config_receivers.header.stamp = tnow
                msg_config_receivers.receivers = []
                # The endpoint returns one JSON object per receiver.
                for i in range(len(data)):
                    rec = Receiver()
                    rec.id = data[i]['id']
                    rec.x = data[i]['x']
                    rec.y = data[i]['y']
                    rec.z = data[i]['z']
                    msg_config_receivers.receivers.append(rec)
                self._pub_config_receivers.publish(msg_config_receivers)
        except ConnectionError as e:
            self.connection_error()

    def fast_callback(self, event):
        """ Callback function that requests Waterlinked position and orientation
        information at a fast rate.

        Fires all HTTP GETs in ``self._urls_fast`` concurrently, republishes
        each successful JSON response as a ROS message (all stamped with one
        common timestamp) and, when ``self._send_tf`` is set, broadcasts the
        map -> waterlinked transform derived from the master's orientation.
        A ConnectionError on any request is delegated to
        ``self.connection_error()``.
        """
        # Request current time and use it for all messages
        tnow = rospy.Time.now()
        # Bookkeeping for an average callback-rate figure (debug log only).
        if self._is_first_fast_loop:
            self._is_first_fast_loop = False
            self.f_cum_fast = 0
            self.n_fast = 0
            self._fast_t0 = tnow.to_sec()
        else:
            # NOTE(review): _fast_t0 is never advanced after the first loop,
            # so f is 1 / (time since the first iteration) — confirm intent.
            f = 1 / (tnow.to_sec() - self._fast_t0)
            self.f_cum_fast += f
            self.n_fast += 1
            f_avg = self.f_cum_fast / self.n_fast
            rospy.logdebug("fast loop (n = %d): f_avg = %.3f Hz" %
                           (self.n_fast, f_avg))

        # Initiate HTTP request to all URLs
        future_list = [
            self._session.get(url, timeout=2.0) for url in self._urls_fast
        ]

        try:
            # WARN: ORIENTATION IS CLOCKWISE REFERENCED FROM MAGNETIC NORTH
            # /waterlinked/external/orientation
            res_external_orientation = future_list[0].result()
            if res_external_orientation.ok:
                data = res_external_orientation.json()
                msg_external_orientation = ExternalOrientation()
                msg_external_orientation.header.stamp = tnow
                msg_external_orientation.orientation = data['orientation']
                self._pub_external_orientation.publish(
                    msg_external_orientation)

            # /waterlinked/position/acoustic/filtered
            res_position_acoustic_filtered = future_list[1].result()

            # WARN: WATERLINKED POSITION IS LEFT HANDED RFD -> X: RIGHT, y: FORWARDS, Z: DOWN
            # DO NOT USE ACOUSTIC_FILTERED FOR NAVIGATION!
            if res_position_acoustic_filtered.ok:
                data = res_position_acoustic_filtered.json()
                msg_position_acoustic_filtered = PositionAcousticFiltered()
                msg_position_acoustic_filtered.header.stamp = tnow
                msg_position_acoustic_filtered.header.frame_id = self._waterlinked_frame_id
                msg_position_acoustic_filtered.std = data['std']
                msg_position_acoustic_filtered.temp = data['temp']
                msg_position_acoustic_filtered.x = data['x']
                msg_position_acoustic_filtered.y = data['y']
                msg_position_acoustic_filtered.z = data['z']
                if self._pub_position_acoustic_filtered.get_num_connections() > 0:
                    # FIX: the template was never .format()ed, so the literal
                    # "{}" was logged instead of the node name.
                    rospy.logwarn_once(
                        "{} | waterlinked/acoustic_filtered is left-handed RFD, don't use for navigation, "
                        "use waterlinked/pose_with_cov_stamped (FLU) instead."
                        .format(rospy.get_name()))
                self._pub_position_acoustic_filtered.publish(
                    msg_position_acoustic_filtered)

                # Create message of the type geometry_msgs/PoseWithCovariance,
                # converting the left-handed RFD reading into FLU.
                msg_pose_with_cov_stamped = PoseWithCovarianceStamped()
                var_xyz = pow(data['std'],
                              2)  # calculate variance from standard deviation
                msg_pose_with_cov_stamped.header.stamp = tnow
                msg_pose_with_cov_stamped.header.frame_id = self._waterlinked_frame_id
                msg_pose_with_cov_stamped.pose.pose.position.x = data['y']
                msg_pose_with_cov_stamped.pose.pose.position.y = -data['x']
                msg_pose_with_cov_stamped.pose.pose.position.z = -data['z']
                msg_pose_with_cov_stamped.pose.pose.orientation = Quaternion(
                    0, 0, 0, 1)
                # Diagonal covariance: identical variance on x/y/z, no
                # orientation information.
                msg_pose_with_cov_stamped.pose.covariance = [
                    var_xyz, 0, 0, 0, 0, 0, 0, var_xyz, 0, 0, 0, 0, 0, 0,
                    var_xyz, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                    0, 0, 0, 0
                ]
                self._pub_pos_with_covariance_stamped.publish(
                    msg_pose_with_cov_stamped)

            # /waterlinked/position/acoustic/raw
            res_position_acoustic_raw = future_list[2].result()
            if res_position_acoustic_raw.ok:
                data = res_position_acoustic_raw.json()
                msg_position_acoustic_raw = PositionAcousticRaw()
                msg_position_acoustic_raw.header.stamp = tnow
                msg_position_acoustic_raw.header.frame_id = self._waterlinked_frame_id
                msg_position_acoustic_raw.std = data['std']
                msg_position_acoustic_raw.temp = data['temp']
                msg_position_acoustic_raw.x = data['x']
                msg_position_acoustic_raw.y = data['y']
                msg_position_acoustic_raw.z = data['z']
                if self._pub_position_acoustic_raw.get_num_connections() > 0:
                    # FIX: same missing .format() as above.
                    rospy.logwarn_once(
                        "{} | waterlinked/acoustic_raw is left-handed RFD, don't use for navigation, "
                        "use waterlinked/pose_with_cov_stamped (FLU) instead."
                        .format(rospy.get_name()))
                self._pub_position_acoustic_raw.publish(
                    msg_position_acoustic_raw)

            # /waterlinked/position/global
            res_position_global = future_list[3].result()
            if res_position_global.ok:
                data = res_position_global.json()
                msg_position_global = PositionGlobal()
                msg_position_global.header.stamp = tnow
                msg_position_global.lat = data['lat']
                msg_position_global.lon = data['lon']
                self._pub_position_global.publish(msg_position_global)

            # /waterlinked/position/master
            res_position_master = future_list[4].result()
            msg_position_master = None
            if res_position_master.ok:
                data = res_position_master.json()
                msg_position_master = PositionMaster()
                msg_position_master.header.stamp = tnow
                msg_position_master.cog = data['cog']
                msg_position_master.hdop = data['hdop']
                msg_position_master.lat = data['lat']
                msg_position_master.lon = data['lon']
                msg_position_master.numsats = data['numsats']
                msg_position_master.orientation = data['orientation']
                msg_position_master.sog = data['sog']
                self._pub_position_master.publish(msg_position_master)

            # CONVENTION: UTM -> WATERLINKED IS DEFINED BY UTM POSITION OF MASTER, ROTATED ACCORDING TO MASTER ORIENTATION
            # CONVENTION: UTM -> MAP IS DEFINED BY UTM POSITION OF MASTER, WITHOUT ANY ROTATION (ALIGNED WITH NORTH)
            # CONVENTION: UTM -> MAP CAN ALSO BE DEFINED BY AN EXTERNAL DATUM [LATITUDE, LONGITUDE]
            if self._send_tf and msg_position_master is not None:
                tf_map = TransformStamped()  # Map transformation
                tf_map.header.stamp = tnow
                tf_map.header.frame_id = self._map_frame_id
                tf_map.child_frame_id = self._waterlinked_frame_id
                # ORIENTATION IS PROVIDED AS NORTH REFERENCED CW
                # NEEDS TO BE CONVERTED TO EAST REFERENCED CCW
                q = Rotation.from_euler(
                    'xyz', [0, 0, 90 - msg_position_master.orientation],
                    degrees=True).as_quat()
                tf_map.transform.rotation = Quaternion(*q)
                self._tf_bcast.sendTransform(tf_map)
        except ConnectionError:
            self.connection_error()
# Example #22
class TFManager(object):
    """
    Listens to datum GPS or service and broadcasts:
        1. ECEF TO UTM
        2. UTM TO MAP
        3. MAP TO ODOM (fixed or updated by a pose with covariance stamped topic).
    """
    def __init__(self):
        # Datum may be configured as [lat, lon] or [lat, lon, alt];
        # pad a missing altitude with 0.0.
        self._datum = rospy.get_param("~datum", None)
        if self._datum is not None and len(self._datum) < 3:
            self._datum += [0.0]
        self._use_datum = self._datum is not None
        self._earth_frame_id = rospy.get_param("~earth_frame_id", "earth")
        self._utm_frame_id = rospy.get_param("~utm_frame_id", "utm")
        self._map_frame_id = rospy.get_param("~map_frame_id", "map")
        self._odom_frame_id = rospy.get_param("~odom_frame_id", "odom")
        self._body_frame_id = rospy.get_param("~base_frame_id",
                                              "base_link")  # currently unused
        self._tf_broadcast_rate = rospy.get_param("~broadcast_rate", 10.0)
        self._serv = rospy.Service("~set_datum", SetDatum, self._set_datum)
        self._tf_buff = Buffer()
        TransformListener(self._tf_buff)
        self._tf_bcast = TransformBroadcaster()
        self._last_datum = None
        self._static_map_odom = rospy.get_param("~static_map_odom", False)
        self._odom_updated = False
        self._update_odom_serv = rospy.Service("~set_odom", Trigger,
                                               self._handle_set_odom)
        # Without a configured datum, take it from the gps_datum topic.
        if not self._use_datum:
            rospy.Subscriber("gps_datum", NavSatFix, self._handle_datum)
        if not self._static_map_odom:
            rospy.Subscriber("waterlinked/pose_with_cov_stamped",
                             PoseWithCovarianceStamped, self._handle_odom_tf)
        else:
            # FIX: TransformStamped has exactly three fields (header,
            # child_frame_id, transform); the previous code passed four
            # positional args (..., None, Quaternion(...)), which raises at
            # construction.  Broadcast an identity map -> odom transform.
            StaticTransformBroadcaster().sendTransform(
                TransformStamped(
                    Header(0, rospy.Time.now(), self._map_frame_id),
                    self._odom_frame_id,
                    Transform(Vector3(0, 0, 0), Quaternion(0, 0, 0, 1))))
        self._map_odom_tf = None
        rospy.Timer(rospy.Duration.from_sec(1.0 / self._tf_broadcast_rate),
                    self._send_tf)

    def _handle_set_odom(self, req):
        """Service handler: request that map -> odom be re-derived from the
        next incoming pose (see _handle_odom_tf)."""
        self._odom_updated = False
        res = TriggerResponse(True, "map -> odom tf set.")
        return res

    def _handle_odom_tf(self, msg):
        # Given the pose of the vehicle in waterlinked frame, output the map -> odom tf transformation
        if not self._odom_updated:
            # Time 0 => use the latest available transform in the buffer.
            point = PointStamped(
                Header(0, rospy.Time.from_sec(0.0), msg.header.frame_id),
                msg.pose.pose.position)
            try:
                point_map = self._tf_buff.transform(point, self._map_frame_id)
            except Exception as e:
                # FIX: Exception.message does not exist on Python 3 (and is
                # deprecated on Python 2); str(e) is portable.
                rospy.logerr_throttle(
                    5, "{} | {}".format(rospy.get_name(), str(e)))
                return
            # Odom is always at same depth as map
            self._map_odom_tf = TransformStamped(
                Header(0, rospy.Time.now(), self._map_frame_id),
                self._odom_frame_id,
                Transform(Vector3(point_map.point.x, point_map.point.y, 0),
                          Quaternion(0, 0, 0, 1)))
            self._odom_updated = True

    def _set_datum(self, req):
        """Service handler: replace the datum with the requested geo pose."""
        self._datum = [
            req.geo_pose.position.latitude, req.geo_pose.position.longitude,
            req.geo_pose.position.altitude
        ]
        # NOTE(review): a rospy service handler that returns None makes the
        # client-side call fail — consider returning SetDatumResponse().
        return

    def _get_coords(self, latitude, longitude):
        """Return ((x, y, z, qx, qy, qz, qw) ECEF->UTM,
        (x, y, z, qx, qy, qz, qw) UTM->map) for the given datum."""
        # Get ECEF translation to UTM and Rotation to UTM from latitude and longitude (assuming 0.0 altitude)
        x, y, z = geodetic_helpers.utm_origin_ecef(
            *geodetic_helpers.lla_to_ecef(latitude, longitude))
        q = geodetic_helpers.latlong_ecef_enu_rotation(latitude, longitude)
        # Get UTM translation to ENU and rotation to ENU from latitude and longitude (assuming 0.0 altitude)
        utm_current = geodetic_helpers.lla_to_utm(latitude, longitude)
        Y = geodetic_helpers.grid_convergence(latitude,
                                              longitude,
                                              radians=False)
        Y = 0  # The grid convergence seems to be already accounted for?
        enu_rotation = Rotation.from_euler('xyz', [0.0, 0.0, -Y],
                                           degrees=True).as_quat().tolist()
        return (x, y, z) + tuple(q), utm_current[:3] + tuple(enu_rotation)

    def _get_tfs(self, data):
        """Build the earth->utm and utm->map TransformStamped pair, either
        from a NavSatFix message or from the configured datum."""
        if type(data) is NavSatFix:
            earth, utm = self._get_coords(data.latitude, data.longitude)
            header_earth = Header(data.header.seq, data.header.stamp,
                                  self._earth_frame_id)
            header_utm = Header(data.header.seq, data.header.stamp,
                                self._utm_frame_id)
        else:
            earth, utm = self._get_coords(self._datum[0], self._datum[1])
            header_earth = Header(0, rospy.Time.now(), self._earth_frame_id)
            header_utm = Header(0, rospy.Time.now(), self._utm_frame_id)
        # Broadcast datum's latitude and longitude
        # Map from ECEF Frame
        earth_to_utm = TransformStamped(
            header_earth, self._utm_frame_id,
            Transform(Vector3(*earth[:3]), Quaternion(*earth[3:])))
        # UTM from ECEF Frame
        utm_to_map = TransformStamped(
            header_utm, self._map_frame_id,
            Transform(Vector3(*utm[:3]), Quaternion(*utm[3:])))
        return earth_to_utm, utm_to_map

    def _handle_datum(self, msg):
        """Topic handler: remember the last received datum fix."""
        self._last_datum = msg

    def _send_tf(self, event):
        """Timer callback: broadcast earth->utm, utm->map and (when known)
        map->odom with a fresh stamp."""
        if self._use_datum:
            tf_utm, tf_map = self._get_tfs(None)
        elif self._last_datum is not None:
            tf_utm, tf_map = self._get_tfs(self._last_datum)
        else:
            # No datum available yet: nothing to broadcast.
            return
        self._tf_bcast.sendTransform(tf_utm)
        self._tf_bcast.sendTransform(tf_map)
        if self._map_odom_tf is not None:
            tf = self._map_odom_tf
            tf.header.stamp = rospy.Time.now()
            self._tf_bcast.sendTransform(tf)
class Odometry_Encoders(Node):
    """Differential-drive odometry from wheel encoders.

    Consumes wheel velocities (robot/enc_wheels) or raw encoder ticks
    (robot/enc_ticks_wheels, with wrap-around handling), integrates the pose
    and publishes nav_msgs/Odometry plus the odom -> base_link TF.
    Velocities and time steps are smoothed with fixed-size rolling means.
    """

    #############################################################################
    def __init__(self):
        #############################################################################
        super().__init__("odometry_encoders")

        self.nodename = self.get_name()
        self.get_logger().info("%s started" % self.nodename)

        #### parameters #######
        self.radius = float(
            self.declare_parameter(
                'wheels.radius', 0.02569).value)  # The wheel radius in meters
        self.base_width = float(
            self.declare_parameter(
                'wheels.base_width',
                0.1275).value)  # The wheel base width in meters

        # the name of the base frame of the robot
        self.base_frame_id = self.declare_parameter('base_frame_id',
                                                    'base_link').value
        # the name of the odometry reference frame
        self.odom_frame_id = self.declare_parameter('odom_frame_id',
                                                    'odom').value

        # When True, odometry is integrated from encoder ticks; otherwise
        # from wheel angular velocities.
        self.ticks_mode = self.declare_parameter('ticks.ticks_mode',
                                                 False).value
        # The number of wheel encoder ticks per meter of travel
        self.ticks_meter = float(
            self.declare_parameter('ticks.ticks_meter', 50.0).value)
        self.encoder_min = self.declare_parameter('ticks.encoder_min',
                                                  -32768).value
        self.encoder_max = self.declare_parameter('ticks.encoder_max',
                                                  32768).value

        # Init variables
        self.init()

        # subscriptions / publishers
        self.create_subscription(Wheels, "robot/enc_wheels",
                                 self.wheelsCallback,
                                 qos_profile_system_default)
        self.create_subscription(Wheels, "robot/enc_ticks_wheels",
                                 self.wheelsEncCallback,
                                 qos_profile_system_default)
        self.cal_vel_pub = self.create_publisher(Twist, "cal_vel",
                                                 qos_profile_system_default)
        self.odom_pub = self.create_publisher(Odometry, "odom",
                                              qos_profile_system_default)

        # 5 seconds timer to update parameters
        self.create_timer(5, self.parametersCallback)

        # TF2 Broadcaster
        self.odomBroadcaster = TransformBroadcaster(self)

    #############################################################################
    def init(self):
        #############################################################################
        """Reset all internal odometry state (pose, velocities, buffers)."""

        # Internal data
        self.enc_left = None  # wheel encoder readings
        self.enc_right = None
        self.lvel = 0.0
        self.rvel = 0.0
        self.lmult = 0.0  # wrap-around multipliers for the raw tick counters
        self.rmult = 0.0
        self.prev_lencoder = 0.0
        self.prev_rencoder = 0.0
        # Thresholds at 30% / 70% of the encoder range used to detect a
        # counter wrap-around between two consecutive readings.
        self.encoder_low_wrap = (self.encoder_max -
                                 self.encoder_min) * 0.3 + self.encoder_min
        self.encoder_high_wrap = (self.encoder_max -
                                  self.encoder_min) * 0.7 + self.encoder_min

        # Actual values coming back from robot
        self.left = 0.0
        self.right = 0.0

        # Position in xy plane
        self.x = 0.0
        self.y = 0.0
        self.th = 0.0
        # Rolling-mean accumulators for linear/angular velocity and time step
        self.linear_accumulator = {
            "sum": 0.0,
            "buffer": np.zeros(10),
            "next_insert": 0,
            "buffer_filled": False
        }
        self.angular_accumulator = {
            "sum": 0.0,
            "buffer": np.zeros(10),
            "next_insert": 0,
            "buffer_filled": False
        }
        self.dt_accumulator = {
            "sum": 0.0,
            "buffer": np.zeros(5),
            "next_insert": 0,
            "buffer_filled": False
        }

        self.then = self.get_clock().now()

    #############################################################################
    def update(self):
        #############################################################################
        """Advance odometry by one step and publish the results."""
        now = self.get_clock().now()  # Current time

        # Elapsed time [nanoseconds]
        elapsed = now.nanoseconds - self.then.nanoseconds
        elapsed = float(elapsed) / 1e9  # Elapsed time [seconds]
        self.then = now  # Update previous time
        self.dt_accumulator = self.accumulateMean(self.dt_accumulator, elapsed)

        self.calculateOdometry(self.getRollingMean(
            self.dt_accumulator))  # Calculate Odometry
        self.publishCalVel(self.getRollingMean(
            self.dt_accumulator))  # Publish Calculated Velocities
        self.publishOdometry(now)  # Publish Odometry

    #############################################################################
    def calculateOdometry(self, elapsed):
        #############################################################################
        """Integrate the pose from the wheel readings over `elapsed` seconds."""

        # Distance traveled by each wheel since the last update
        if self.ticks_mode:
            # FIX: use `is None` rather than `== None` (PEP 8 E711).
            if self.enc_left is None:
                d_left = 0
                d_right = 0
            else:
                d_left = (self.left - self.enc_left) / self.ticks_meter
                d_right = (self.right - self.enc_right) / self.ticks_meter
            self.enc_left = self.left
            self.enc_right = self.right
        else:
            # In velocity mode, left/right are angular rates [rad per step]
            # — presumably already scaled per update; TODO confirm units.
            d_left = self.left * self.radius
            d_right = self.right * self.radius

        # Linear displacement
        d = (d_left + d_right) / 2
        # Angular displacement
        th = (d_right - d_left) / (2 * self.base_width)

        # FIX: guard against a zero mean time step (consecutive updates can
        # land within clock resolution); velocities are undefined for dt == 0.
        if elapsed > 0:
            dx = d / elapsed
            dr = th / elapsed
            self.linear_accumulator = self.accumulateMean(
                self.linear_accumulator, dx)
            self.angular_accumulator = self.accumulateMean(
                self.angular_accumulator, dr)

        # Calculate distance traveled in x and y
        x = cos(th) * d
        y = -sin(th) * d

        # Calculate the final position of the robot
        self.x = self.x + (cos(self.th) * x - sin(self.th) * y)
        self.y = self.y + (sin(self.th) * x + cos(self.th) * y)
        self.th = self.th + th

    #############################################################################
    def accumulateMean(self, dict, val):
        #############################################################################
        """Insert `val` into the circular buffer `dict` (see init for its
        layout) and keep its running sum up to date.

        NOTE(review): the parameter shadows the builtin `dict`; kept to
        preserve the public signature.
        """
        dict["sum"] = dict["sum"] - dict["buffer"][dict["next_insert"]]
        dict["sum"] = dict["sum"] + val
        dict["buffer"][dict["next_insert"]] = val
        dict["next_insert"] = dict["next_insert"] + 1
        dict["buffer_filled"] = dict["buffer_filled"] or (
            dict["next_insert"] >= len(dict["buffer"]))
        dict["next_insert"] = dict["next_insert"] % len(dict["buffer"])
        return dict

    #############################################################################
    def getRollingMean(self, dict):
        #############################################################################
        """Return the mean of the samples currently held in accumulator
        `dict` (only the filled portion before the buffer wraps)."""
        valid_data_count = dict["buffer_filled"] * len(
            dict["buffer"]) + (not dict["buffer_filled"]) * dict["next_insert"]
        # FIX: an empty accumulator previously divided by zero.
        if valid_data_count == 0:
            return 0.0
        return float(dict["sum"] / valid_data_count)

    #############################################################################
    def publishOdometry(self, now):
        #############################################################################
        """Broadcast the odom -> base_link TF and publish nav_msgs/Odometry
        for the current pose and smoothed velocities."""

        # Planar pose: yaw-only quaternion
        quaternion = Quaternion()

        quaternion.x = 0.0
        quaternion.y = 0.0
        quaternion.z = sin(self.th / 2)
        quaternion.w = cos(self.th / 2)

        # TF Broadcaster

        tfOdometry = TransformStamped()
        tfOdometry.header.stamp = now.to_msg()
        tfOdometry.header.frame_id = self.odom_frame_id
        tfOdometry.child_frame_id = self.base_frame_id

        tfOdometry.transform.translation.x = self.x
        tfOdometry.transform.translation.y = self.y
        tfOdometry.transform.translation.z = 0.0

        tfOdometry.transform.rotation.x = quaternion.x
        tfOdometry.transform.rotation.y = quaternion.y
        tfOdometry.transform.rotation.z = quaternion.z
        tfOdometry.transform.rotation.w = quaternion.w

        self.odomBroadcaster.sendTransform(tfOdometry)

        # Odometry

        odom = Odometry()
        odom.header.stamp = now.to_msg()

        odom.header.frame_id = self.odom_frame_id
        odom.pose.pose.position.x = self.x
        odom.pose.pose.position.y = self.y
        odom.pose.pose.position.z = 0.0
        odom.pose.pose.orientation = quaternion

        odom.child_frame_id = self.base_frame_id
        odom.twist.twist.linear.x = self.getRollingMean(
            self.linear_accumulator)
        odom.twist.twist.linear.y = 0.0
        odom.twist.twist.angular.z = self.getRollingMean(
            self.angular_accumulator)

        self.odom_pub.publish(odom)

    #############################################################################
    def publishCalVel(self, elapsed):
        #############################################################################
        """Publish the smoothed velocities on cal_vel.

        NOTE(review): the mean time step is smuggled out in linear.z for
        debugging — consumers should not treat it as a velocity.
        """

        cal_vel = Twist()
        cal_vel.linear.x = self.getRollingMean(self.linear_accumulator)
        cal_vel.angular.z = self.getRollingMean(self.angular_accumulator)
        cal_vel.linear.z = elapsed

        self.cal_vel_pub.publish(cal_vel)

    #############################################################################
    def wheelsEncCallback(self, msg):
        #############################################################################
        """Handle raw encoder ticks: unwrap the bounded counters into
        monotonically growing totals, then update odometry in ticks mode."""

        # Right Wheel Encoder
        encRight = msg.param[0]

        # Counter jumped from near max to near min => wrapped forwards
        if (encRight < self.encoder_low_wrap
                and self.prev_rencoder > self.encoder_high_wrap):
            self.rmult = self.rmult + 1

        # Counter jumped from near min to near max => wrapped backwards
        if (encRight > self.encoder_high_wrap
                and self.prev_rencoder < self.encoder_low_wrap):
            self.rmult = self.rmult - 1

        self.right = 1.0 * (encRight + self.rmult *
                            (self.encoder_max - self.encoder_min))
        self.prev_rencoder = encRight

        # Left Wheel Encoder
        encLeft = msg.param[1]

        if (encLeft < self.encoder_low_wrap
                and self.prev_lencoder > self.encoder_high_wrap):
            self.lmult = self.lmult + 1

        if (encLeft > self.encoder_high_wrap
                and self.prev_lencoder < self.encoder_low_wrap):
            self.lmult = self.lmult - 1

        self.left = 1.0 * (encLeft + self.lmult *
                           (self.encoder_max - self.encoder_min))
        self.prev_lencoder = encLeft

        if self.ticks_mode:
            self.update()

    #############################################################################
    def wheelsCallback(self, msg):
        #############################################################################
        """Handle wheel velocity readings; update odometry in velocity mode."""
        self.right = msg.param[0]
        self.left = msg.param[1]

        if not self.ticks_mode:
            self.update()

    #############################################################################
    def parametersCallback(self):
        #############################################################################
        """Periodic timer: re-read parameters so runtime changes take effect."""
        # The wheel radius in meters
        self.radius = float(self.get_parameter('wheels.radius').value)
        # The wheel base width in meters
        self.base_width = float(self.get_parameter('wheels.base_width').value)

        # the name of the base frame of the robot
        self.base_frame_id = self.get_parameter('base_frame_id').value
        # the name of the odometry reference frame
        self.odom_frame_id = self.get_parameter('odom_frame_id').value

        self.ticks_mode = self.get_parameter('ticks.ticks_mode').value
        # The number of wheel encoder ticks per meter of travel
        self.ticks_meter = float(self.get_parameter('ticks.ticks_meter').value)
        self.encoder_min = self.get_parameter('ticks.encoder_min').value
        self.encoder_max = self.get_parameter('ticks.encoder_max').value

        # Recompute the wrap-detection thresholds for the new range
        self.encoder_low_wrap = (self.encoder_max -
                                 self.encoder_min) * 0.3 + self.encoder_min
        self.encoder_high_wrap = (self.encoder_max -
                                  self.encoder_min) * 0.7 + self.encoder_min
# Example #24
class EPuck2Controller(WebotsNode):
    """ROS2 driver node for an e-puck2 robot simulated in Webots.

    Publishes odometry on ``/odom``, per-sensor IR ranges on
    ``/distance/ps0..ps7`` and ``/distance/tof``, an emulated laser scan on
    ``/scan``, broadcasts the ``odom -> base_footprint`` and
    ``base_footprint -> laser_scanner`` transforms, and drives the wheels
    from ``/cmd_vel`` Twist messages.
    """

    def __init__(self, name, args=None):
        super().__init__(name)
        self.robot = Robot()

        # Parameters: wheel geometry in meters, simulation timestep in ms.
        wheel_distance_param = self.declare_parameter("wheel_distance", 0.0552)
        wheel_radius_param = self.declare_parameter("wheel_radius", 0.021)
        self.timestep = self.declare_parameter("timestep", 64)
        self.wheel_radius = wheel_radius_param.value
        self.wheel_distance = wheel_distance_param.value
        self.set_parameters_callback(self.on_param_changed)

        # Init motors in velocity-control mode (infinite position target).
        self.left_motor = self.robot.getMotor('left wheel motor')
        self.right_motor = self.robot.getMotor('right wheel motor')
        self.left_motor.setPosition(float('inf'))
        self.right_motor.setPosition(float('inf'))
        self.left_motor.setVelocity(0)
        self.right_motor.setVelocity(0)
        self.create_subscription(Twist, '/cmd_vel', self.cmd_vel_callback, 1)
        self.get_logger().info('EPuck Initialized')

        # Initialize odometry from the wheel position sensors.
        self.reset_odometry()
        self.left_wheel_sensor = self.robot.getPositionSensor(
            'left wheel sensor')
        self.right_wheel_sensor = self.robot.getPositionSensor(
            'right wheel sensor')
        self.left_wheel_sensor.enable(self.timestep.value)
        self.right_wheel_sensor.enable(self.timestep.value)
        self.odometry_publisher = self.create_publisher(Odometry, '/odom', 1)

        # Intialize the 8 IR distance sensors (ps0..ps7) plus the ToF sensor.
        self.sensor_publishers = {}
        self.sensors = {}
        for i in range(8):
            sensor = self.robot.getDistanceSensor('ps{}'.format(i))
            sensor.enable(self.timestep.value)
            sensor_publisher = self.create_publisher(
                Range, '/distance/ps{}'.format(i), 10)
            self.sensors['ps{}'.format(i)] = sensor
            self.sensor_publishers['ps{}'.format(i)] = sensor_publisher

        sensor = self.robot.getDistanceSensor('tof')
        sensor.enable(self.timestep.value)
        sensor_publisher = self.create_publisher(Range, '/distance/tof', 1)
        self.sensors['tof'] = sensor
        self.sensor_publishers['tof'] = sensor_publisher

        self.laser_publisher = self.create_publisher(LaserScan, '/scan', 1)

        # Drive the simulation and all publishing from one periodic timer.
        self.create_timer(self.timestep.value / 1000, self.step_callback)

        # Transforms: a broadcaster and a fixed laser-scanner offset.
        self.tf_broadcaster = TransformBroadcaster(self)

        self.tf_laser_scanner = TransformStamped()
        self.tf_laser_scanner.header.frame_id = 'base_footprint'
        self.tf_laser_scanner.child_frame_id = 'laser_scanner'
        self.tf_laser_scanner.transform.translation.x = 0.0
        self.tf_laser_scanner.transform.translation.y = 0.0
        self.tf_laser_scanner.transform.translation.z = 0.0
        self.tf_laser_scanner.transform.rotation = euler_to_quaternion(0, 0, 0)

    def reset_odometry(self):
        """Zero the integrated pose and the stored wheel-encoder readings."""
        self.prev_left_wheel_ticks = 0
        self.prev_right_wheel_ticks = 0
        self.prev_position = (0.0, 0.0)
        self.prev_angle = 0.0

    def on_param_changed(self, params):
        """Apply runtime changes to wheel geometry, resetting odometry."""
        result = SetParametersResult()
        result.successful = True

        for param in params:
            if param.name == "wheel_radius":
                self.reset_odometry()
                self.wheel_radius = param.value
            elif param.name == "wheel_distance":
                self.reset_odometry()
                self.wheel_distance = param.value

        return result

    def step_callback(self):
        """Advance the simulation one timestep and publish all outputs."""
        self.robot.step(self.timestep.value)

        # Stamp messages with wall-clock time split into sec/nanosec.
        epoch = time.time()
        stamp = Time()
        stamp.sec = int(epoch)
        stamp.nanosec = int((epoch - int(epoch)) * 1E9)

        self.odometry_callback(stamp)
        self.distance_callback(stamp)
        self.publish_static_transforms(stamp)

    def publish_static_transforms(self, stamp):
        """Re-stamp and broadcast the fixed laser-scanner transform."""
        self.tf_laser_scanner.header.stamp = stamp
        self.tf_broadcaster.sendTransform(self.tf_laser_scanner)

    def cmd_vel_callback(self, twist):
        """Convert a body Twist into per-wheel angular velocities [rad/s]."""
        self.get_logger().info('Twist message received')
        # Differential-drive inverse kinematics.
        left_velocity = (2.0 * twist.linear.x - twist.angular.z *
                         self.wheel_distance) / (2.0 * self.wheel_radius)
        right_velocity = (2.0 * twist.linear.x + twist.angular.z *
                          self.wheel_distance) / (2.0 * self.wheel_radius)
        self.left_motor.setVelocity(left_velocity)
        self.right_motor.setVelocity(right_velocity)

    def odometry_callback(self, stamp):
        """Integrate wheel encoders into a pose and publish Odometry + TF.

        Args:
            stamp: header timestamp to apply to the published messages.
        """
        encoder_period_s = self.timestep.value / 1000.0
        left_wheel_ticks = self.left_wheel_sensor.getValue()
        right_wheel_ticks = self.right_wheel_sensor.getValue()

        # Wheel angular velocities [rad/s] -> rim velocities [m/s].
        v_left_rad = (left_wheel_ticks -
                      self.prev_left_wheel_ticks) / encoder_period_s
        v_right_rad = (right_wheel_ticks -
                       self.prev_right_wheel_ticks) / encoder_period_s
        v_left = v_left_rad * self.wheel_radius
        v_right = v_right_rad * self.wheel_radius
        v = (v_left + v_right) / 2
        omega = (v_right - v_left) / self.wheel_distance

        # Calculate position & angle
        # Fourth order Runge - Kutta
        # Reference: https://www.cs.cmu.edu/~16311/s07/labs/NXTLabs/Lab%203.html
        k00 = v * cos(self.prev_angle)
        k01 = v * sin(self.prev_angle)
        k02 = omega
        k10 = v * cos(self.prev_angle + encoder_period_s * k02 / 2)
        k11 = v * sin(self.prev_angle + encoder_period_s * k02 / 2)
        k12 = omega
        k20 = v * cos(self.prev_angle + encoder_period_s * k12 / 2)
        k21 = v * sin(self.prev_angle + encoder_period_s * k12 / 2)
        k22 = omega
        k30 = v * cos(self.prev_angle + encoder_period_s * k22 / 2)
        k31 = v * sin(self.prev_angle + encoder_period_s * k22 / 2)
        k32 = omega
        position = [
            self.prev_position[0] + (encoder_period_s / 6) *
            (k00 + 2 * (k10 + k20) + k30), self.prev_position[1] +
            (encoder_period_s / 6) * (k01 + 2 * (k11 + k21) + k31)
        ]
        angle = self.prev_angle + \
            (encoder_period_s / 6) * (k02 + 2 * (k12 + k22) + k32)

        # Update variables
        self.prev_position = position.copy()
        self.prev_angle = angle
        self.prev_left_wheel_ticks = left_wheel_ticks
        self.prev_right_wheel_ticks = right_wheel_ticks

        # Pack & publish odometry.
        msg = Odometry()
        msg.header.stamp = stamp
        msg.header.frame_id = 'odom'
        msg.child_frame_id = 'base_footprint'
        msg.twist.twist.linear.x = v
        # Fix: omega is the yaw rate and belongs in angular.z; it was
        # previously (incorrectly) published as linear.z.
        msg.twist.twist.angular.z = omega
        msg.pose.pose.position.x = position[0]
        msg.pose.pose.position.y = position[1]
        msg.pose.pose.orientation = euler_to_quaternion(0, 0, angle)
        self.odometry_publisher.publish(msg)

        # Pack & publish the matching odom -> base_footprint transform.
        tf = TransformStamped()
        tf.header.stamp = stamp
        tf.header.frame_id = 'odom'
        tf.child_frame_id = 'base_footprint'
        tf.transform.translation.x = position[0]
        tf.transform.translation.y = position[1]
        tf.transform.translation.z = 0.0
        tf.transform.rotation = euler_to_quaternion(0, 0, angle)
        self.tf_broadcaster.sendTransform(tf)

    def distance_callback(self, stamp):
        """Publish individual IR Range messages and an emulated LaserScan.

        Args:
            stamp: header timestamp to apply to the published messages.
        """
        # Offset of the sensors from the robot center [m] - TODO confirm.
        distance_from_center = 0.035

        for key in self.sensors:
            msg = Range()
            msg.field_of_view = self.sensors[key].getAperture()
            # intensity_to_distance is presumably decreasing in intensity,
            # so max intensity maps to min range and vice versa - verify.
            msg.min_range = intensity_to_distance(
                self.sensors[key].getMaxValue() - 8.2) + distance_from_center
            msg.max_range = intensity_to_distance(
                self.sensors[key].getMinValue() + 3.3) + distance_from_center
            msg.range = intensity_to_distance(self.sensors[key].getValue())
            msg.radiation_type = Range.INFRARED
            self.sensor_publishers[key].publish(msg)

        # Max range of ToF sensor is 2m so we put it as maximum laser range.
        # Therefore, for all invalid ranges we put 0 so it get deleted by rviz

        msg = LaserScan()
        msg.header.frame_id = 'laser_scanner'
        msg.header.stamp = stamp
        msg.angle_min = 0.0
        msg.angle_max = 2 * pi
        msg.angle_increment = 15 * pi / 180.0
        msg.scan_time = self.timestep.value / 1000
        msg.range_min = intensity_to_distance(
            self.sensors['ps0'].getMaxValue() - 20) + distance_from_center
        msg.range_max = 1.0 + distance_from_center
        # 25 rays at 15 degree increments; angles without a physical sensor
        # are filled with 0 (invalid). The ToF covers 0 degrees at both ends.
        msg.ranges = [
            self.sensors['tof'].getValue(),  # 0
            intensity_to_distance(self.sensors['ps7'].getValue()),  # 15
            0.0,  # 30
            intensity_to_distance(self.sensors['ps6'].getValue()),  # 45
            0.0,  # 60
            0.0,  # 75
            intensity_to_distance(self.sensors['ps5'].getValue()),  # 90
            0.0,  # 105
            0.0,  # 120
            0.0,  # 135
            intensity_to_distance(self.sensors['ps4'].getValue()),  # 150
            0.0,  # 165
            0.0,  # 180
            0.0,  # 195
            intensity_to_distance(self.sensors['ps3'].getValue()),  # 210
            0.0,  # 225
            0.0,  # 240
            0.0,  # 255
            intensity_to_distance(self.sensors['ps2'].getValue()),  # 270
            0.0,  # 285
            0.0,  # 300
            intensity_to_distance(self.sensors['ps1'].getValue()),  # 315
            0.0,  # 330
            intensity_to_distance(self.sensors['ps0'].getValue()),  # 345
            self.sensors['tof'].getValue(),  # 0
        ]
        # Shift valid readings from sensor frame to robot center.
        for i in range(len(msg.ranges)):
            if msg.ranges[i] != 0:
                msg.ranges[i] += distance_from_center

        self.laser_publisher.publish(msg)
Example #25
0
def callback(msg):
    """Re-broadcast the received transform message over tf."""
    broadcaster = TransformBroadcaster()
    broadcaster.sendTransform(msg)
class WebotsDifferentialDriveNode(WebotsNode):
    """
    Extends WebotsNode to allow easy integration with differential drive robots.

    Args:
        name (WebotsNode): Webots Robot node.
        args (dict): Arguments passed to ROS2 base node.
        wheel_distance (float): Distance between two wheels (axle length) in meters.
        wheel_radius (float): Radius of both wheels in meters.
        left_joint (str): Name of motor associated with left wheel.
        right_joint (str): Name of motor associated with right wheel.
        left_encoder (str): Name of encoder associated with left wheel.
        right_encoder (str): Name of encoder associated with right wheel.
        command_topic (str): Name of topic to which
            [`geometry_msgs/Twist`](https://github.com/ros2/common_interfaces/blob/master/geometry_msgs/msg/Twist.msg)
            the node is subscribed to.
        odometry_topic (str): Name of topic to which
            [`nav_msgs/Odometry`](https://github.com/ros2/common_interfaces/blob/master/nav_msgs/msg/Odometry.msg)
            messages are published.
        odometry_frame (str): Name of odometry frame.
        robot_base_frame (str): Name of robot's base link.
    """
    def __init__(self,
                 name,
                 args,
                 wheel_distance=0,
                 wheel_radius=0,
                 left_joint='left wheel motor',
                 right_joint='right wheel motor',
                 left_encoder='left wheel sensor',
                 right_encoder='right wheel sensor',
                 command_topic='/cmd_vel',
                 odometry_topic='/odom',
                 odometry_frame='odom',
                 robot_base_frame='base_link'):
        super().__init__(name, args)

        # Every constructor argument can be overridden by a ROS parameter.
        wheel_distance_param = self.declare_parameter('wheel_distance',
                                                      wheel_distance)
        wheel_radius_param = self.declare_parameter('wheel_radius',
                                                    wheel_radius)
        left_joint_param = self.declare_parameter('left_joint', left_joint)
        right_joint_param = self.declare_parameter('right_joint', right_joint)
        left_encoder_param = self.declare_parameter('left_encoder',
                                                    left_encoder)
        right_encoder_param = self.declare_parameter('right_encoder',
                                                     right_encoder)
        command_topic_param = self.declare_parameter('command_topic',
                                                     command_topic)
        odometry_topic_param = self.declare_parameter('odometry_topic',
                                                      odometry_topic)
        odometry_frame_param = self.declare_parameter('odometry_frame',
                                                      odometry_frame)
        robot_base_frame_param = self.declare_parameter(
            'robot_base_frame', robot_base_frame)
        self._wheel_radius = wheel_radius_param.value
        self._wheel_distance = wheel_distance_param.value
        self.set_parameters_callback(self._on_param_changed)
        # Wheel geometry is mandatory; without it the kinematics divide by 0.
        if self._wheel_radius == 0 or self._wheel_distance == 0:
            # Fix: added missing "be" to the error message.
            self.get_logger().error(
                'Parameters `wheel_distance` and `wheel_radius` have to be greater than 0'
            )
            self.destroy_node()
            sys.exit(1)
        self.get_logger().info(
            f'Initializing differential drive node with wheel_distance = {self._wheel_distance} and '
            + f'wheel_radius = {self._wheel_radius}')

        # Store config
        self._odometry_frame = odometry_frame_param.value
        self._robot_base_frame = robot_base_frame_param.value

        # Initialize motors in velocity-control mode.
        self.left_motor = self.robot.getMotor(left_joint_param.value)
        self.right_motor = self.robot.getMotor(right_joint_param.value)
        self.left_motor.setPosition(float('inf'))
        self.right_motor.setPosition(float('inf'))
        self.left_motor.setVelocity(0)
        self.right_motor.setVelocity(0)
        self.create_subscription(Twist, command_topic_param.value,
                                 self._cmd_vel_callback, 1)

        # Initialize odometry from the wheel encoders.
        self.reset_odometry()
        self.left_wheel_sensor = self.robot.getPositionSensor(
            left_encoder_param.value)
        self.right_wheel_sensor = self.robot.getPositionSensor(
            right_encoder_param.value)
        self.left_wheel_sensor.enable(self.timestep)
        self.right_wheel_sensor.enable(self.timestep)
        self._odometry_publisher = self.create_publisher(
            Odometry, odometry_topic_param.value, 1)
        self._tf_broadcaster = TransformBroadcaster(self)

        # Initialize timer
        self._last_odometry_sample_time = self.robot.getTime()

    def _cmd_vel_callback(self, twist):
        """Convert a body Twist into per-wheel angular velocities [rad/s]."""
        self.get_logger().info('Message received')
        # Differential-drive inverse kinematics.
        right_velocity = twist.linear.x + self._wheel_distance * twist.angular.z / 2
        left_velocity = twist.linear.x - self._wheel_distance * twist.angular.z / 2
        left_omega = left_velocity / (self._wheel_radius)
        right_omega = right_velocity / (self._wheel_radius)
        self.left_motor.setVelocity(left_omega)
        self.right_motor.setVelocity(right_omega)

    def step(self, ms):
        """Advance the simulation, integrate odometry, publish Odometry + TF.

        Args:
            ms: simulation step duration passed through to the base class.
        """
        super().step(ms)

        stamp = Time(seconds=self.robot.getTime()).to_msg()

        time_diff_s = self.robot.getTime() - self._last_odometry_sample_time
        left_wheel_ticks = self.left_wheel_sensor.getValue()
        right_wheel_ticks = self.right_wheel_sensor.getValue()

        # Avoid a division by zero when no simulated time has elapsed.
        if time_diff_s == 0.0:
            return

        # Wheel angular velocities [rad/s] -> rim velocities [m/s].
        v_left_rad = (left_wheel_ticks -
                      self._prev_left_wheel_ticks) / time_diff_s
        v_right_rad = (right_wheel_ticks -
                       self._prev_right_wheel_ticks) / time_diff_s
        v_left = v_left_rad * self._wheel_radius
        v_right = v_right_rad * self._wheel_radius
        v = (v_left + v_right) / 2
        omega = (v_right - v_left) / self._wheel_distance

        # Calculate position & angle
        # Fourth order Runge - Kutta
        # Reference: https://www.cs.cmu.edu/~16311/s07/labs/NXTLabs/Lab%203.html
        k00 = v * cos(self._prev_angle)
        k01 = v * sin(self._prev_angle)
        k02 = omega
        k10 = v * cos(self._prev_angle + time_diff_s * k02 / 2)
        k11 = v * sin(self._prev_angle + time_diff_s * k02 / 2)
        k12 = omega
        k20 = v * cos(self._prev_angle + time_diff_s * k12 / 2)
        k21 = v * sin(self._prev_angle + time_diff_s * k12 / 2)
        k22 = omega
        k30 = v * cos(self._prev_angle + time_diff_s * k22 / 2)
        k31 = v * sin(self._prev_angle + time_diff_s * k22 / 2)
        k32 = omega
        position = [
            self._prev_position[0] + (time_diff_s / 6) *
            (k00 + 2 * (k10 + k20) + k30), self._prev_position[1] +
            (time_diff_s / 6) * (k01 + 2 * (k11 + k21) + k31)
        ]
        angle = self._prev_angle + \
            (time_diff_s / 6) * (k02 + 2 * (k12 + k22) + k32)

        # Update variables
        self._prev_position = position.copy()
        self._prev_angle = angle
        self._prev_left_wheel_ticks = left_wheel_ticks
        self._prev_right_wheel_ticks = right_wheel_ticks
        self._last_odometry_sample_time = self.robot.getTime()

        # Pack & publish odometry.
        msg = Odometry()
        msg.header.stamp = stamp
        msg.header.frame_id = self._odometry_frame
        msg.child_frame_id = self._robot_base_frame
        msg.twist.twist.linear.x = v
        msg.twist.twist.angular.z = omega
        msg.pose.pose.position.x = position[0]
        msg.pose.pose.position.y = position[1]
        # Planar yaw-only quaternion: (0, 0, sin(a/2), cos(a/2)).
        msg.pose.pose.orientation.z = sin(angle / 2)
        msg.pose.pose.orientation.w = cos(angle / 2)
        self._odometry_publisher.publish(msg)

        # Pack & publish the matching odometry -> base transform.
        tf = TransformStamped()
        tf.header.stamp = stamp
        tf.header.frame_id = self._odometry_frame
        tf.child_frame_id = self._robot_base_frame
        tf.transform.translation.x = position[0]
        tf.transform.translation.y = position[1]
        tf.transform.translation.z = 0.0
        tf.transform.rotation.z = sin(angle / 2)
        tf.transform.rotation.w = cos(angle / 2)
        self._tf_broadcaster.sendTransform(tf)

    def _on_param_changed(self, params):
        """Apply runtime changes to wheel geometry, resetting odometry."""
        result = SetParametersResult()
        result.successful = True

        for param in params:
            if param.name == "wheel_radius":
                self.reset_odometry()
                self._wheel_radius = param.value
            elif param.name == "wheel_distance":
                self.reset_odometry()
                self._wheel_distance = param.value

        return result

    def reset_odometry(self):
        """Zero the integrated pose and the stored wheel-encoder readings."""
        self._prev_left_wheel_ticks = 0
        self._prev_right_wheel_ticks = 0
        self._prev_position = (0.0, 0.0)
        self._prev_angle = 0.0
Example #27
0
class StatePublisher(Node):
    """Publish a fixed joint configuration and an identity odom->base_link
    transform at 30 Hz for a gantry robot (visualization/testing aid).

    Note: the constructor calls rclpy.init() and then blocks in its own
    spin/publish loop until shutdown or KeyboardInterrupt.
    """

    def __init__(self):
        rclpy.init()
        super().__init__("state_publisher")

        qos_profile = QoSProfile(depth=10)
        self.joint_pub = self.create_publisher(JointState, "joint_states", qos_profile)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.nodeName = self.get_name()
        self.get_logger().info("{0} started".format(self.nodeName))

        loop_rate = self.create_rate(30)

        # Message declarations; frame ids never change, only the stamps do.
        odom_trans = TransformStamped()
        odom_trans.header.frame_id = "odom"
        odom_trans.child_frame_id = "base_link"
        joint_state = JointState()

        joint_names = [
            "joint_tracks_gantry",
            "joint_gantry_crossSlide",
            "joint_cross_slide_z_axis",
            "joint_gantry_crossSlide2",
        ]
        default_position = [0.0, 0.0, 0.0, 0.0]

        try:
            while rclpy.ok():
                rclpy.spin_once(self)

                # Re-stamp the (static) joint state every cycle.
                now = self.get_clock().now()
                joint_state.header.stamp = now.to_msg()
                joint_state.name = joint_names
                joint_state.position = default_position

                # Identity transform: the robot stays at the odom origin.
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = 0.0
                odom_trans.transform.translation.y = 0.0
                odom_trans.transform.translation.z = 0.0
                odom_trans.transform.rotation = euler_to_quaternion(0, 0, 0)

                # Send the joint state and transform.
                self.joint_pub.publish(joint_state)
                self.broadcaster.sendTransform(odom_trans)

                # This will adjust as needed per iteration
                loop_rate.sleep()

        except KeyboardInterrupt:
            pass
Example #28
0
class Odometry_pub(Node):
    """Dead-reckoning odometry publisher.

    Integrates the most recent commanded velocities (from ``cmd_vel``) at a
    fixed 0.02 s timestep and publishes the resulting pose/twist on ``odom``
    plus the matching odom -> base_link TF transform.
    """

    def __init__(self):
        super().__init__('odom_pub')

        # Commanded-velocity input.
        self.cmd_vel_subscriber = self.create_subscription(
            Twist, 'cmd_vel', self.cmdVel_callback, 1)

        # Integrated planar pose (x, y, yaw) and last commanded velocities.
        self.x = 0.0
        self.y = 0.0
        self.th = 0.0
        self.vx = 0.0
        self.vy = 0.0
        self.vth = 0.0
        self.time_step = 0.02  # integration period [s] (50 Hz)
        self.odom_pub = self.create_publisher(Odometry, "odom", 1)
        # Fix: create the TF broadcaster once here instead of allocating a
        # new one on every timer tick inside publish_odom().
        self.odom_broadcaster = TransformBroadcaster(self)
        self.odom_timer = self.create_timer(self.time_step, self.odom_callback)

    def odom_callback(self):
        """Timer callback: emit one odometry sample."""
        self.publish_odom()

    def euler_to_quaternion(self, yaw, pitch, roll):
        """Convert Euler angles [rad] to a quaternion list [qx, qy, qz, qw]."""
        qx = math.sin(roll / 2) * math.cos(pitch / 2) * math.cos(
            yaw / 2) - math.cos(roll / 2) * math.sin(pitch / 2) * math.sin(
                yaw / 2)
        qy = math.cos(roll / 2) * math.sin(pitch / 2) * math.cos(
            yaw / 2) + math.sin(roll / 2) * math.cos(pitch / 2) * math.sin(
                yaw / 2)
        qz = math.cos(roll / 2) * math.cos(pitch / 2) * math.sin(
            yaw / 2) - math.sin(roll / 2) * math.sin(pitch / 2) * math.cos(
                yaw / 2)
        qw = math.cos(roll / 2) * math.cos(pitch / 2) * math.cos(
            yaw / 2) + math.sin(roll / 2) * math.sin(pitch / 2) * math.sin(
                yaw / 2)

        return [qx, qy, qz, qw]

    def publish_odom(self):
        """Integrate one timestep, broadcast TF and publish Odometry."""
        # Advance the pose by one Euler step in the odom frame.
        dt = self.time_step
        delta_x = (self.vx * math.cos(self.th) -
                   self.vy * math.sin(self.th)) * dt
        delta_y = (self.vx * math.sin(self.th) +
                   self.vy * math.cos(self.th)) * dt
        delta_th = self.vth * dt

        self.x += delta_x
        self.y += delta_y
        self.th += delta_th

        # Since all odometry is 6DOF we need a quaternion created from yaw.
        odom_quat = self.euler_to_quaternion(self.th, 0, 0)

        # First, publish the transform over tf.
        odom_transform = TransformStamped()
        odom_transform.header.stamp = self.get_clock().now().to_msg()
        odom_transform.header.frame_id = 'odom'
        odom_transform.child_frame_id = 'base_link'
        odom_transform.transform.rotation.x = odom_quat[0]
        odom_transform.transform.rotation.y = odom_quat[1]
        odom_transform.transform.rotation.z = odom_quat[2]
        odom_transform.transform.rotation.w = odom_quat[3]
        odom_transform.transform.translation.x = self.x
        odom_transform.transform.translation.y = self.y
        odom_transform.transform.translation.z = 0.0

        self.odom_broadcaster.sendTransform(odom_transform)

        # Then publish the Odometry message with the same pose.
        odom = Odometry()
        odom.header.stamp = self.get_clock().now().to_msg()
        odom.header.frame_id = "odom"
        odom.child_frame_id = "base_link"
        # Set the position.
        odom.pose.pose.position.x = self.x
        odom.pose.pose.position.y = self.y
        odom.pose.pose.orientation.x = odom_quat[0]
        odom.pose.pose.orientation.y = odom_quat[1]
        odom.pose.pose.orientation.z = odom_quat[2]
        odom.pose.pose.orientation.w = odom_quat[3]

        # Set the velocity (last commanded values).
        odom.twist.twist.linear.x = self.vx
        odom.twist.twist.linear.y = self.vy
        odom.twist.twist.angular.z = self.vth

        # Publish the message.
        self.odom_pub.publish(odom)

    def cmdVel_callback(self, msg):
        """Cache the commanded forward and yaw velocities for integration."""
        self.vx = msg.linear.x
        self.vth = msg.angular.z
class StatePublisher(Node):
    """Publishes animated joint states and a circular odom->base_link
    transform for a four-wheel-steering robot (visualization aid).

    Note: the constructor calls rclpy.init() and then blocks in its own
    spin/publish loop until shutdown or KeyboardInterrupt.
    """

    def __init__(self):
        # NOTE(review): this node owns ROS initialization; only one such
        # instance can exist per process.
        rclpy.init()
        super().__init__('state_publisher')

        qos_profile = QoSProfile(depth=10)
        self.joint_pub = self.create_publisher(JointState, 'joint_states',
                                               qos_profile)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.nodeName = self.get_name()
        self.get_logger().info("{0} started".format(self.nodeName))

        loop_rate = self.create_rate(30)

        # robot state
        steering_angle = 0.
        wheel_angle = 0.

        # message declarations (frame ids are fixed; stamps set per loop)
        odom_trans = TransformStamped()
        odom_trans.header.frame_id = 'odom'
        odom_trans.child_frame_id = 'base_link'
        joint_state = JointState()

        try:
            # t tracks simulated elapsed time, advanced 1/30 s per loop.
            t = 0
            while rclpy.ok():
                t += 1. / 30.
                self.get_logger().info(str(t))
                rclpy.spin_once(self)

                # Set angle: all four wheels spin together; atan2(sin, cos)
                # wraps the accumulated angle into (-pi, pi].
                rotation_names = [
                    'front_left_wheel_joint', 'front_right_wheel_joint',
                    'rear_right_wheel_joint', 'rear_left_wheel_joint'
                ]
                wheel_angle += 2 * pi * t / 50
                wheel_angle = atan2(sin(wheel_angle), cos(wheel_angle))
                rotation_position = [
                    wheel_angle, wheel_angle, wheel_angle, wheel_angle
                ]

                # Set steering: front wheels oscillate +-pi/3 with a 4 s period.
                steering_angle = pi / 3. * sin(t * 2 * pi / 4.)
                steering_names = [
                    'front_left_steering_joint', 'front_right_steering_joint'
                ]
                steering_position = [steering_angle, steering_angle]

                # update joint_state with both wheel and steering joints
                now = self.get_clock().now()
                joint_state.header.stamp = now.to_msg()
                joint_state.name = rotation_names + steering_names
                joint_state.position = rotation_position + steering_position

                # update transform
                # (moving in a circle with radius=2, one lap every 10 s)
                angle = t * 2 * pi / 10.
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = cos(angle) * 2
                odom_trans.transform.translation.y = sin(angle) * 2
                odom_trans.transform.translation.z = 0.7
                odom_trans.transform.rotation = \
                    euler_to_quaternion(0, 0, angle + pi/2)  # roll,pitch,yaw

                # send the joint state and transform
                self.joint_pub.publish(joint_state)
                self.broadcaster.sendTransform(odom_trans)

                # This will adjust as needed per iteration
                loop_rate.sleep()

        except KeyboardInterrupt:
            pass
class nightmare_node():
    def __init__(self):
        """Set up publishers and the fixed world->base_link transform, then
        run the 50 Hz publish loop until shutdown (this constructor blocks).
        """
        self.fixed_frame = rospy.get_param(
            '~fixed_frame', "world"
        )  # set fixed frame relative to world to apply the transform
        self.publish_transform = rospy.get_param(
            '~publish_transform',
            False)  # set true in launch file if the transform is needed
        self.pub_jnt = rospy.Publisher("joint_states",
                                       JointState,
                                       queue_size=10)  # joint state publisher
        self.odom_broadcaster = TransformBroadcaster()

        # Reusable message objects; only the stamps/positions change per loop.
        self.jnt_msg = JointState()  # joint topic structure
        self.odom_trans = TransformStamped()
        self.odom_trans.header.frame_id = 'world'
        self.odom_trans.child_frame_id = 'base_link'

        self.jnt_msg.header = Header()
        # 18 leg servo joints (Rev103..Rev120) plus the fixed Rev121 joint.
        self.jnt_msg.name = [
            'Rev103', 'Rev104', 'Rev105', 'Rev106', 'Rev107', 'Rev108',
            'Rev109', 'Rev110', 'Rev111', 'Rev112', 'Rev113', 'Rev114',
            'Rev115', 'Rev116', 'Rev117', 'Rev118', 'Rev119', 'Rev120',
            'Rev121'
        ]
        """self.jnt_msg.name = ['Rev105', 'Rev111', 'Rev117', 'Rev104', 'Rev110', 'Rev116',
                             'Rev103', 'Rev109', 'Rev115', 'Rev108', 'Rev114', 'Rev120',
                             'Rev107', 'Rev113', 'Rev119', 'Rev106', 'Rev112', 'Rev118', 
                             'Rev121']"""
        self.jnt_msg.velocity = []
        self.jnt_msg.effort = []

        rospy.on_shutdown(self.shutdown_node)

        rospy.loginfo("Ready for publishing")

        rate = rospy.Rate(50)

        # Publish an identity world->base_link transform plus the current
        # joint angles at 50 Hz until the node is shut down.
        while not rospy.is_shutdown():
            self.odom_trans.header.stamp = rospy.Time.now()
            # NOTE(review): int 0 assigned to float64 fields; rospy accepts
            # this, but a ROS2 port would require explicit 0.0 -- confirm.
            self.odom_trans.transform.translation.x = 0
            self.odom_trans.transform.translation.y = 0
            self.odom_trans.transform.translation.z = 0
            self.odom_trans.transform.rotation = euler_to_quaternion(
                0, 0, 0)  # roll,pitch,yaw

            self.odom_broadcaster.sendTransform(self.odom_trans)
            self.publish_jnt()

            rate.sleep()

    def publish_jnt(self):
        """Read the current servo angles and publish them as a JointState.

        Angles are gathered from the global `legs` structure (6 legs x 3
        servos) and remapped from (leg, servo) order into the joint order
        of self.jnt_msg.name; the trailing 0 presumably corresponds to the
        fixed last joint (Rev121) - confirm against the URDF.
        """
        # Flatten the 6x3 servo grid into a single 18-element list.
        angles = [legs[leg_num].servo[servo_num].angle
                  for leg_num in range(6)
                  for servo_num in range(3)]

        # Remap into the published joint order (the original also had a
        # dead `self.jnt_msg.position = angles` assignment here that was
        # immediately overwritten; it has been removed).
        self.jnt_msg.position = [
            angles[2], angles[8], angles[14], angles[1], angles[7], angles[13],
            angles[0], angles[6], angles[12], angles[5], angles[11],
            angles[17], angles[4], angles[10], angles[16], angles[3],
            angles[9], angles[15], 0
        ]

        self.jnt_msg.header.stamp = rospy.Time.now()
        self.pub_jnt.publish(self.jnt_msg)

    def shutdown_node(self):
        rospy.loginfo("shutting down hardware handler node")