Example #1
    def _broadcastTransform(self, pose, parentName, childName):
        # Broadcast `pose` as the transform parentName -> childName.
        # Note: a TransformBroadcaster is normally created once and reused,
        # not re-created on every call as it is here.
        br = TransformBroadcaster()
        tr = TransformStamped()

        tr.header.stamp = Time.now()
        tr.header.frame_id = parentName
        tr.child_frame_id = childName
        # Copy the position field by field: translation is a Vector3, not a Point.
        tr.transform.translation.x = pose.position.x
        tr.transform.translation.y = pose.position.y
        tr.transform.translation.z = pose.position.z
        tr.transform.rotation = pose.orientation

        br.sendTransform(tr)
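
A minimal, hypothetical call site for the method above (the `node` object and the pose values are assumptions for illustration, not part of the original example):

    # Hypothetical usage: broadcast where "robot" sits relative to "map".
    from geometry_msgs.msg import Pose

    pose = Pose()
    pose.position.x = 1.0
    pose.position.y = 2.0
    pose.orientation.w = 1.0  # identity rotation
    node._broadcastTransform(pose, "map", "robot")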
Example #2
    def publish(self):
        now = self.get_clock().now()
        self.odometry.updatePose(now)
        pose = self.odometry.getPose()

        #q = quaternion_from_euler(0, 0, pose.theta)
        #self.tfPub.sendTransform(
        #    (pose.x, pose.y, 0),
        #    (q[0], q[1], q[2], q[3]),
        #    now,
        #    self.baseFrameID,
        #    self.odomFrameID
        #)

        # Broadcast the odom -> base_link transform
        odom_trans = TransformStamped()
        odom_trans.header.stamp = now.to_msg()
        odom_trans.header.frame_id = self.odomFrameID
        odom_trans.child_frame_id = self.baseFrameID
        odom_trans.transform.translation.x = float(pose.x)
        odom_trans.transform.translation.y = float(pose.y)
        odom_trans.transform.translation.z = 0.0
        # Include the heading as a yaw-only quaternion; otherwise the
        # broadcast transform is always published with identity rotation.
        odom_trans.transform.rotation.z = sin(pose.theta / 2.0)
        odom_trans.transform.rotation.w = cos(pose.theta / 2.0)
        self.tfPub.sendTransform(odom_trans)

        #odom = Odometry()
        #odom.header.stamp = now
        #odom.header.frame_id = self.odomFrameID
        #odom.child_frame_id = self.baseFrameID
        #odom.pose.pose.position.x = pose.x
        #odom.pose.pose.position.y = pose.y
        #odom.pose.pose.orientation.x = q[0]
        #odom.pose.pose.orientation.y = q[1]
        #odom.pose.pose.orientation.z = q[2]
        #odom.pose.pose.orientation.w = q[3]
        #odom.twist.twist.linear.x = pose.xVel
        #odom.twist.twist.angular.z = pose.thetaVel
        #self.odomPub.publish(odom)

        odom = Odometry()
        odom.header.stamp = now.to_msg()
        odom.header.frame_id = self.odomFrameID
        odom.child_frame_id = self.baseFrameID
        odom.pose.pose.position.x = float(pose.x)
        odom.pose.pose.position.y = float(pose.y)
        odom.pose.pose.position.z = 0.0
        quaternion = Quaternion(x=0.0,
                                y=0.0,
                                z=sin(pose.theta / 2.0),
                                w=cos(pose.theta / 2.0))
        odom.pose.pose.orientation = quaternion
        odom.twist.twist.linear.x = float(pose.xVel)
        odom.twist.twist.linear.y = 0.0
        odom.twist.twist.angular.z = float(pose.thetaVel)
        self.odomPub.publish(odom)
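
For reference, the yaw-only quaternion built inline above is equivalent to this small helper (a sketch; the helper name is illustrative and not part of the original code):

    from math import sin, cos
    from geometry_msgs.msg import Quaternion

    def yaw_to_quaternion(theta):
        # A planar robot only rotates about Z, so roll = pitch = 0 and the
        # quaternion reduces to (0, 0, sin(theta/2), cos(theta/2)).
        return Quaternion(x=0.0, y=0.0, z=sin(theta / 2.0), w=cos(theta / 2.0))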
Example #3
    def setUp(self):
        self.point_cloud_in = point_cloud2.PointCloud2()
        self.point_cloud_in.fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1)
        ]

        self.point_cloud_in.point_step = 4 * 3
        self.point_cloud_in.height = 1
        # we add two points (with x, y, z) to the cloud
        self.point_cloud_in.width = 2
        self.point_cloud_in.row_step = self.point_cloud_in.point_step * self.point_cloud_in.width

        points = [1, 2, 0, 10, 20, 30]
        self.point_cloud_in.data = struct.pack('%sf' % len(points), *points)

        self.transform_translate_xyz_300 = TransformStamped()
        self.transform_translate_xyz_300.transform.translation.x = 300
        self.transform_translate_xyz_300.transform.translation.y = 300
        self.transform_translate_xyz_300.transform.translation.z = 300
        self.transform_translate_xyz_300.transform.rotation.w = 1  # no rotation so we only set w

        assert (list(point_cloud2.read_points(self.point_cloud_in)) == [
            (1.0, 2.0, 0.0), (10.0, 20.0, 30.0)
        ])
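
A test body that this fixture could support might look like the following sketch (it assumes do_transform_cloud from tf2_sensor_msgs is imported; the test name and expected values are derived from the fixture above):

    def test_translation(self):
        # Shift the cloud by +300 on each axis and read the points back.
        cloud_out = do_transform_cloud(self.point_cloud_in,
                                       self.transform_translate_xyz_300)
        self.assertEqual(list(point_cloud2.read_points(cloud_out)),
                         [(301.0, 302.0, 300.0), (310.0, 320.0, 330.0)])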
Example #4
 def __init__(self):
     self._datum = rospy.get_param("~datum", None)
     if self._datum is not None and len(self._datum) < 3:
         self._datum += [0.0]
     self._use_datum = self._datum is not None
     self._earth_frame_id = rospy.get_param("~earth_frame_id", "earth")
     self._utm_frame_id = rospy.get_param("~utm_frame_id", "utm")
     self._map_frame_id = rospy.get_param("~map_frame_id", "map")
     self._odom_frame_id = rospy.get_param("~odom_frame_id", "odom")
     self._body_frame_id = rospy.get_param("~base_frame_id",
                                           "base_link")  # currently unused
     self._tf_broadcast_rate = rospy.get_param("~broadcast_rate", 10.0)
     self._serv = rospy.Service("~set_datum", SetDatum, self._set_datum)
     self._tf_buff = Buffer()
     TransformListener(self._tf_buff)
     self._tf_bcast = TransformBroadcaster()
     self._last_datum = None
     self._static_map_odom = rospy.get_param("~static_map_odom", False)
     self._odom_updated = False
     self._update_odom_serv = rospy.Service("~set_odom", Trigger,
                                            self._handle_set_odom)
     if not self._use_datum:
         rospy.Subscriber("gps_datum", NavSatFix, self._handle_datum)
     if not self._static_map_odom:
         rospy.Subscriber("waterlinked/pose_with_cov_stamped",
                          PoseWithCovarianceStamped, self._handle_odom_tf)
     else:
         StaticTransformBroadcaster().sendTransform(
             TransformStamped(
                 Header(0, rospy.Time.now(), self._map_frame_id),
                 self._odom_frame_id,
                 # Static case: map and odom coincide, so use the identity transform.
                 Transform(Vector3(0, 0, 0), Quaternion(0, 0, 0, 1))))
     self._map_odom_tf = None
     rospy.Timer(rospy.Duration.from_sec(1.0 / self._tf_broadcast_rate),
                 self._send_tf)
Example #5
    def setUp(self):
        self.point_cloud_in = point_cloud2.PointCloud2()
        self.point_cloud_in.fields = [
            PointField('x', 0, PointField.FLOAT32, 1),
            PointField('y', 4, PointField.FLOAT32, 1),
            PointField('z', 8, PointField.FLOAT32, 1),
            PointField('index', 12, PointField.INT32, 1)
        ]

        self.point_cloud_in.point_step = 4 * 4
        self.point_cloud_in.height = 1
        # we add two points (with x, y, z, index) to the cloud
        self.point_cloud_in.width = 2
        self.point_cloud_in.row_step = self.point_cloud_in.point_step * self.point_cloud_in.width

        self.points = [(1.0, 2.0, 0.0, 123), (10.0, 20.0, 30.0, 456)]
        for point in self.points:
            self.point_cloud_in.data += struct.pack('3fi', *point)

        self.transform_translate_xyz_300 = TransformStamped()
        self.transform_translate_xyz_300.transform.translation.x = self.TRANSFORM_OFFSET_DISTANCE
        self.transform_translate_xyz_300.transform.translation.y = self.TRANSFORM_OFFSET_DISTANCE
        self.transform_translate_xyz_300.transform.translation.z = self.TRANSFORM_OFFSET_DISTANCE
        self.transform_translate_xyz_300.transform.rotation.w = 1  # no rotation so we only set w

        assert (list(point_cloud2.read_points(
            self.point_cloud_in)) == self.points)
Example #6
 def state_callback(self, msg):
     self.pose_array.append(msg)
     now = self.get_clock().now()
     p = msg.pose.position
     q = msg.pose.orientation
     odom_trans = TransformStamped()
     odom_trans.header.frame_id = msg.header.frame_id
     odom_trans.child_frame_id = 'base_link'
     odom_trans.header.stamp = now.to_msg()
     odom_trans.transform.translation.x = p.x
     odom_trans.transform.translation.y = p.y
     odom_trans.transform.translation.z = p.z
     odom_trans.transform.rotation = q
     self.broadcaster.sendTransform(odom_trans)
     path_msg = Path()
     path_msg.header.frame_id = msg.header.frame_id
     path_msg.header.stamp = now.to_msg()
     path_msg.poses = self.pose_array
     self.path_publisher.publish(path_msg)
Example #7
 def _get_tfs(self, data):
     if type(data) is NavSatFix:
         earth, utm = self._get_coords(data.latitude, data.longitude)
         header_earth = Header(data.header.seq, data.header.stamp,
                               self._earth_frame_id)
         header_utm = Header(data.header.seq, data.header.stamp,
                             self._utm_frame_id)
     else:
         earth, utm = self._get_coords(self._datum[0], self._datum[1])
         header_earth = Header(0, rospy.Time.now(), self._earth_frame_id)
         header_utm = Header(0, rospy.Time.now(), self._utm_frame_id)
     # Broadcast datum's latitude and longitude
     # Transform of the UTM frame expressed in the ECEF (earth) frame
     earth_to_utm = TransformStamped(
         header_earth, self._utm_frame_id,
         Transform(Vector3(*earth[:3]), Quaternion(*earth[3:])))
     # Transform of the map frame expressed in the UTM frame
     utm_to_map = TransformStamped(
         header_utm, self._map_frame_id,
         Transform(Vector3(*utm[:3]), Quaternion(*utm[3:])))
     return earth_to_utm, utm_to_map
Example #8
    def __init__(self):
        rclpy.init()
        super().__init__('oakd_publisher')
        qos_profile = QoSProfile(depth=10)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        
        #Image publisher
        #self.pub_depth_img = self.create_publisher(sensor_msgs.msg.Image, "/depth_img", 10)
        self.pub_rectified_img = self.create_publisher(Image, "/rectified_img", 10)
        
        # Transform for head declaration
        self.transform_head = TransformStamped()
        self.transform_head.header.frame_id = 'oak-d_camera_center'
        self.transform_head.child_frame_id = 'head'

        # Transform for palm declaration
        self.transform_palm = TransformStamped()
        self.transform_palm.header.frame_id = 'oak-d_camera_center'
        self.transform_palm.child_frame_id = 'palm'
        # Rotation is left at its default identity quaternion (no rotation)

        self.fps = FPS()
        self.advanced_main()
Example #9
    def __init__(self):
        self.fixed_frame = rospy.get_param(
            '~fixed_frame', "world"
        )  # set fixed frame relative to world to apply the transform
        self.publish_transform = rospy.get_param(
            '~publish_transform',
            False)  # set true in launch file if the transform is needed
        self.pub_jnt = rospy.Publisher("joint_states",
                                       JointState,
                                       queue_size=10)  # joint state publisher
        self.odom_broadcaster = TransformBroadcaster()

        self.jnt_msg = JointState()  # joint topic structure
        self.odom_trans = TransformStamped()
        self.odom_trans.header.frame_id = 'world'
        self.odom_trans.child_frame_id = 'base_link'

        self.jnt_msg.header = Header()
        self.jnt_msg.name = [
            'Rev103', 'Rev104', 'Rev105', 'Rev106', 'Rev107', 'Rev108',
            'Rev109', 'Rev110', 'Rev111', 'Rev112', 'Rev113', 'Rev114',
            'Rev115', 'Rev116', 'Rev117', 'Rev118', 'Rev119', 'Rev120',
            'Rev121'
        ]
        """self.jnt_msg.name = ['Rev105', 'Rev111', 'Rev117', 'Rev104', 'Rev110', 'Rev116',
                             'Rev103', 'Rev109', 'Rev115', 'Rev108', 'Rev114', 'Rev120',
                             'Rev107', 'Rev113', 'Rev119', 'Rev106', 'Rev112', 'Rev118', 
                             'Rev121']"""
        self.jnt_msg.velocity = []
        self.jnt_msg.effort = []

        rospy.on_shutdown(self.shutdown_node)

        rospy.loginfo("Ready for publishing")

        rate = rospy.Rate(50)

        while not rospy.is_shutdown():
            self.odom_trans.header.stamp = rospy.Time.now()
            self.odom_trans.transform.translation.x = 0
            self.odom_trans.transform.translation.y = 0
            self.odom_trans.transform.translation.z = 0
            self.odom_trans.transform.rotation = euler_to_quaternion(
                0, 0, 0)  # roll,pitch,yaw

            self.odom_broadcaster.sendTransform(self.odom_trans)
            self.publish_jnt()

            rate.sleep()
Example #10
 def _handle_odom_tf(self, msg):
     # Given the pose of the vehicle in waterlinked frame, output the map -> odom tf transformation
     if not self._odom_updated:
         point = PointStamped(
             Header(0, rospy.Time.from_sec(0.0), msg.header.frame_id),
             msg.pose.pose.position)
         try:
             point_map = self._tf_buff.transform(point, self._map_frame_id)
         except Exception as e:
             rospy.logerr_throttle(
                 5, "{} | {}".format(rospy.get_name(), e.message))
             return
         # Odom is always at same depth as map
         self._map_odom_tf = TransformStamped(
             Header(0, rospy.Time.now(), self._map_frame_id),
             self._odom_frame_id,
             Transform(Vector3(point_map.point.x, point_map.point.y, 0),
                       Quaternion(0, 0, 0, 1)))
         self._odom_updated = True
Example #11
    def __init__(self):
        rclpy.init()
        super().__init__("state_publisher")

        qos_profile = QoSProfile(depth=10)
        self.joint_pub = self.create_publisher(JointState, "joint_states", qos_profile)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.nodeName = self.get_name()
        self.get_logger().info("{0} started".format(self.nodeName))

        degree = pi / 180.0
        loop_rate = self.create_rate(30)

        # robot state
        tilt = 0.0
        tinc = degree
        swivel = 0.0
        angle = 0.0
        height = 0.0
        hinc = 0.005

        # message declarations
        odom_trans = TransformStamped()
        odom_trans.header.frame_id = "odom"
        odom_trans.child_frame_id = "base_link"
        joint_state = JointState()

        joint_names = [
            "joint_tracks_gantry",
            "joint_gantry_crossSlide",
            "joint_cross_slide_z_axis",
            "joint_gantry_crossSlide2",
        ]
        default_position = [0.0, 0.0, 0.0, 0.0]

        try:
            while rclpy.ok():
                rclpy.spin_once(self)

                # update joint_state
                now = self.get_clock().now()
                joint_state.header.stamp = now.to_msg()
                joint_state.name = joint_names
                joint_state.position = default_position

                """
                # update transform
                # (moving in a circle with radius=2)
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = cos(angle) * 2
                odom_trans.transform.translation.y = sin(angle) * 2
                odom_trans.transform.translation.z = 0.7
                odom_trans.transform.rotation = euler_to_quaternion(
                    0, 0, angle + pi / 2
                )  # roll,pitch,yaw
                """
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = 0.0
                odom_trans.transform.translation.y = 0.0
                odom_trans.transform.translation.z = 0.0
                odom_trans.transform.rotation = euler_to_quaternion(0, 0, 0)

                # send the joint state and transform
                self.joint_pub.publish(joint_state)
                self.broadcaster.sendTransform(odom_trans)

                # Create new robot state
                """
                tilt += tinc
                if tilt < -0.5 or tilt > 0.0:
                    tinc *= -1
                height += hinc
                if height > 0.2 or height < 0.0:
                    hinc *= -1
                swivel += degree
                angle += degree / 4
                """
                # This will adjust as needed per iteration
                loop_rate.sleep()

        except KeyboardInterrupt:
            pass
Example #12
    def __init__(self):
        rclpy.init()
        super().__init__('state_publisher')

        qos_profile = QoSProfile(depth=10)
        self.joint_pub = self.create_publisher(JointState, 'joint_states', qos_profile)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.nodeName = self.get_name()
        self.get_logger().info("{0} started".format(self.nodeName))

        degree = pi / 180.0
        loop_rate = self.create_rate(30)

        # robot state
        tilt = 0.
        tinc = degree
        swivel = 0.
        angle = 0.
        height = 0.
        hinc = 0.005

        # message declarations
        odom_trans = TransformStamped()
        odom_trans.header.frame_id = 'odom'
        odom_trans.child_frame_id = 'axis'
        joint_state = JointState()

        try:
            while rclpy.ok():
                rclpy.spin_once(self)

                # update joint_state
                now = self.get_clock().now()
                joint_state.header.stamp = now.to_msg()
                joint_state.name = ['swivel', 'tilt', 'periscope']
                joint_state.position = [swivel, tilt, height]

                # update transform
                # (moving in a circle with radius=2)
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = cos(angle)*2
                odom_trans.transform.translation.y = sin(angle)*2
                odom_trans.transform.translation.z = 0.7
                odom_trans.transform.rotation = \
                    euler_to_quaternion(0, 0, angle + pi/2) # roll,pitch,yaw

                # send the joint state and transform
                self.joint_pub.publish(joint_state)
                self.broadcaster.sendTransform(odom_trans)

                # Create new robot state
                tilt += tinc
                if tilt < -0.5 or tilt > 0.0:
                    tinc *= -1
                height += hinc
                if height > 0.2 or height < 0.0:
                    hinc *= -1
                swivel += degree
                angle += degree/4

                # This will adjust as needed per iteration
                loop_rate.sleep()

        except KeyboardInterrupt:
            pass
Example #13
    master_gps = rospy.Publisher('gps_datum', NavSatFix, queue_size=5)
    master_msg = NavSatFix()
    master_msg.altitude = alt
    master_msg.longitude = lon
    master_msg.latitude = lat
    master_msg.status = NavSatStatus()
    master_msg.status.status = master_msg.status.STATUS_FIX
    master_msg.status.service = master_msg.status.SERVICE_GPS
    master_msg.position_covariance_type = master_msg.COVARIANCE_TYPE_UNKNOWN
    master_msg.position_covariance = [-1, 0, 0, 0, 0, 0, 0, 0, 0]
    pose_pub = rospy.Publisher('waterlinked/pose_with_cov_stamped',
                               PoseWithCovarianceStamped,
                               queue_size=5)
    buff = Buffer()
    TransformListener(buff)
    rospy.Subscriber("mavros/global_position/global", NavSatFix, handle_gps,
                     (pose_pub, buff))
    map_to_waterlink = TransformStamped(
        Header(0, rospy.Time.now(), 'map'), 'waterlinked',
        Transform(
            Vector3(0, 0, 0),  # translation must be a Vector3; None cannot be serialized
            Quaternion(*Rotation.from_euler('xyz', [0, 0, 90 - heading_offset],
                                            degrees=True).as_quat().tolist())))
    StaticTransformBroadcaster().sendTransform(map_to_waterlink)
    rate = rospy.Rate(4.0)
    while not rospy.is_shutdown():
        master_msg.header.seq += 1
        master_msg.header.stamp = rospy.Time.now()
        master_gps.publish(master_msg)
        rate.sleep()
Example #14
    def __init__(self):
        rclpy.init()
        super().__init__('face_detection')
        qos_profile = QoSProfile(depth=10)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.pub_point = self.create_publisher(PointStamped, "/head_detection",
                                               10)
        #Image publisher
        #self.pub_depth_img = self.create_publisher(sensor_msgs.msg.Image, "/depth_img", 10)
        self.pub_rectified_img = self.create_publisher(Image, "/rectified_img",
                                                       10)

        # Transform declaration
        self.transform = TransformStamped()
        self.transform.header.frame_id = 'oak-d_camera_center'
        self.transform.child_frame_id = 'head'

        # Detect face
        syncNN = True
        self.flipRectified = True

        # Get argument first
        model_filename = 'face-detection-retail-0004_openvino_2021.2_4shave.blob'
        nnPath = os.path.join(get_package_share_directory('blindbuy_oakd'),
                              'models', model_filename)
        print(nnPath)

        # Start defining a pipeline
        self.pipeline = dai.Pipeline()

        manip = self.pipeline.createImageManip()
        manip.initialConfig.setResize(300, 300)
        # The NN model expects BGR input. By default ImageManip output type would be same as input (gray in this case)
        manip.initialConfig.setFrameType(dai.RawImgFrame.Type.BGR888p)
        manip.setKeepAspectRatio(
            False)  #Squeeze image without cropping to fit 300x300 nn input

        # Define a neural network that will make predictions based on the source frames
        spatialDetectionNetwork = self.pipeline.createMobileNetSpatialDetectionNetwork(
        )
        spatialDetectionNetwork.setConfidenceThreshold(0.5)

        spatialDetectionNetwork.setBlobPath(nnPath)
        spatialDetectionNetwork.input.setBlocking(False)
        spatialDetectionNetwork.setBoundingBoxScaleFactor(0.5)
        spatialDetectionNetwork.setDepthLowerThreshold(100)
        spatialDetectionNetwork.setDepthUpperThreshold(5000)

        manip.out.link(spatialDetectionNetwork.input)

        # Create outputs
        xoutManip = self.pipeline.createXLinkOut()
        xoutManip.setStreamName("right")
        if (syncNN):
            spatialDetectionNetwork.passthrough.link(xoutManip.input)
        else:
            manip.out.link(xoutManip.input)

        depthRoiMap = self.pipeline.createXLinkOut()
        depthRoiMap.setStreamName("boundingBoxDepthMapping")

        xoutDepth = self.pipeline.createXLinkOut()
        xoutDepth.setStreamName("depth")

        nnOut = self.pipeline.createXLinkOut()
        nnOut.setStreamName("detections")
        spatialDetectionNetwork.out.link(nnOut.input)
        spatialDetectionNetwork.boundingBoxMapping.link(depthRoiMap.input)

        monoLeft = self.pipeline.createMonoCamera()
        monoRight = self.pipeline.createMonoCamera()
        stereo = self.pipeline.createStereoDepth()
        monoLeft.setResolution(
            dai.MonoCameraProperties.SensorResolution.THE_400_P)
        monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
        monoRight.setResolution(
            dai.MonoCameraProperties.SensorResolution.THE_400_P)
        monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)
        stereo.setOutputDepth(True)
        stereo.setConfidenceThreshold(255)
        stereo.setOutputRectified(True)

        stereo.rectifiedRight.link(manip.inputImage)

        monoLeft.out.link(stereo.left)
        monoRight.out.link(stereo.right)

        stereo.depth.link(spatialDetectionNetwork.inputDepth)
        spatialDetectionNetwork.passthroughDepth.link(xoutDepth.input)

        self.detect_face()
Example #15
    def fast_callback(self, event):
        """ Callback function that requests Waterlinked position and orientation
        information at a fast rate. """
        # Request current time and use it for all messages
        tnow = rospy.Time.now()
        if self._is_first_fast_loop:
            self._is_first_fast_loop = False
            self.f_cum_fast = 0
            self.n_fast = 0
            self._fast_t0 = tnow.to_sec()
        else:
            f = 1 / (tnow.to_sec() - self._fast_t0)
            self.f_cum_fast += f
            self.n_fast += 1
            f_avg = self.f_cum_fast / self.n_fast
            rospy.logdebug("fast loop (n = %d): f_avg = %.3f Hz" %
                           (self.n_fast, f_avg))

        # Initiate HTTP request to all URLs
        future_list = [
            self._session.get(url, timeout=2.0) for url in self._urls_fast
        ]

        try:
            # WARN: ORIENTATION IS CLOCKWISE REFERENCED FROM MAGNETIC NORTH
            # /waterlinked/external/orientation
            res_external_orientation = future_list[0].result()
            if res_external_orientation.ok:
                data = res_external_orientation.json()
                msg_external_orientation = ExternalOrientation()
                msg_external_orientation.header.stamp = tnow
                msg_external_orientation.orientation = data['orientation']
                self._pub_external_orientation.publish(
                    msg_external_orientation)

            # /waterlinked/position/acoustic/filtered
            res_position_acoustic_filtered = future_list[1].result()

            # WARN: WATERLINKED POSITION IS LEFT HANDED RFD -> X: RIGHT, y: FORWARDS, Z: DOWN
            # DO NOT USE ACOUSTIC_FILTERED FOR NAVIGATION!
            if res_position_acoustic_filtered.ok:
                data = res_position_acoustic_filtered.json()
                msg_position_acoustic_filtered = PositionAcousticFiltered()
                msg_position_acoustic_filtered.header.stamp = tnow
                msg_position_acoustic_filtered.header.frame_id = self._waterlinked_frame_id
                msg_position_acoustic_filtered.std = data['std']
                msg_position_acoustic_filtered.temp = data['temp']
                msg_position_acoustic_filtered.x = data['x']
                msg_position_acoustic_filtered.y = data['y']
                msg_position_acoustic_filtered.z = data['z']
                if self._pub_position_acoustic_filtered.get_num_connections(
                ) > 0:
                    rospy.logwarn_once(
                        "{} | waterlinked/acoustic_filtered is left-handed RFD, don't use for navigation, "
                        "use waterlinked/pose_with_cov_stamped (FLU) instead.".format(
                            rospy.get_name()))
                self._pub_position_acoustic_filtered.publish(
                    msg_position_acoustic_filtered)

                # Create message of the type geometry_msgs/PoseWithCovariance
                msg_pose_with_cov_stamped = PoseWithCovarianceStamped()
                var_xyz = pow(data['std'],
                              2)  # calculate variance from standard deviation
                msg_pose_with_cov_stamped.header.stamp = tnow
                msg_pose_with_cov_stamped.header.frame_id = self._waterlinked_frame_id
                msg_pose_with_cov_stamped.pose.pose.position.x = data['y']
                msg_pose_with_cov_stamped.pose.pose.position.y = -data['x']
                msg_pose_with_cov_stamped.pose.pose.position.z = -data['z']
                msg_pose_with_cov_stamped.pose.pose.orientation = Quaternion(
                    0, 0, 0, 1)
                msg_pose_with_cov_stamped.pose.covariance = [
                    var_xyz, 0, 0, 0, 0, 0, 0, var_xyz, 0, 0, 0, 0, 0, 0,
                    var_xyz, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                    0, 0, 0, 0
                ]
                self._pub_pos_with_covariance_stamped.publish(
                    msg_pose_with_cov_stamped)

            # /waterlinked/position/acoustic/raw
            res_position_acoustic_raw = future_list[2].result()
            if res_position_acoustic_raw.ok:
                data = res_position_acoustic_raw.json()
                msg_position_acoustic_raw = PositionAcousticRaw()
                msg_position_acoustic_raw.header.stamp = tnow
                msg_position_acoustic_raw.header.frame_id = self._waterlinked_frame_id
                msg_position_acoustic_raw.std = data['std']
                msg_position_acoustic_raw.temp = data['temp']
                msg_position_acoustic_raw.x = data['x']
                msg_position_acoustic_raw.y = data['y']
                msg_position_acoustic_raw.z = data['z']
                if self._pub_position_acoustic_raw.get_num_connections() > 0:
                    rospy.logwarn_once(
                        "{} | waterlinked/acoustic_raw is left-handed RFD, don't use for navigation, "
                        "use waterlinked/pose_with_cov_stamped (FLU) instead.".format(
                            rospy.get_name()))
                self._pub_position_acoustic_raw.publish(
                    msg_position_acoustic_raw)

            # /waterlinked/position/global
            res_position_global = future_list[3].result()
            if res_position_global.ok:
                data = res_position_global.json()
                msg_position_global = PositionGlobal()
                msg_position_global.header.stamp = tnow
                msg_position_global.lat = data['lat']
                msg_position_global.lon = data['lon']
                self._pub_position_global.publish(msg_position_global)

            # /waterlinked/position/master
            res_position_master = future_list[4].result()
            msg_position_master = None
            if res_position_master.ok:
                data = res_position_master.json()
                msg_position_master = PositionMaster()
                msg_position_master.header.stamp = tnow
                msg_position_master.cog = data['cog']
                msg_position_master.hdop = data['hdop']
                msg_position_master.lat = data['lat']
                msg_position_master.lon = data['lon']
                msg_position_master.numsats = data['numsats']
                msg_position_master.orientation = data['orientation']
                msg_position_master.sog = data['sog']
                self._pub_position_master.publish(msg_position_master)

            # CONVENTION: UTM -> WATERLINKED IS DEFINED BY UTM POSITION OF MASTER, ROTATED ACCORDING TO MASTER ORIENTATION
            # CONVENTION: UTM -> MAP IS DEFINED BY UTM POSITION OF MASTER, WITHOUT ANY ROTATION (ALIGNED WITH NORTH)
            # CONVENTION: UTM -> MAP CAN ALSO BE DEFINED BY AN EXTERNAL DATUM [LATITUDE, LONGITUDE]
            if self._send_tf and msg_position_master is not None:
                tf_map = TransformStamped()  # Map transformation
                tf_map.header.stamp = tnow
                tf_map.header.frame_id = self._map_frame_id
                tf_map.child_frame_id = self._waterlinked_frame_id
                # ORIENTATION IS PROVIDED AS NORTH REFERENCED CW
                # NEEDS TO BE CONVERTED TO EAST REFERENCED CCW
                q = Rotation.from_euler(
                    'xyz', [0, 0, 90 - msg_position_master.orientation],
                    degrees=True).as_quat()
                tf_map.transform.rotation = Quaternion(*q)
                self._tf_bcast.sendTransform(tf_map)
        except ConnectionError as e:
            self.connection_error()
Example #16
    def __init__(self):
        rclpy.init()
        super().__init__('state_publisher')

        qos_profile = QoSProfile(depth=10)
        self.joint_pub = self.create_publisher(JointState, 'joint_states',
                                               qos_profile)
        self.broadcaster = TransformBroadcaster(self, qos=qos_profile)
        self.nodeName = self.get_name()
        self.get_logger().info("{0} started".format(self.nodeName))

        loop_rate = self.create_rate(30)

        # robot state
        steering_angle = 0.
        wheel_angle = 0.

        # message declarations
        odom_trans = TransformStamped()
        odom_trans.header.frame_id = 'odom'
        odom_trans.child_frame_id = 'base_link'
        joint_state = JointState()

        try:
            t = 0
            while rclpy.ok():
                t += 1. / 30.
                self.get_logger().info(str(t))
                rclpy.spin_once(self)

                # Set angle
                rotation_names = [
                    'front_left_wheel_joint', 'front_right_wheel_joint',
                    'rear_right_wheel_joint', 'rear_left_wheel_joint'
                ]
                wheel_angle += 2 * pi * t / 50
                wheel_angle = atan2(sin(wheel_angle), cos(wheel_angle))
                rotation_position = [
                    wheel_angle, wheel_angle, wheel_angle, wheel_angle
                ]

                # Set steering
                steering_angle = pi / 3. * sin(t * 2 * pi / 4.)
                steering_names = [
                    'front_left_steering_joint', 'front_right_steering_joint'
                ]
                steering_position = [steering_angle, steering_angle]

                # update joint_state
                now = self.get_clock().now()
                joint_state.header.stamp = now.to_msg()
                joint_state.name = rotation_names + steering_names
                joint_state.position = rotation_position + steering_position

                # update transform
                # (moving in a circle with radius=2)
                angle = t * 2 * pi / 10.
                odom_trans.header.stamp = now.to_msg()
                odom_trans.transform.translation.x = cos(angle) * 2
                odom_trans.transform.translation.y = sin(angle) * 2
                odom_trans.transform.translation.z = 0.7
                odom_trans.transform.rotation = \
                    euler_to_quaternion(0, 0, angle + pi/2)  # roll,pitch,yaw

                # send the joint state and transform
                self.joint_pub.publish(joint_state)
                self.broadcaster.sendTransform(odom_trans)

                # This will adjust as needed per iteration
                loop_rate.sleep()

        except KeyboardInterrupt:
            pass
Example #17
        "Incorrect command line parameters! \"-cam\" cannot be used with \"-vid\"!"
    )
elif args.camera is False and args.video is None:
    raise ValueError(
        "Missing inference source! Either use \"-cam\" to run on DepthAI camera or \"-vid <path>\" to run on video file"
    )

rclpy.init(args=None)

node = rclpy.create_node('head_orientation')

qos_profile = QoSProfile(depth=10)
broadcaster = TransformBroadcaster(node, qos=qos_profile)

# Transform declaration
transform = TransformStamped()
transform.header.frame_id = 'oak-d_camera_center'
transform.child_frame_id = 'head'


def pose_to_quaternion(head_pose):
    # roll = head_pose[1] * np.pi / 180
    # pitch = head_pose[2] * np.pi / 180
    # yaw = head_pose[0] * np.pi / 180
    roll = -(head_pose[0] * np.pi / 180)
    pitch = head_pose[2] * np.pi / 180
    yaw = head_pose[1] * np.pi / 180

    qx = np.sin(roll / 2) * np.cos(pitch / 2) * np.cos(yaw / 2) - np.cos(
        roll / 2) * np.sin(pitch / 2) * np.sin(yaw / 2)
    qy = np.cos(roll / 2) * np.sin(pitch / 2) * np.cos(yaw / 2) + np.sin(