Example #1
    def imu_callback(self, msg):
        raw_imu_quat = QuaternionStamped()
        raw_imu_quat.header = msg.header
        raw_imu_quat.quaternion = msg.orientation

        raw_imu_av = Vector3Stamped()
        raw_imu_av.header = msg.header
        raw_imu_av.vector = msg.angular_velocity

        raw_imu_acc = Vector3Stamped()
        raw_imu_acc.header = msg.header
        raw_imu_acc.vector = msg.linear_acceleration
                
        try:
            self.listener.waitForTransform(self.base_link_frame, msg.header.frame_id, msg.header.stamp, rospy.Duration(1.0)) # gyrometer->body
            imu_rootlink_quat = self.listener.transformQuaternion(self.base_link_frame, raw_imu_quat)
            imu_rootlink_av = self.listener.transformVector3(self.base_link_frame, raw_imu_av)
            imu_rootlink_acc = self.listener.transformVector3(self.base_link_frame, raw_imu_acc)
            # todo: convert covariance
        except tf.Exception:
            rospy.logwarn("[%s] failed to solve imu_to_base tf: %s to %s", rospy.get_name(), msg.header.frame_id, self.base_link_frame)
            return

        msg.header = imu_rootlink_quat.header
        msg.orientation = imu_rootlink_quat.quaternion
        msg.angular_velocity = imu_rootlink_av.vector        
        msg.linear_acceleration = imu_rootlink_acc.vector
                
        # publish
        self.pub.publish(msg)
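
The callback above assumes a node that owns a tf listener, a publisher and the target frame name. A minimal wiring sketch is shown below; the class name, topic names and parameter name are assumptions made for illustration, not part of the original example.

import rospy
import tf
from sensor_msgs.msg import Imu
from geometry_msgs.msg import QuaternionStamped, Vector3Stamped

class ImuToBaseLink(object):  # hypothetical class name
    def __init__(self):
        self.base_link_frame = rospy.get_param("~base_link_frame", "base_link")
        self.listener = tf.TransformListener()
        self.pub = rospy.Publisher("imu_rootlink", Imu, queue_size=1)   # assumed topic
        self.sub = rospy.Subscriber("imu", Imu, self.imu_callback, queue_size=1)

    # imu_callback(self, msg) as defined above

if __name__ == "__main__":
    rospy.init_node("imu_to_base_link")
    ImuToBaseLink()
    rospy.spin()
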
Example #2
def do_transform_imu(imu_msg, transform):
    """
    Transforms the given Imu message into the given target frame
    :param imu_msg:
    :type imu_msg: Imu
    :param transform:
    :type transform: TransformStamped
    :return: Imu message transformed into the target frame
    :rtype: Imu
    """
    src_l = Vector3Stamped()
    src_l.header = imu_msg.header
    src_l.vector = imu_msg.linear_acceleration
    src_a = Vector3Stamped()
    src_a.header = imu_msg.header
    src_a.vector = imu_msg.angular_velocity
    src_o = QuaternionStamped()
    src_o.header = imu_msg.header
    src_o.quaternion = imu_msg.orientation

    tgt_l = do_transform_vector3(src_l, transform)
    tgt_a = do_transform_vector3(src_a, transform)
    tgt_o = do_transform_quaternion(src_o, transform)

    tgt_imu = copy.deepcopy(imu_msg)
    # the tf2 do_transform_* helpers stamp their results with the transform's
    # header frame, so the transformed Imu belongs in that frame as well
    tgt_imu.header.frame_id = transform.header.frame_id
    tgt_imu.linear_acceleration = tgt_l.vector
    tgt_imu.angular_velocity = tgt_a.vector
    tgt_imu.orientation = tgt_o.quaternion
    return tgt_imu
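
A sketch of how do_transform_imu might be called with a tf2 buffer; the frame, node and topic names are placeholders, and it assumes do_transform_imu (together with the do_transform_vector3 / do_transform_quaternion helpers it relies on) is importable as defined here.

import rospy
import tf2_ros
from sensor_msgs.msg import Imu

tf_buffer = tf2_ros.Buffer()
tf_listener = tf2_ros.TransformListener(tf_buffer)

def imu_cb(imu_msg):
    try:
        # transform from the IMU frame into base_link (assumed target frame)
        transform = tf_buffer.lookup_transform("base_link",
                                               imu_msg.header.frame_id,
                                               imu_msg.header.stamp,
                                               rospy.Duration(0.1))
    except tf2_ros.TransformException:
        rospy.logwarn_throttle(5.0, "IMU transform unavailable")
        return
    imu_pub.publish(do_transform_imu(imu_msg, transform))

rospy.init_node("imu_frame_converter")   # node and topic names are assumptions
imu_pub = rospy.Publisher("imu/base_link", Imu, queue_size=1)
rospy.Subscriber("imu/data", Imu, imu_cb, queue_size=1)
rospy.spin()
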
Example #3
    def publish(self, imu_msg, nav_msg):
        if self.tfBuffer.can_transform(nav_msg.header.frame_id, "odom",
                                       rospy.Time.now(),
                                       rospy.Duration.from_sec(0.5)):
            if self.tfBuffer.can_transform(imu_msg.header.frame_id,
                                           "base_link", rospy.Time.now(),
                                           rospy.Duration.from_sec(0.5)):
                unfiltered_msg = Odometry()
                unfiltered_msg.header.frame_id = "odom"
                unfiltered_msg.child_frame_id = "base_link"
                imu_q = QuaternionStamped()
                imu_q.quaternion = imu_msg.orientation
                imu_q.header = imu_msg.header
                unfiltered_msg.pose.pose.orientation = self.tfListener.transformQuaternion(
                    "base_link",
                    imu_q).quaternion  #TF2 FOR KINETIC JUST AIN'T WORKING
                nav_p = PointStamped()
                nav_p.point = nav_msg.pose.pose.position
                nav_p.header = nav_msg.header
                unfiltered_msg.pose.pose.position = self.tfListener.transformPoint(
                    "odom", nav_p).point
                self.pub.publish(unfiltered_msg)
            else:
                rospy.logwarn("{} to base_link tf unavailable!".format(
                    imu_msg.header.frame_id))
        else:
            rospy.logwarn("{} to odom tf unavailable!".format(
                nav_msg.header.frame_id))
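
This method expects a time-matched IMU/odometry pair; one plausible way to provide that, sketched under the assumption that the surrounding class looks roughly like this (class name and topic names are made up), is an approximate-time synchronizer:

import rospy
import tf
import tf2_ros
import message_filters
from sensor_msgs.msg import Imu
from nav_msgs.msg import Odometry

class UnfilteredOdomPublisher(object):  # hypothetical class name
    def __init__(self):
        self.tfBuffer = tf2_ros.Buffer()
        self.tf2Listener = tf2_ros.TransformListener(self.tfBuffer)
        self.tfListener = tf.TransformListener()  # the method falls back to tf1 for the actual transforms
        self.pub = rospy.Publisher("odometry/unfiltered", Odometry, queue_size=1)
        imu_sub = message_filters.Subscriber("imu/data", Imu)
        nav_sub = message_filters.Subscriber("gps/odom", Odometry)
        self.sync = message_filters.ApproximateTimeSynchronizer(
            [imu_sub, nav_sub], queue_size=10, slop=0.1)
        self.sync.registerCallback(self.publish)

    # publish(self, imu_msg, nav_msg) as defined above
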
Example #4
def do_transform_quaternion(quaternion_msg, transform):
    # Use PoseStamped to transform the Quaternion
    src_ps = PoseStamped()
    src_ps.header = quaternion_msg.header
    src_ps.pose.orientation = quaternion_msg.quaternion
    tgt_ps = do_transform_pose(src_ps, transform)
    tgt_o = QuaternionStamped()
    tgt_o.header = tgt_ps.header
    tgt_o.quaternion = tgt_ps.pose.orientation
    return tgt_o
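
For reference, a standalone call might look like the sketch below; the frames and the 90-degree yaw transform are made up for illustration, and do_transform_pose from tf2_geometry_msgs is assumed to be imported where do_transform_quaternion is defined.

import math
from geometry_msgs.msg import QuaternionStamped, TransformStamped
from tf.transformations import quaternion_from_euler

q_in = QuaternionStamped()
q_in.header.frame_id = "imu_link"   # assumed source frame
q_in.quaternion.w = 1.0             # identity orientation

t = TransformStamped()
t.header.frame_id = "base_link"     # assumed target frame
t.child_frame_id = "imu_link"
qt = quaternion_from_euler(0.0, 0.0, math.pi / 2.0)   # 90 deg yaw
(t.transform.rotation.x, t.transform.rotation.y,
 t.transform.rotation.z, t.transform.rotation.w) = qt

q_out = do_transform_quaternion(q_in, t)   # orientation expressed in base_link
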
Example #5
    def publishData(self):
        head = Header()
        head.stamp = rospy.Time.now()
        head.frame_id = 'local'

        # Publish Attitude
        myQuat = self.getQuat()
        outQuat = QuaternionStamped()
        outQuat.header = head
        outQuat.quaternion = myQuat
        self.attitude_pub_.publish(outQuat)

        # Publish IMU
        imuOut = Imu()
        imuOut.header = head
        imuOut.orientation = myQuat
        imuOut.angular_velocity = Vector3(x=0.0, y=0.0, z=self.yaw_rate_)
        imuOut.linear_acceleration = Vector3(x=self.acc_[0],
                                             y=self.acc_[1],
                                             z=self.acc_[2] + 9.81)
        self.imu_pub_.publish(imuOut)

        # Publish Velocity
        velOut = Vector3Stamped()
        velOut.header = head
        velOut.vector = Vector3(x=self.vel_[0], y=self.vel_[1], z=self.vel_[2])
        self.velocity_pub_.publish(velOut)

        # Publish Position
        posStamp = PointStamped()
        posStamp.header = head
        myPoint = Point(x=self.pos_[0], y=self.pos_[1], z=self.pos_[2])
        posStamp.point = myPoint
        self.position_pub_.publish(posStamp)

        # Publish Height Above Ground
        height = Float32(self.pos_[-1])
        # height.data = self.pos_(-1)
        self.height_pub_.publish(height)
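
The publishers and the loop that drives publishData are not part of the example; a minimal setup could look like the sketch below, where the topic names, the 50 Hz rate and the class name are assumptions.

import rospy
from std_msgs.msg import Float32
from sensor_msgs.msg import Imu
from geometry_msgs.msg import QuaternionStamped, Vector3Stamped, PointStamped

class StatePublisher(object):  # hypothetical wrapper around publishData()
    def __init__(self):
        self.attitude_pub_ = rospy.Publisher("attitude", QuaternionStamped, queue_size=1)
        self.imu_pub_ = rospy.Publisher("imu", Imu, queue_size=1)
        self.velocity_pub_ = rospy.Publisher("velocity", Vector3Stamped, queue_size=1)
        self.position_pub_ = rospy.Publisher("position", PointStamped, queue_size=1)
        self.height_pub_ = rospy.Publisher("height", Float32, queue_size=1)
        # state estimates, updated elsewhere in the class
        self.pos_, self.vel_, self.acc_ = [0.0] * 3, [0.0] * 3, [0.0] * 3
        self.yaw_rate_ = 0.0
        rospy.Timer(rospy.Duration(1.0 / 50.0), lambda event: self.publishData())

    # getQuat() and publishData() as defined above
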
    def inputcb(self, data):
        rospy.logdebug("inputcb triggered")

        # translation from base_link to left string expressed in the
        # base_link frame:
        vb = np.array([[0.0102, 0.0391, 0.086, 0]]).T
        gbs = np.array([[1, 0, 0, vb[0,0]],
                       [0, 0, -1, vb[1,0]],
                       [0, 1, 0, vb[2,0]],
                       [0, 0, 0, 1]])
        gsb = np.linalg.inv(gbs)

        # map to base stuff:
        pbm = np.array([[data.pose.pose.position.x,
                         data.pose.pose.position.y,
                         data.pose.pose.position.z]]).T
        qbm = np.array([[data.pose.pose.orientation.w,
                         data.pose.pose.orientation.x,
                         data.pose.pose.orientation.y,
                         data.pose.pose.orientation.z]]).T
        Rbm = quat_to_rot(qbm)
        gbm = to_se3(Rbm,pbm)
        gmb = np.linalg.inv(gbm)
        vm = np.dot(gmb,np.dot(gbs,np.dot(gsb,vb)))
        vm = np.ravel(vm)[0:3]

        ## so the first thing that we need to do is get the location
        ## of the robot in the "/optimization_frame"
        p = PointStamped()
        p.header = data.pose.header
        p.point = data.pose.pose.position
        p.point.x -= vm[0]
        p.point.y -= vm[1]
        p.point.z -= vm[2]
        quat = QuaternionStamped()
        quat.quaternion = data.pose.pose.orientation
        quat.header = p.header
        try:
            ptrans = self.listener.transformPoint("/optimization_frame", p)
            qtrans = self.listener.transformQuaternion("/optimization_frame", quat)
        except (tf.Exception):
            rospy.loginfo("tf exception caught !")
            return

        ## if we have not initialized the VI, let's do it, otherwise,
        ## let's integrate
        if not self.initialized_flag:
            q = [
                ptrans.point.x,
                ptrans.point.y-h0,
                ptrans.point.z,
                ptrans.point.x,
                ptrans.point.z,
                h0 ]
            self.sys.reset_integration(state=q)
            self.last_time = ptrans.header.stamp
            self.initialized_flag = True
            return

        # get operating_condition:
        if rospy.has_param("/operating_condition"):
            operating = rospy.get_param("/operating_condition")
        else:
            return


        # set string length
        self.len = data.left

        ## if we are not running, just reset the parameter:
        if operating != 2:
            self.initialized_flag = False
            return
        else:
            self.initialized_flag = True
            # if we are in the "running mode", let's integrate the VI,
            # and publish the results:
            rho = [ptrans.point.x, ptrans.point.z, self.len]
            dt = (ptrans.header.stamp - self.last_time).to_sec()
            self.last_time = ptrans.header.stamp
            rospy.logdebug("Taking a step! dt = "+str(dt))
            self.sys.take_step(dt, rho)
            ## now we can get the state of the system
            q = self.sys.get_current_configuration()

            ## now add the appropriate amount of noise:
            q += self.noise*np.random.normal(0,1,(self.sys.sys.nQ))
            new_point = PointStamped()
            new_point.point.x = q[0]
            new_point.point.y = q[1]
            new_point.point.z = q[2]
            new_point.header.frame_id = "/optimization_frame"
            new_point.header.stamp = rospy.rostime.get_rostime()
            rospy.logdebug("Publishing mass location")
            self.mass_pub.publish(new_point)

            ## we can also publish the planar results:
            config = PlanarSystemConfig()
            config.header.frame_id = "/optimization_frame"
            config.header.stamp = rospy.get_rostime()
            config.xm = q[0]
            config.ym = q[1]
            config.xr = rho[0]
            config.r = rho[2]
            self.plan_pub.publish(config)

            ## now we can send out the transform
            ns = rospy.get_namespace()
            fr = "mass_location"
            if len(ns) > 1:
                fr = rospy.names.canonicalize_name(ns+fr)

            # here we are going to do a bunch of geometry math to
            # ensure that the orientation of the frame that we are
            # placing at the mass location looks "nice"

            # zvec points from robot to the string
            zvec = np.array([q[0]-ptrans.point.x, q[1]-ptrans.point.y, q[2]-ptrans.point.z])
            zvec = zvec/np.linalg.norm(zvec)
            # get transform from incoming header frame to the optimization frame
            quat = qtrans.quaternion
            qtmp = np.array([quat.x, quat.y, quat.z, quat.w])
            # convert to SO(3), and extract the y-vector for the frame
            R1 = tf.transformations.quaternion_matrix(qtmp)[:3,:3]
            yvec = -R1[:,1]
            yvec[1] = (-yvec[0]*zvec[0]-yvec[2]*zvec[2])/zvec[1]
            # x vector is a cross of y and z
            xvec = np.cross(yvec, zvec)
            # build rotation matrix and send transform
            R = np.column_stack((xvec,yvec,zvec,np.array([0,0,0])))
            R = np.row_stack((R,np.array([0,0,0,1])))
            quat = tuple(tf.transformations.quaternion_from_matrix(R).tolist())
            self.br.sendTransform((new_point.point.x, new_point.point.y, new_point.point.z),
                                  quat,
                                  new_point.header.stamp,
                                  fr,
                                  "/optimization_frame")



        return
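
The quat_to_rot and to_se3 helpers used in inputcb are not shown in the example; the sketch below is one plausible implementation consistent with how they are called (a w-first 4x1 quaternion in, a 3x3 rotation and a 4x4 homogeneous transform out).

import numpy as np

def quat_to_rot(q):
    # rotation matrix from a unit quaternion given as a 4x1 array [w, x, y, z]
    w, x, y, z = np.ravel(q)
    return np.array([
        [1 - 2 * (y * y + z * z), 2 * (x * y - w * z),     2 * (x * z + w * y)],
        [2 * (x * y + w * z),     1 - 2 * (x * x + z * z), 2 * (y * z - w * x)],
        [2 * (x * z - w * y),     2 * (y * z + w * x),     1 - 2 * (x * x + y * y)]])

def to_se3(R, p):
    # homogeneous 4x4 transform from a 3x3 rotation R and a 3x1 position p
    g = np.eye(4)
    g[:3, :3] = R
    g[:3, 3] = np.ravel(p)
    return g
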
Example #7
    def yuv_callback(self, yuv_frame):
        # Use OpenCV to convert the yuv frame to RGB
        # the VideoFrame.info() dictionary contains some useful information,
        # such as the video resolution
        info = yuv_frame.info()
        rospy.logdebug_throttle(10, "yuv_frame.info = " + str(info))
        cv2_cvt_color_flag = {
            olympe.PDRAW_YUV_FORMAT_I420: cv2.COLOR_YUV2BGR_I420,
            olympe.PDRAW_YUV_FORMAT_NV12: cv2.COLOR_YUV2BGR_NV12,
        }[info["yuv"]["format"]]  # convert pdraw YUV flag to OpenCV YUV flag
        cv2frame = cv2.cvtColor(yuv_frame.as_ndarray(), cv2_cvt_color_flag)

        # Publish image
        msg_image = self.bridge.cv2_to_imgmsg(cv2frame, "bgr8")
        self.pub_image.publish(msg_image)

        # yuv_frame.vmeta() returns a dictionary that contains additional metadata from the drone (GPS coordinates, battery percentage, ...)
        metadata = yuv_frame.vmeta()
        rospy.logdebug_throttle(10, "yuv_frame.vmeta = " + str(metadata))

        if metadata[1] is not None:
            header = Header()
            header.stamp = rospy.Time.now()
            header.frame_id = '/body'

            frame_timestamp = metadata[1]['frame_timestamp']  # timestamp [microsec]
            msg_time = Time()
            msg_time.data = rospy.Time(secs=int(frame_timestamp // 1e6),
                                       nsecs=int(frame_timestamp % 1e6 * 1e3))
            self.pub_time.publish(msg_time)

            drone_quat = metadata[1]['drone_quat']  # attitude
            msg_attitude = QuaternionStamped()
            msg_attitude.header = header
            msg_attitude.quaternion = Quaternion(drone_quat['x'],
                                                 -drone_quat['y'],
                                                 -drone_quat['z'],
                                                 drone_quat['w'])
            self.pub_attitude.publish(msg_attitude)

            location = metadata[1][
                'location']  # GPS location [500.0=not available] (decimal deg)
            msg_location = PointStamped()
            if location != {}:
                msg_location.header = header
                msg_location.header.frame_id = '/world'
                msg_location.point.x = location['latitude']
                msg_location.point.y = location['longitude']
                msg_location.point.z = location['altitude']
                self.pub_location.publish(msg_location)

            ground_distance = metadata[1]['ground_distance']  # barometer (m)
            self.pub_height.publish(ground_distance)

            speed = metadata[1]['speed']  # opticalflow speed (m/s)
            msg_speed = Vector3Stamped()
            msg_speed.header = header
            msg_speed.header.frame_id = '/world'
            msg_speed.vector.x = speed['north']
            msg_speed.vector.y = -speed['east']
            msg_speed.vector.z = -speed['down']
            self.pub_speed.publish(msg_speed)

            air_speed = metadata[1][
                'air_speed']  # air speed [-1=no data, > 0] (m/s)
            self.pub_air_speed.publish(air_speed)

            link_goodput = metadata[1][
                'link_goodput']  # throughput of the connection (b/s)
            self.pub_link_goodput.publish(link_goodput)

            link_quality = metadata[1]['link_quality']  # [0=bad, 5=good]
            self.pub_link_quality.publish(link_quality)

            wifi_rssi = metadata[1][
                'wifi_rssi']  # signal strength [-100=bad, 0=good] (dBm)
            self.pub_wifi_rssi.publish(wifi_rssi)

            battery_percentage = metadata[1][
                'battery_percentage']  # [0=empty, 100=full]
            self.pub_battery.publish(battery_percentage)

            state = metadata[1][
                'state']  # ['LANDED', 'MOTOR_RAMPING', 'TAKINGOFF', 'HOVERING', 'FLYING', 'LANDING', 'EMERGENCY']
            self.pub_state.publish(state)

            mode = metadata[1][
                'mode']  # ['MANUAL', 'RETURN_HOME', 'FLIGHT_PLAN', 'TRACKING', 'FOLLOW_ME', 'MOVE_TO']
            self.pub_mode.publish(mode)

            msg_pose = PoseStamped()
            msg_pose.header = header
            msg_pose.pose.position = msg_location.point
            msg_pose.pose.position.z = ground_distance
            msg_pose.pose.orientation = msg_attitude.quaternion
            self.pub_pose.publish(msg_pose)

            Rot = R.from_quat([
                drone_quat['x'], -drone_quat['y'], -drone_quat['z'],
                drone_quat['w']
            ])
            drone_rpy = Rot.as_euler('xyz')

            msg_odometry = Odometry()
            msg_odometry.header = header
            msg_odometry.child_frame_id = '/body'
            msg_odometry.pose.pose = msg_pose.pose
            msg_odometry.twist.twist.linear.x = math.cos(
                drone_rpy[2]) * msg_speed.vector.x + math.sin(
                    drone_rpy[2]) * msg_speed.vector.y
            msg_odometry.twist.twist.linear.y = -math.sin(
                drone_rpy[2]) * msg_speed.vector.x + math.cos(
                    drone_rpy[2]) * msg_speed.vector.y
            msg_odometry.twist.twist.linear.z = msg_speed.vector.z
            self.pub_odometry.publish(msg_odometry)

            # log battery percentage
            if battery_percentage >= 30:
                if battery_percentage % 10 == 0:
                    rospy.loginfo_throttle(
                        100, "Battery level: " + str(battery_percentage) + "%")
            elif battery_percentage >= 20:
                rospy.logwarn_throttle(
                    10, "Low battery: " + str(battery_percentage) + "%")
            elif battery_percentage >= 10:
                rospy.logerr_throttle(
                    1, "Critical battery: " + str(battery_percentage) + "%")
            else:
                rospy.logfatal_throttle(
                    0.1, "Empty battery: " + str(battery_percentage) + "%")

            # log signal strength
            if wifi_rssi <= -60:
                if wifi_rssi >= -70:
                    rospy.loginfo_throttle(
                        100, "Signal strength: " + str(wifi_rssi) + "dBm")
                elif wifi_rssi >= -80:
                    rospy.logwarn_throttle(
                        10, "Weak signal: " + str(wifi_rssi) + "dBm")
                elif wifi_rssi >= -90:
                    rospy.logerr_throttle(
                        1, "Unreliable signal: " + str(wifi_rssi) + "dBm")
                else:
                    rospy.logfatal_throttle(
                        0.1, "Unusable signal: " + str(wifi_rssi) + "dBm")
        else:
            rospy.logwarn("Packet lost!")
    def measSensorLoopTimerCallback(self, timer_msg):

        # Get time
        time_stamp_current = rospy.Time.now()

        #
        if not self.flag_robot_pose_set:
            return

        # Computing the measurement

        #
        meas_posi = np.zeros((3, ), dtype=float)
        meas_atti_quat = ars_lib_helpers.Quaternion.zerosQuat()

        # Attitude
        noise_atti_ang = np.zeros((3, ), dtype=float)
        noise_atti_ang[0] = np.random.normal(loc=0.0,
                                             scale=math.sqrt(
                                                 self.cov_meas_att['x']))
        noise_atti_ang[1] = np.random.normal(loc=0.0,
                                             scale=math.sqrt(
                                                 self.cov_meas_att['y']))
        noise_atti_ang[2] = np.random.normal(loc=0.0,
                                             scale=math.sqrt(
                                                 self.cov_meas_att['z']))
        noise_atti_quat_tf = tf.transformations.quaternion_from_euler(
            noise_atti_ang[0],
            noise_atti_ang[1],
            noise_atti_ang[2],
            axes='sxyz')
        noise_atti_quat = np.roll(noise_atti_quat_tf, 1)
        meas_atti_quat = ars_lib_helpers.Quaternion.quatProd(
            self.robot_atti_quat, noise_atti_quat)

        # Covariance
        #meas_cov_atti = np.diag(self.cov_meas_att['x'], self.cov_meas_att['y'], self.cov_meas_att['z']])

        # Filling the message

        #
        meas_header_msg = Header()
        meas_robot_atti_msg = Quaternion()
        meas_robot_atti_stamp_msg = QuaternionStamped()

        #
        meas_header_msg.frame_id = self.robot_frame_id
        meas_header_msg.stamp = self.robot_pose_timestamp

        # Attitude
        meas_robot_atti_msg.w = meas_atti_quat[0]
        meas_robot_atti_msg.x = meas_atti_quat[1]
        meas_robot_atti_msg.y = meas_atti_quat[2]
        meas_robot_atti_msg.z = meas_atti_quat[3]

        #
        meas_robot_atti_stamp_msg.header = meas_header_msg
        meas_robot_atti_stamp_msg.quaternion = meas_robot_atti_msg

        #
        self.meas_robot_atti_pub.publish(meas_robot_atti_stamp_msg)

        #
        return
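
The ars_lib_helpers.Quaternion helpers are project-specific; a minimal stand-in consistent with how they are used here (w-first quaternions, Hamilton product) could look like this:

import numpy as np

class Quaternion(object):
    # minimal stand-in for ars_lib_helpers.Quaternion, w-first convention

    @staticmethod
    def zerosQuat():
        # identity rotation [w, x, y, z]
        return np.array([1.0, 0.0, 0.0, 0.0])

    @staticmethod
    def quatProd(p, q):
        # Hamilton product p * q, both given as [w, x, y, z]
        pw, px, py, pz = p
        qw, qx, qy, qz = q
        return np.array([pw * qw - px * qx - py * qy - pz * qz,
                         pw * qx + px * qw + py * qz - pz * qy,
                         pw * qy - px * qz + py * qw + pz * qx,
                         pw * qz + px * qy - py * qx + pz * qw])
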
def odom_callback(data):
    q = QuaternionStamped()
    q.header = data.header
    q.quaternion = data.pose.pose.orientation
    attitude_pub.publish(q)
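
A complete node around this callback can be as small as the sketch below; the node and topic names are assumptions.

import rospy
from nav_msgs.msg import Odometry
from geometry_msgs.msg import QuaternionStamped

# odom_callback(data) as defined above

if __name__ == "__main__":
    rospy.init_node("odom_to_attitude")
    attitude_pub = rospy.Publisher("attitude", QuaternionStamped, queue_size=1)
    rospy.Subscriber("odom", Odometry, odom_callback, queue_size=1)
    rospy.spin()
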