def execute(self):
     """Publish buffered TF transforms until ROS shuts down.

     Repeatedly drains ``self.tf_buffer`` (guarded by ``self.tf_lock``),
     sorts the pending transforms by header timestamp, broadcasts them as a
     single tfMessage, then sleeps at rate ``self.r``.
     """
     while not rospy.is_shutdown():
         with self.tf_lock:
             if len(self.tf_buffer) > 0:
                 # Oldest-first; downstream tf consumers appear to need
                 # timestamp order -- TODO confirm that requirement.
                 self.tf_buffer.sort(key = lambda msg: msg.header.stamp.to_sec()) # It seems that transformations in tfMessage needs to be sorted by timestamp
                 self.broadcaster.publish(tfMessage(self.tf_buffer))
                 self.tf_buffer = []
         self.r.sleep()
Beispiel #2
0
def to_tf(estimate):
    """Build a tfMessage from a calibration estimate.

    Emits one transform per camera (named by its camera_id) followed by one
    per target (named 'target_<index>'), all stamped with the current time.
    """
    stamp = rospy.Time.now()
    camera_tfs = [pose_to_transform(cam.pose, cam.camera_id, stamp)
                  for cam in estimate.cameras]
    target_tfs = [pose_to_transform(tgt, 'target_%d' % idx, stamp)
                  for idx, tgt in enumerate(estimate.targets)]
    result = tfMessage()
    result.transforms.extend(camera_tfs + target_tfs)
    return result
Beispiel #3
0
def callback(msg, filt):
    """Republish only the transforms whose parent frame mentions map/odom.

    NOTE(review): the ``filt`` argument is immediately replaced by a fresh
    tfMessage, so whatever the caller passes in is ignored.
    """
    filt = tfMessage()
    for tr in msg.transforms:
        # Preserves the original behaviour: a frame matching both
        # substrings is appended twice, in this order.
        for key in ('map', 'odom'):
            if key in tr.header.frame_id:
                filt.transforms.append(tr)
    if filt.transforms:
        pub.publish(filt)
def read_nav_csv(fname, origin):
    '''Read nav.csv, shift each pose into a local frame anchored at
    `origin`, and return one tfMessage per row (a map->body pose plus a
    fixed map_rviz->map 180-degree flip for visualisation).'''
    msgs = []
    deg2rad = m.pi / 180.0
    with open(fname, 'r') as nav_file:
        # Row format: [time N E D R P Y] in a UTM cartesian frame, e.g.
        # 1354679075.295000000,6248548.85357,332949.839026,-41.49,...
        for seq, row in enumerate(nav_file):
            fields = row.split(',')
            secs, nsecs = map(int, fields[0].split('.'))
            # Translate into the local frame located at `origin`.
            north = float(fields[1]) - origin[0]
            east = float(fields[2]) - origin[1]
            down = float(fields[3]) - origin[2]
            roll = float(fields[4])
            pitch = float(fields[5])
            yaw = float(fields[6])
            # map -> body transform for this row.
            body_tf = TransformStamped()
            body_tf.header.stamp = Time(secs, nsecs)
            body_tf.header.seq = seq
            body_tf.header.frame_id = "map"
            body_tf.child_frame_id = "body"
            body_tf.transform.translation.x = north
            body_tf.transform.translation.y = east
            body_tf.transform.translation.z = down
            quat = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
            body_tf.transform.rotation.x = quat[0]
            body_tf.transform.rotation.y = quat[1]
            body_tf.transform.rotation.z = quat[2]
            body_tf.transform.rotation.w = quat[3]
            # Static roll of 180 degrees so the frame renders upright in rviz.
            rviz_tf = TransformStamped()
            rviz_tf.header.stamp = Time(secs, nsecs)
            rviz_tf.header.seq = seq
            rviz_tf.header.frame_id = "map_rviz"
            rviz_tf.child_frame_id = "map"
            rviz_tf.transform.translation.x = 0
            rviz_tf.transform.translation.y = 0
            rviz_tf.transform.translation.z = 0
            flip = tf.transformations.quaternion_from_euler(deg2rad * 180, 0, 0)  # ax, ay, az
            rviz_tf.transform.rotation.x = flip[0]
            rviz_tf.transform.rotation.y = flip[1]
            rviz_tf.transform.rotation.z = flip[2]
            rviz_tf.transform.rotation.w = flip[3]
            # Bundle both transforms into one message, body pose first.
            tf_msg = tfMessage()
            tf_msg.transforms.append(body_tf)
            tf_msg.transforms.append(rviz_tf)
            msgs.append(tf_msg)

    return msgs
 def execute(self):
     """Publish buffered TF transforms until ROS shuts down.

     Drains ``self.tf_buffer`` under ``self.tf_lock``, sorts by header
     timestamp, and broadcasts everything as one tfMessage per cycle.
     """
     while not rospy.is_shutdown():
         with self.tf_lock:
             if len(self.tf_buffer) > 0:
                 # Oldest-first ordering -- presumably required by the
                 # consumers of tfMessage; TODO confirm.
                 self.tf_buffer.sort(
                     key=lambda msg: msg.header.stamp.to_sec()
                 )  # It seems that transformations in tfMessage needs to be sorted by timestamp
                 self.broadcaster.publish(tfMessage(self.tf_buffer))
                 self.tf_buffer = []
         self.r.sleep()
def ROSPublishTopics(params):
    global messagePages
    global messageSendingPage
    global pageMutex
    global publisherMap
    global oldPI
    startT = timer()
    tfMessages = tfMessage()
    seqTf = 0
    for message in params:
        topic = message['topic']
        params = message['params']
        if ((topic == '/tf') or (topic == 'tf')):
            msg = geometry_msgs.msg.TransformStamped()
            msg.header.seq = seqTf
            msg.header.stamp = rospy.Time.now()
            msg.header.frame_id = params['frame_id']
            msg.child_frame_id = params['child_frame_id']
            msg.transform.translation.x = float(params['x'])
            msg.transform.translation.y = float(params['y'])
            msg.transform.translation.z = float(params['z'])
            msg.transform.rotation.x = float(params['qx'])
            msg.transform.rotation.y = float(params['qy'])
            msg.transform.rotation.z = float(params['qz'])
            msg.transform.rotation.w = float(params['qw'])
            seqTf = seqTf + 1
            tfMessages.transforms.append(msg)
        elif topic in publisherMap:
            publisherEntry = publisherMap[topic]
            publisher = publisherEntry[0]
            reader = publisherEntry[1]
            publisher.publish(reader(params))
        elif (topic == 'huh'):
            print "Placeholder topic"
        else:
            print "Unrecognized topic " + topic
    TFPublisher.publish(tfMessages)

    pageMutex.acquire()
    messagePages[messageSendingPage[0]].clear()
    messageSendingPage[0] = messageSendingPage[0] ^ 1
    pageMutex.release()

    print "PI: " + str(
        messageSendingPage[0]) + " " + str(oldPI ^ messageSendingPage[0])
    if 0 == oldPI ^ messageSendingPage[0]:
        print "!!!!!!! FLIP !!!!!!!"
    oldPI = messageSendingPage[0]
    if messagePages[messageSendingPage[0]] != {}:
        print str({"messages": messagePages[messageSendingPage[0]]})

    endT = timer()
    print "T: " + str(endT - startT)
    return {"messages": messagePages[messageSendingPage[0]]}
Beispiel #7
0
    def __init__(self):
        """Initialise particle-filter state, noise parameters and the
        default estimated pose.

        Assumes self.INIT_X / INIT_Y / INIT_Z / INIT_HEADING are provided
        by a subclass or class attribute -- TODO confirm where they are set.
        """
        # Previously recorded steering/pose state.
        self.pre_st_x = 0
        self.pre_st_y = 0
        self.pre_st_w = 0

        # Zeroed previous pose (note: orientation w=0 is not a valid unit
        # quaternion -- presumably a sentinel until the first update).
        self._pre_pose = Pose()
        self._pre_pose.position.x = 0
        self._pre_pose.position.y = 0
        self._pre_pose.position.z = 0
        self._pre_pose.orientation.x = 0
        self._pre_pose.orientation.y = 0
        self._pre_pose.orientation.z = 0
        self._pre_pose.orientation.w = 0

        # Initialise fields
        self.estimatedpose = PoseWithCovarianceStamped()
        self.occupancy_map = OccupancyGrid()
        self.particlecloud = PoseArray()
        self.tf_message = tfMessage()

        self._update_lock = Lock()

        # Parameters
        self.ODOM_ROTATION_NOISE = 0  # Odometry model rotation noise
        self.ODOM_TRANSLATION_NOISE = 0  # Odometry x axis (forward) noise
        self.ODOM_DRIFT_NOISE = 0  # Odometry y axis (side-side) noise
        self.NUMBER_PREDICTED_READINGS = 20

        # Set 'previous' translation to origin
        # All Transforms are given relative to 0,0,0, not in absolute coords.
        self.prev_odom_x = 0.0  # Previous odometry translation from origin
        self.prev_odom_y = 0.0  # Previous odometry translation from origin
        self.prev_odom_heading = 0.0  # Previous heading from odometry data
        self.last_odom_pose = None

        # Request robot's initial odometry values to be recorded in prev_odom
        self.odom_initialised = False
        self.sensor_model_initialised = False

        # Set default initial pose to initial position and orientation.
        self.estimatedpose.pose.pose.position.x = self.INIT_X
        self.estimatedpose.pose.pose.position.y = self.INIT_Y
        self.estimatedpose.pose.pose.position.z = self.INIT_Z
        self.estimatedpose.pose.pose.orientation = rotateQuaternion(
            Quaternion(w=1.0), self.INIT_HEADING)
        # NOTE: Currently not making use of covariance matrix

        self.estimatedpose.header.frame_id = "/map"
        self.particlecloud.header.frame_id = "/map"

        # Sensor model
        self.sensor_model = sensor_model.SensorModel()
Beispiel #8
0
    def __init__(self):
        """Initialise particle-filter state, noise parameters and the
        default estimated pose (read from self.INIT_X/Y/Z/INIT_HEADING)."""
        # Previously recorded steering/pose state.
        self.pre_st_x = 0
        self.pre_st_y = 0
        self.pre_st_w = 0

        # Zeroed previous pose.
        self._pre_pose = Pose()
        for axis in ('x', 'y', 'z'):
            setattr(self._pre_pose.position, axis, 0)
        for axis in ('x', 'y', 'z', 'w'):
            setattr(self._pre_pose.orientation, axis, 0)

        # Message containers updated/published by the filter.
        self.estimatedpose = PoseWithCovarianceStamped()
        self.occupancy_map = OccupancyGrid()
        self.particlecloud = PoseArray()
        self.tf_message = tfMessage()

        self._update_lock = Lock()

        # Odometry noise parameters: rotation, x (forward), y (side-side).
        self.ODOM_ROTATION_NOISE = 0
        self.ODOM_TRANSLATION_NOISE = 0
        self.ODOM_DRIFT_NOISE = 0
        self.NUMBER_PREDICTED_READINGS = 20

        # 'Previous' odometry starts at the origin: all transforms are
        # given relative to 0,0,0, not in absolute coordinates.
        self.prev_odom_x = 0.0
        self.prev_odom_y = 0.0
        self.prev_odom_heading = 0.0
        self.last_odom_pose = None

        # The robot's first odometry reading still needs to be recorded.
        self.odom_initialised = False
        self.sensor_model_initialised = False

        # Default estimated pose from the INIT_* constants.
        self.estimatedpose.pose.pose.position.x = self.INIT_X
        self.estimatedpose.pose.pose.position.y = self.INIT_Y
        self.estimatedpose.pose.pose.position.z = self.INIT_Z
        self.estimatedpose.pose.pose.orientation = rotateQuaternion(
            Quaternion(w=1.0), self.INIT_HEADING)
        # NOTE: the covariance matrix is currently unused.

        self.estimatedpose.header.frame_id = "/map"
        self.particlecloud.header.frame_id = "/map"

        # Sensor model used for laser updates.
        self.sensor_model = sensor_model.SensorModel()
Beispiel #9
0
def to_tf(estimate):
    """Build a tfMessage from a calibration estimate.

    One transform per camera (frame named by camera_id), then one per
    target (frame 'target_<index>'), all stamped with the current time.
    """
    time = rospy.Time.now()
    msg = tfMessage()
    for camera in estimate.cameras:
        msg.transforms.append(
            pose_to_transform(camera.pose, camera.camera_id, time))
    # zip with range(len(...)) pairs each target with its index.
    for target, target_id in zip(estimate.targets,
                                 range(len(estimate.targets))):
        msg.transforms.append(
            pose_to_transform(target, 'target_%d' % target_id, time))
    return msg
 def run(self):
   """Republish empty tf messages and report the observed /tf rate.

   Publishes an empty tfMessage each loop iteration; every 5 seconds the
   measured callback rate is published on /status and the counter and
   timing window are reset under self.lock.
   """
   tf_publisher = rospy.Publisher('tf', tf_msgs.tfMessage)
   status_publisher = rospy.Publisher('status', std_msgs.String)
   rospy.Subscriber('tf', tf_msgs.tfMessage, self.callback)
   while not rospy.is_shutdown():
     tf_publisher.publish(tf_msgs.tfMessage())
     if (rospy.Time.now() - self.time) > rospy.Duration(5):
       # Fix: use the builtin next() instead of the Python-2-only
       # iterator .next() method (matches the sibling implementation).
       status_publisher.publish(std_msgs.String('%.2f Hz' % (next(self.count) / 5.0)))
       with self.lock:
         self.count = itertools.count()
         self.time = rospy.Time.now()
Beispiel #11
0
 def run(self):
     """Republish empty tf messages and report the observed /tf rate.

     Publishes an empty tfMessage each loop; every 5 seconds, publishes
     the measured callback rate on /status and resets counter and window.
     """
     tf_publisher = rospy.Publisher('tf', tf_msgs.tfMessage)
     status_publisher = rospy.Publisher('status', std_msgs.String)
     rospy.Subscriber('tf', tf_msgs.tfMessage, self.callback)
     while not rospy.is_shutdown():
         tf_publisher.publish(tf_msgs.tfMessage())
         if (rospy.Time.now() - self.time) > rospy.Duration(5):
             # count holds the number of callbacks seen in the 5 s window.
             status_publisher.publish(
                 std_msgs.String('%.2f Hz' % (next(self.count) / 5.0)))
             with self.lock:
                 self.count = itertools.count()
                 self.time = rospy.Time.now()
Beispiel #12
0
def CreateBag(matfile, bagname, frame_id, navsat_topic="/fix"):
    '''Convert an 'rnv' .mat navigation log into a rosbag.

    For each sample, writes a NavSatFix on `navsat_topic`, a map->frame_id
    TransformStamped on /tf and /transform, and an Odometry on /odom.
    Positions are UTM coordinates relative to the first fix; heading is
    estimated from the displacement against a rolling mean of up to the
    last 20 positions.
    '''
    bag = rosbag.Bag(bagname, 'w', compression=rosbag.Compression.BZ2, chunk_threshold=32 * 1024 * 1024)
    data = loadmat(matfile)['rnv']
    rnv = {}
    for i, col in enumerate(data.dtype.names):
        rnv[col] = data.item()[i]
    try:
        ref = None
        x, y = [], []
        for i, t in enumerate(tqdm(rnv['t'])):
            stamp = rospy.Time.from_sec(t)
            nav_msg = NavSatFix()
            nav_msg.header.stamp = stamp
            nav_msg.header.frame_id = "map"
            nav_msg.header.seq = i
            nav_msg.latitude = rnv['lat'][i][0]
            nav_msg.longitude = rnv['lon'][i][0]
            nav_msg.altitude = -rnv['depth'][i][0]  # depth is positive-down
            nav_msg.position_covariance_type = nav_msg.COVARIANCE_TYPE_UNKNOWN

            transform_msg = TransformStamped()
            transform_msg.header = copy(nav_msg.header)
            # Fix: convert to a UTM point once per sample instead of up to
            # three separate toPoint() calls.
            point = fromLatLong(nav_msg.latitude, nav_msg.longitude,
                                nav_msg.altitude).toPoint()
            if ref is None:
                ref = point  # first fix anchors the local frame
            x.append(point.x - ref.x)
            y.append(point.y - ref.y)
            # Displacement from the rolling mean of the last <=20 positions.
            dx = x[-1] - sum(x[-min(20, len(x)):]) / min(20, len(x))
            dy = y[-1] - sum(y[-min(20, len(y)):]) / min(20, len(y))
            transform_msg.transform.translation.x = x[-1]
            transform_msg.transform.translation.y = y[-1]
            transform_msg.transform.translation.z = nav_msg.altitude
            transform_msg.transform.rotation = Quaternion(
                *tf_conversions.transformations.quaternion_from_euler(
                    0, 0, math.atan2(dy, dx)))
            transform_msg.child_frame_id = frame_id
            tf_msg = tfMessage(transforms=[transform_msg])

            odometry_msg = Odometry()
            odometry_msg.header = copy(transform_msg.header)
            odometry_msg.child_frame_id = frame_id
            odometry_msg.pose.pose.position = transform_msg.transform.translation
            odometry_msg.pose.pose.orientation = transform_msg.transform.rotation

            bag.write(navsat_topic, nav_msg, stamp)
            bag.write("/tf", tf_msg, stamp)
            bag.write("/transform", transform_msg, stamp)
            bag.write("/odom", odometry_msg, stamp)
    finally:
        bag.close()
Beispiel #13
0
    def recalculate_transform(self, currentTime):
        """
        Creates updated transform from /odom to /map given recent odometry and
        laser data.

        :Args:
            | currentTime (rospy.Time()): Time stamp for this update
        """

        transform = Transform()

        # Homogeneous 4x4 pose of the robot in the map frame, built from the
        # current estimated orientation and position.
        T_est = transformations.quaternion_matrix([
            self.estimatedpose.pose.pose.orientation.x,
            self.estimatedpose.pose.pose.orientation.y,
            self.estimatedpose.pose.pose.orientation.z,
            self.estimatedpose.pose.pose.orientation.w
        ])
        T_est[0, 3] = self.estimatedpose.pose.pose.position.x
        T_est[1, 3] = self.estimatedpose.pose.pose.position.y
        T_est[2, 3] = self.estimatedpose.pose.pose.position.z

        # Homogeneous 4x4 pose of the robot in the odom frame.
        T_odom = transformations.quaternion_matrix([
            self.last_odom_pose.pose.pose.orientation.x,
            self.last_odom_pose.pose.pose.orientation.y,
            self.last_odom_pose.pose.pose.orientation.z,
            self.last_odom_pose.pose.pose.orientation.w
        ])
        T_odom[0, 3] = self.last_odom_pose.pose.pose.position.x
        T_odom[1, 3] = self.last_odom_pose.pose.pose.position.y
        T_odom[2, 3] = self.last_odom_pose.pose.pose.position.z
        # map->odom correction: T = T_map_base * inv(T_odom_base).
        T = np.dot(T_est, np.linalg.inv(T_odom))
        q = transformations.quaternion_from_matrix(T)  #[:3, :3])

        transform.translation.x = T[0, 3]
        transform.translation.y = T[1, 3]
        transform.translation.z = T[2, 3]
        transform.rotation.x = q[0]
        transform.rotation.y = q[1]
        transform.rotation.z = q[2]
        transform.rotation.w = q[3]

        # ----- Insert new Transform into a TransformStamped object and add to the
        # ----- tf tree
        new_tfstamped = TransformStamped()
        new_tfstamped.child_frame_id = "/odom"
        new_tfstamped.header.frame_id = "/map"
        new_tfstamped.header.stamp = currentTime
        new_tfstamped.transform = transform

        # ----- Add the transform to the list of all transforms
        self.tf_message = tfMessage(transforms=[new_tfstamped])
def tfm(matrix, parent, child, stamp):
    """Wrap a 4x4 homogeneous matrix as a one-transform tfMessage.

    matrix: 4x4 homogeneous transform; parent/child: frame ids;
    stamp: header timestamp.
    """
    quat = quaternion_from_matrix(matrix)
    trans = translation_from_matrix(matrix)
    stamped = TransformStamped()
    stamped.header.frame_id = parent
    stamped.header.stamp = stamp
    stamped.child_frame_id = child
    for axis, value in zip('xyz', trans):
        setattr(stamped.transform.translation, axis, value)
    for axis, value in zip('xyzw', quat):
        setattr(stamped.transform.rotation, axis, value)
    return tfMessage([stamped])
Beispiel #15
0
def tfm(matrix, parent, child, stamp):
    """Wrap a 4x4 homogeneous matrix as a one-transform tfMessage.

    matrix: 4x4 homogeneous transform; parent/child: frame ids;
    stamp: header timestamp for the transform.
    """
    rotation = quaternion_from_matrix(matrix)
    translation = translation_from_matrix(matrix)
    t = TransformStamped()
    t.header.frame_id = parent
    t.header.stamp = stamp
    t.child_frame_id = child
    t.transform.translation.x = translation[0]
    t.transform.translation.y = translation[1]
    t.transform.translation.z = translation[2]
    t.transform.rotation.x = rotation[0]
    t.transform.rotation.y = rotation[1]
    t.transform.rotation.z = rotation[2]
    t.transform.rotation.w = rotation[3]
    return tfMessage([t])
Beispiel #16
0
    def __init__(self):
        """Set up object-handle bookkeeping, the tf listener/publisher and
        the object-recognition pose publishers."""
        # Object handles object_1 .. object_18.
        self.item_list = ["object_%d" % n for n in range(1, 19)]
        # Lookup from object class to its candidate handles:
        # apple -> object_1..15, mouse -> object_16..30.
        self.item_dict = {
            "apple": ["object_%d" % n for n in range(1, 16)],
            "mouse": ["object_%d" % n for n in range(16, 31)],
        }
        # Classes already placed in the map, e.g. ["mouse", "apple"].
        self.in_map_log = []

        self.tfMessage = tfMessage()
        self.tf_listener = tf.TransformListener()
        rospy.loginfo('listener made')
        tf_subscriber = rospy.Subscriber("/tf",
                                         tfMessage,
                                         self._tf_callback,
                                         queue_size=1)

        self.tf_publisher = rospy.Publisher("/tf", tfMessage, queue_size=1)
        self.or_pub = rospy.Publisher('/or', PoseStamped, queue_size=3)
        self.or_map_pub = rospy.Publisher('/or_map', PoseStamped, queue_size=3)
Beispiel #17
0
    def test_face_tf(self):
        """Publish two face locations and check the resulting /tf transform
        matches the first face's position (identity rotation)."""
        pub, msg_class = rostopic.create_publisher('/camera/face_locations',
                                                   'pi_face_tracker/Faces',
                                                   True)
        msg = msg_class()
        msg_str = """faces:
- id: 1
  point:
    x: 0.76837966452
    y: -0.132697071241
    z: 0.0561996095957
  attention: 0.990000009537
- id: 2
  point:
    x: 0.807249662369
    y: -0.00428759906469
    z: 0.035407830253
  attention: 0.990000009537"""
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input and warns on modern PyYAML -- fine for a fixture,
        # but confirm before reuse.
        fill_message_args(msg, [yaml.load(msg_str)])
        pub.publish(msg)
        msg = wait_for_message('/tf', tfMessage, 5)
        expect_str = """
transforms:
  -
    header:
      seq: 0
      stamp:
        nsecs: 752310037
      frame_id: camera
    child_frame_id: face_base1
    transform:
      translation:
        x: 0.76837966452
        y: -0.132697071241
        z: 0.0561996095957
      rotation:
        x: 0.0
        y: 0.0
        z: 0.0
        w: 1.0
"""
        expect = tfMessage()
        fill_message_args(expect, [yaml.load(expect_str)])
        #self.assertEqual(msg.transforms[0].child_frame_id,
        #    expect.transforms[0].child_frame_id)
        # Only the transform (not the header/frame ids) is compared.
        self.assertEqual(msg.transforms[0].transform,
                         expect.transforms[0].transform)
        pub.unregister()
Beispiel #18
0
    def recalculate_transform(self, currentTime):
        """
        Creates updated transform from /odom to /map given recent odometry and
        laser data.

        :Args:
            | currentTime (rospy.Time()): Time stamp for this update
        """

        transform = Transform()

        # Homogeneous 4x4 pose of the robot in the map frame.
        T_est = transformations.quaternion_matrix([self.estimatedpose.pose.pose.orientation.x,
                                                   self.estimatedpose.pose.pose.orientation.y,
                                                   self.estimatedpose.pose.pose.orientation.z,
                                                   self.estimatedpose.pose.pose.orientation.w])
        T_est[0, 3] = self.estimatedpose.pose.pose.position.x
        T_est[1, 3] = self.estimatedpose.pose.pose.position.y
        T_est[2, 3] = self.estimatedpose.pose.pose.position.z

        # Homogeneous 4x4 pose of the robot in the odom frame.
        T_odom = transformations.quaternion_matrix([self.last_odom_pose.pose.pose.orientation.x,
                                                   self.last_odom_pose.pose.pose.orientation.y,
                                                   self.last_odom_pose.pose.pose.orientation.z,
                                                   self.last_odom_pose.pose.pose.orientation.w])
        T_odom[0, 3] = self.last_odom_pose.pose.pose.position.x
        T_odom[1, 3] = self.last_odom_pose.pose.pose.position.y
        T_odom[2, 3] = self.last_odom_pose.pose.pose.position.z
        # map->odom correction: T = T_map_base * inv(T_odom_base).
        T = np.dot(T_est, np.linalg.inv(T_odom))
        q = transformations.quaternion_from_matrix(T) #[:3, :3])

        transform.translation.x = T[0, 3]
        transform.translation.y = T[1, 3]
        transform.translation.z = T[2, 3]
        transform.rotation.x = q[0]
        transform.rotation.y = q[1]
        transform.rotation.z = q[2]
        transform.rotation.w = q[3]


        # Insert new Transform into a TransformStamped object and add to the
        # tf tree
        new_tfstamped = TransformStamped()
        new_tfstamped.child_frame_id = "/odom"
        new_tfstamped.header.frame_id = "/map"
        new_tfstamped.header.stamp = currentTime
        new_tfstamped.transform = transform

        # Add the transform to the list of all transforms
        self.tf_message = tfMessage(transforms=[new_tfstamped])
Beispiel #19
0
    def test_face_tf(self):
        """Publish two face locations and check the resulting /tf transform
        matches the first face's position (identity rotation)."""
        pub, msg_class = rostopic.create_publisher(
            '/camera/face_locations', 'pi_face_tracker/Faces', True)
        msg = msg_class()
        msg_str = """faces:
- id: 1
  point:
    x: 0.76837966452
    y: -0.132697071241
    z: 0.0561996095957
  attention: 0.990000009537
- id: 2
  point:
    x: 0.807249662369
    y: -0.00428759906469
    z: 0.035407830253
  attention: 0.990000009537"""
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input and warns on modern PyYAML -- fixture use only.
        fill_message_args(msg, [yaml.load(msg_str)])
        pub.publish(msg)
        msg = wait_for_message('/tf', tfMessage, 5)
        expect_str = """
transforms:
  -
    header:
      seq: 0
      stamp:
        nsecs: 752310037
      frame_id: camera
    child_frame_id: face_base1
    transform:
      translation:
        x: 0.76837966452
        y: -0.132697071241
        z: 0.0561996095957
      rotation:
        x: 0.0
        y: 0.0
        z: 0.0
        w: 1.0
"""
        expect = tfMessage()
        fill_message_args(expect, [yaml.load(expect_str)])
        #self.assertEqual(msg.transforms[0].child_frame_id,
        #    expect.transforms[0].child_frame_id)
        # Only the transform (not the header/frame ids) is compared.
        self.assertEqual(msg.transforms[0].transform,
            expect.transforms[0].transform)
        pub.unregister()
 def publish_filtered_tf(self, header):
     """Publish a median-filtered source->goal transform on /tf.

     Yaw is the median of ``self.buffer``; translation comes from
     ``self.position`` with z fixed at 0.
     """
     yaw = np.median(self.buffer)
     # yaw - pi: presumably flips the frame 180 degrees about z -- confirm.
     q = tfs.quaternion_from_euler(0, 0, yaw - math.pi)
     ts = TransformStamped()
     ts.header = header
     ts.header.frame_id = self.source
     ts.child_frame_id = self.goal
     ts.transform.translation.x = self.position[0]
     ts.transform.translation.y = self.position[1]
     ts.transform.translation.z = 0
     ts.transform.rotation.x = q[0]
     ts.transform.rotation.y = q[1]
     ts.transform.rotation.z = q[2]
     ts.transform.rotation.w = q[3]
     msg = tfMessage()
     msg.transforms.append(ts)
     self.tf_pub.publish(msg)
Beispiel #21
0
 def object_to_map(self, pose, object_type, time):
     """Transform an object pose into /map, publish it, and broadcast a
     /map -> '<object_type>_in_map' transform on /tf.

     pose: PoseStamped in any tf-resolvable frame; time: stamp to use for
     the broadcast transform.
     """
     obj_in_map = self.tf_listener.transformPose("/map", pose)
     obj_in_map.header.frame_id = object_type + "_in_map"
     transform = Transform()
     transform.translation = obj_in_map.pose.position
     transform.rotation = obj_in_map.pose.orientation
     # Insert new Transform into a TransformStamped object and add to the tf tree
     tf_to_map = TransformStamped()
     tf_to_map.child_frame_id = object_type + "_in_map"
     tf_to_map.header.frame_id = "/map"
     tf_to_map.transform = transform
     tf_to_map.header.stamp = time
     #rospy.loginfo(new_tfstamped)
     # add to tf list
     self.or_map_pub.publish(obj_in_map)
     self.tf_message = tfMessage(transforms=[tf_to_map])
     self.tf_publisher.publish(self.tf_message)
 def publish_filtered_tf(self, header):
     """Publish the median-filtered transform as '<frame_id>_filtered'.

     ``self.buffer`` rows are presumably [x, y, z, roll, pitch, yaw] --
     TODO confirm against the code that fills the buffer. When position
     filtering is disabled, the last raw translation is used instead of
     the median.
     """
     m = np.median(self.buffer, axis=0)
     if not self.filter_position:
         m[0:3] = self.last_transform[0:3]
     q = quaternion_from_euler(m[3], m[4], m[5])
     ts = TransformStamped()
     ts.header = header
     ts.child_frame_id = self.frame_id + '_filtered'
     ts.transform.translation.x = m[0]
     ts.transform.translation.y = m[1]
     ts.transform.translation.z = m[2]
     ts.transform.rotation.x = q[0]
     ts.transform.rotation.y = q[1]
     ts.transform.rotation.z = q[2]
     ts.transform.rotation.w = q[3]
     msg = tfMessage()
     msg.transforms.append(ts)
     self.tf_pub.publish(msg)
Beispiel #23
0
def pose2tf(pose, timestamp, frame_id, child_frame_id):
    """Build a one-transform tfMessage from a 6-DOF pose.

    pose: sequence [x, y, z, roll, pitch, yaw]; timestamp, frame_id and
    child_frame_id fill in the transform header.
    """
    tx, ty, tz, roll, pitch, yaw = pose[:6]
    stamped = TransformStamped()
    stamped.header.stamp = timestamp
    stamped.header.frame_id = frame_id
    stamped.child_frame_id = child_frame_id
    stamped.transform.translation.x = tx
    stamped.transform.translation.y = ty
    stamped.transform.translation.z = tz
    quat = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
    stamped.transform.rotation.x = quat[0]
    stamped.transform.rotation.y = quat[1]
    stamped.transform.rotation.z = quat[2]
    stamped.transform.rotation.w = quat[3]
    message = tfMessage()
    message.transforms.append(stamped)
    return message
def publishTransform(transform, timestamp, frame_id, output_bag):
    """Decompose a 4x4 world->frame_id matrix and write it to /tf in a bag."""
    _, _, euler, translation, _ = tf.transformations.decompose_matrix(
        transform)
    quat = tf.transformations.quaternion_from_euler(*euler)

    stamped = TransformStamped()
    stamped.header.stamp = timestamp
    stamped.header.frame_id = 'world'
    stamped.child_frame_id = frame_id
    stamped.transform.translation.x = translation[0]
    stamped.transform.translation.y = translation[1]
    stamped.transform.translation.z = translation[2]
    stamped.transform.rotation.x = quat[0]
    stamped.transform.rotation.y = quat[1]
    stamped.transform.rotation.z = quat[2]
    stamped.transform.rotation.w = quat[3]

    bag_msg = tfMessage(transforms=[stamped])
    output_bag.write('/tf', bag_msg, timestamp)
Beispiel #25
0
def write_transform(view_pose, timestamp, frame_id, output_bag, publishers,
                    publish):
    """Convert a view pose to a world->frame_id tf and hand it to write_msg.

    view_pose is first mapped through camera_to_world_with_pose; the
    resulting matrix is decomposed into translation plus Euler angles and
    re-encoded as a quaternion.
    """
    scale, shear, angles, transl, persp = tf.transformations.decompose_matrix(
        camera_to_world_with_pose(view_pose))
    rotation = tf.transformations.quaternion_from_euler(*angles)

    trans = TransformStamped()
    trans.header.stamp = timestamp
    trans.header.frame_id = 'world'
    trans.child_frame_id = frame_id
    trans.transform.translation.x = transl[0]
    trans.transform.translation.y = transl[1]
    trans.transform.translation.z = transl[2]
    trans.transform.rotation.x = rotation[0]
    trans.transform.rotation.y = rotation[1]
    trans.transform.rotation.z = rotation[2]
    trans.transform.rotation.w = rotation[3]

    msg = tfMessage()
    msg.transforms.append(trans)

    # write_msg handles both bag writing and optional live publishing.
    write_msg("/tf", msg, output_bag, publishers, publish, timestamp)
def writeTransform(view, timestamp, frame_id, output_bag):
    """Write the view's mid-shutter world->frame_id transform to /tf.

    The ground-truth pose is interpolated at the midpoint (0.5) between
    shutter open and close, mapped to world coordinates, decomposed, and
    re-encoded as translation + quaternion.
    """
    ground_truth_pose = interpolate_poses(view.shutter_open,
                                          view.shutter_close, 0.5)
    scale, shear, angles, transl, persp = tf.transformations.decompose_matrix(
        camera_to_world_with_pose(ground_truth_pose))
    rotation = tf.transformations.quaternion_from_euler(*angles)

    trans = TransformStamped()
    trans.header.stamp = timestamp
    trans.header.frame_id = 'world'
    trans.child_frame_id = frame_id
    trans.transform.translation.x = transl[0]
    trans.transform.translation.y = transl[1]
    trans.transform.translation.z = transl[2]
    trans.transform.rotation.x = rotation[0]
    trans.transform.rotation.y = rotation[1]
    trans.transform.rotation.z = rotation[2]
    trans.transform.rotation.w = rotation[3]

    msg = tfMessage()
    msg.transforms.append(trans)

    output_bag.write('/tf', msg, timestamp)
Beispiel #27
0
    def tf_callbak(self, data=tfMessage()):
        obstacle_msg = PolygonStamped()
        obstacle_msg.header.stamp = rospy.Time.now()
        obstacle_msg.header.frame_id = "map"  # CHANGE HERE: odom/map
        for i in data.transforms:
            if i.child_frame_id == "camera_link":
                tempPose = PoseStamped()
                tempPose.pose.position.x = -1*i.transform.translation.x #axis reverse
                tempPose.pose.position.y = i.transform.translation.y
                tempPose.pose.position.z = i.transform.translation.z
                x = i.transform.rotation.x
                y = i.transform.rotation.y
                z = i.transform.rotation.z
                w = i.transform.rotation.w
                quaternion = (x,y,z,w)
                self.initialAngle = tf.transformations.euler_from_quaternion(quaternion)[2] #euler to radians
                self.camera_link = tempPose
                try:
                    self.myCord = self.vectortoCord(self.camera_link, self.org.position.x, self.org.position.y, 0.05)
                except AttributeError:
                    print "catch"
                obstacle_msg.polygon.points = []
		#painting the polygon
                length = 50 #cm vehicle dim
                width = 35 #cm vehicle dim
		#the origin point is at the head of the car (y axis), and in the midlle (x axis)
                for i in range (-length, 0): 
                    for j in range (-width/2, width/2):
                        tempPointOrien = Point32()
                        degree = self.initialAngle
			#orientation with multiplication by transform matrix
                        tempPointOrien.x = (i * 0.01 * cos(degree) - j * 0.01 * sin(degree)) - tempPose.pose.position.x
                        tempPointOrien.y = (i * 0.01 * sin(degree) + j * 0.01 * cos(degree)) + tempPose.pose.position.y
                        tempPointOrien.z = 0
                        obstacle_msg.polygon.points.append(tempPointOrien)
                pub2 = rospy.Publisher('/geometry_msgs/Polygon', PolygonStamped, queue_size=10)
                pub2.publish(obstacle_msg) #publish msg
def _parse_bool(value):
    """argparse converter for textual booleans.

    The original used ``type=bool``, a classic argparse pitfall: ``bool`` of
    any non-empty string (including ``'False'``) is True, so ``-y False``
    silently enabled the flag.  This converter maps the usual spellings to a
    real boolean and rejects anything else.
    """
    lowered = value.lower()
    if lowered in ('true', 't', 'yes', 'y', '1'):
        return True
    if lowered in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError("expected True or False, got %r" % value)


def main():
    """Parse an images.txt pose log and write a rosbag.

    Each input line has the form::

        image_basename,t_android,t_movidius,r11,r12,r13,t1,r21,r22,r23,t2,r31,r32,r33,t3

    i.e. a name, two timestamps and a row-major 3x4 [R|t] matrix.  For every
    valid line a PoseStamped ("tango_imu_pose"), an Odometry
    ("tango_imu_odom") and a tf message ("tf") are written, all stamped with
    the android timestamp.
    """
    parser = argparse.ArgumentParser(description="Parse the images.txt file to "
                                                 "create a rosbag with several "
                                                 "PoseStamped messages.")

    parser.add_argument('-i', '--input',
                        help='file with poses (images.txt)',
                        metavar='poses_filename',
                        default='./images.txt',
                        type=str,
                        required=True
                        )
    parser.add_argument('-o', '--output',
                        help='output bag file (with location)',
                        metavar='bag_filename',
                        type=str,
                        required=True
                        )
    parser.add_argument('-y',
                        help='if images_adjusted.txt is found in the same folder'
                             'as the supplied filename, then use it without'
                             'asking',
                        metavar='True/False',
                        type=_parse_bool,  # was type=bool: bool('False') == True
                        default=False,
                        choices=[True, False]
                        )

    arguments = parser.parse_args()

    # Prefer an adjusted poses file when present; ask unless -y was given.
    images_folder, _ = os.path.split(arguments.input)
    alt_file = os.path.join(images_folder, 'images_adjusted.txt')

    if os.path.exists(alt_file):
        if arguments.y:
            images_file = open(alt_file)
        else:
            reply = None
            while reply not in ['y', 'n']:
                print("The images_adjusted.txt file is in the folder {}, do you"
                      " want to use that instead? [y/n]".format(images_folder))
                reply = raw_input()
            if reply == 'y':
                images_file = open(alt_file)
            else:
                images_file = open(arguments.input)
    else:
        images_file = open(arguments.input)

    print("Processing data from {}...".format(images_file.name))
    with rosbag.Bag(arguments.output, 'w') as outputbag:
        for lineno, line in enumerate(images_file):
            tokens = line.strip('\n').split(',')

            if len(tokens) != 15:
                print("Line {0} appears to be wrong: \"{1}\" ".format(lineno, tokens))
                continue

            # First elements before the transformation.
            image_name = tokens[0]
            ts1 = float(tokens[1])   # android timestamp (used for all stamps)
            ts2 = float(tokens[2])   # movidius timestamp, currently unused

            # Build a homogeneous 4x4 matrix and extract the quaternion.
            # list() keeps np.array well-defined if this ever runs on Python 3.
            rotation_matrix = np.array(list(map(float, tokens[3:]))).reshape(3, 4)
            rotation_matrix = np.vstack((rotation_matrix, [0, 0, 0, 1]))
            quaternion = transformations.quaternion_from_matrix(rotation_matrix)

            # Pose message.
            p = PoseStamped()
            p.header.frame_id = "/tango/world"
            p.header.stamp = rospy.Time.from_seconds(ts1)
            p.header.seq = lineno
            p.pose.position.x = rotation_matrix[0, 3]
            p.pose.position.y = rotation_matrix[1, 3]
            p.pose.position.z = rotation_matrix[2, 3]
            p.pose.orientation.x = quaternion[0]
            p.pose.orientation.y = quaternion[1]
            p.pose.orientation.z = quaternion[2]
            p.pose.orientation.w = quaternion[3]
            outputbag.write("tango_imu_pose", p, rospy.Time.from_seconds(ts1))

            # Odometry entry.
            # TODO get covariances from the covariances.txt file
            o = Odometry()
            o.header = p.header
            # NOTE(review): child_frame_id is set to the parent frame
            # ("/tango/world"); "/tango/imu" looks more likely -- confirm
            # before changing, downstream consumers may rely on this.
            o.child_frame_id = o.header.frame_id
            o.pose.pose = p.pose
            outputbag.write("tango_imu_odom", o, rospy.Time.from_seconds(ts1))

            # tf message carrying the same world->imu transform.
            tfmsg = tfMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.frame_id = "/tango/world"
            tf_stamped.header.seq = lineno
            tf_stamped.header.stamp = rospy.Time.from_seconds(ts1)
            tf_stamped.child_frame_id = "/tango/imu"
            tf_stamped.transform.translation.x = rotation_matrix[0, 3]
            tf_stamped.transform.translation.y = rotation_matrix[1, 3]
            tf_stamped.transform.translation.z = rotation_matrix[2, 3]
            tf_stamped.transform.rotation = p.pose.orientation
            tfmsg.transforms.append(tf_stamped)
            outputbag.write("tf", tfmsg, rospy.Time.from_seconds(ts1))

    print("Bag creation complete, bagfile: {}".format(arguments.output))
Beispiel #29
0
    def __call__(self, channel, data):
        """Convert one raw LCM ``agile.state_t`` packet into ROS messages.

        Returns two parallel lists ``(topics_to_publish, msgs_to_publish)``:
        entry i of the message list is meant to be published on entry i of
        the topic list by the caller.  Produces a PoseStamped on
        ``self.rostopic``, a tf tree update on ``/tf``, and (when the packet
        timestamp matches a known image) a CameraInfo for each camera topic.

        NOTE(review): *channel* is unused; it is presumably part of the LCM
        handler signature -- confirm against the LCM subscribe API.
        """
        lcm_msg = agile.state_t.decode(data)

        # List of messages and topics to publish
        topics_to_publish = []
        msgs_to_publish = []

        # Populate pose message (orientation is stored w-first in the LCM
        # struct and mapped to ROS' x,y,z,w fields here).
        topics_to_publish.append(self.rostopic)
        ros_msg = PoseStamped()
        ros_msg.header = self.make_header(lcm_msg.utime)
        ros_msg.pose.position.x = lcm_msg.position[0]
        ros_msg.pose.position.y = lcm_msg.position[1]
        ros_msg.pose.position.z = lcm_msg.position[2]
        ros_msg.pose.orientation.w = lcm_msg.orient[0]
        ros_msg.pose.orientation.x = lcm_msg.orient[1]
        ros_msg.pose.orientation.y = lcm_msg.orient[2]
        ros_msg.pose.orientation.z = lcm_msg.orient[3]
        msgs_to_publish.append(ros_msg)

        # Populate TF tree message
        tf_packet = tfMessage()

        # Mocap NED <> Mocap ENU
        # topics_to_publish.append("/tf_static")
        # tf_ned_2_enu_msg = TransformStamped()
        # tf_ned_2_enu_msg.header = self.make_header(lcm_msg.utime)
        # tf_ned_2_enu_msg.header.frame_id = "mocap_ENU"
        # tf_ned_2_enu_msg.child_frame_id = "mocap_NED"
        # tf_ned_2_enu_msg.transform.translation = ros_msg.pose.position
        # tf_ned_2_enu_msg.transform.rotation = ros_msg.pose.orientation
        # msgs_to_publish.append(tf_ned_2_enu_msg)

        # [ 0, 0.7071068, 0.7071068, 0 ]

        # Mocap NED <> drone_body message.  Translation/rotation share the
        # pose message's objects (a Point is assigned where tf expects a
        # Vector3; rospy serializes both identically).
        tf_drone_msg = TransformStamped()
        tf_drone_msg.header = self.make_header(lcm_msg.utime)
        tf_drone_msg.header.frame_id = "mocap_NED"
        tf_drone_msg.child_frame_id = "body_frame"
        tf_drone_msg.transform.translation = ros_msg.pose.position
        tf_drone_msg.transform.rotation = ros_msg.pose.orientation

        # Append message to transform list
        tf_packet.transforms.append(tf_drone_msg)

        # Static TFs
        # drone_body <> IMU message
        # topics_to_publish.append("/tf_static")
        # tf_imu_msg = TransformStamped()
        # tf_imu_msg.header = self.make_header(lcm_msg.utime)
        # tf_imu_msg.header.frame_id = "body_frame"
        # tf_imu_msg.child_frame_id = "imu"
        # tf_imu_msg.transform.translation.x = 0.0
        # tf_imu_msg.transform.translation.y = 0.0
        # tf_imu_msg.transform.translation.z = 0.0
        # # [ 0.707479362748676   0.002029830683065  -0.007745228390866   0.706688646087723 ]
        # tf_imu_msg.transform.rotation.w = 0.707479362748676
        # tf_imu_msg.transform.rotation.x = 0.002029830683065
        # tf_imu_msg.transform.rotation.y = -0.007745228390866
        # tf_imu_msg.transform.rotation.z = 0.706688646087723
        # msgs_to_publish.append(tf_imu_msg)

        # # drone_body <> camera_l
        # topics_to_publish.append("/tf_static")
        # tf_cam_l_msg = TransformStamped()
        # tf_cam_l_msg.header = self.make_header(lcm_msg.utime)
        # tf_cam_l_msg.header.frame_id = "body_frame"
        # tf_cam_l_msg.child_frame_id = "camera_l"
        # tf_cam_l_msg.transform.translation.x = 0.0
        # tf_cam_l_msg.transform.translation.y = -0.05
        # tf_cam_l_msg.transform.translation.z = 0.0
        # tf_cam_l_msg.transform.rotation.w = 1.0
        # tf_cam_l_msg.transform.rotation.x = 0.0
        # tf_cam_l_msg.transform.rotation.y = 0.0
        # tf_cam_l_msg.transform.rotation.z = 0.0
        # msgs_to_publish.append(tf_cam_l_msg)

        # # drone_body <> camera_r
        # topics_to_publish.append("/tf_static")
        # tf_cam_r_msg = TransformStamped()
        # tf_cam_r_msg.header = self.make_header(lcm_msg.utime)
        # tf_cam_r_msg.header.frame_id = "body_frame"
        # tf_cam_r_msg.child_frame_id = "camera_r"
        # tf_cam_r_msg.transform.translation.x = 0.0
        # tf_cam_r_msg.transform.translation.y = 0.05
        # tf_cam_r_msg.transform.translation.z = 0.0
        # tf_cam_r_msg.transform.rotation.w = 1.0
        # tf_cam_r_msg.transform.rotation.x = 0.0
        # tf_cam_r_msg.transform.rotation.y = 0.0
        # tf_cam_r_msg.transform.rotation.z = 0.0
        # msgs_to_publish.append(tf_cam_r_msg)

        # # drone_body <> camera_d
        # topics_to_publish.append("/tf_static")
        # tf_cam_d_msg = TransformStamped()
        # tf_cam_d_msg.header = self.make_header(lcm_msg.utime)
        # tf_cam_d_msg.header.frame_id = "body_frame"
        # tf_cam_d_msg.child_frame_id = "camera_d"
        # tf_cam_d_msg.transform.translation.x = 0.0
        # tf_cam_d_msg.transform.translation.y = 0.0
        # tf_cam_d_msg.transform.translation.z = 0.0
        # # 0.707,0,-0.707,0
        # tf_cam_d_msg.transform.rotation.w = 0.707
        # tf_cam_d_msg.transform.rotation.x = 0.0
        # tf_cam_d_msg.transform.rotation.y = -0.707
        # tf_cam_d_msg.transform.rotation.z = 0.0
        # msgs_to_publish.append(tf_cam_d_msg)

        # Queue TF tree message for publication
        topics_to_publish.append("/tf")
        msgs_to_publish.append(tf_packet)

        # Populate camera info message (if needed); the same intrinsics are
        # queued once per camera topic.
        if (lcm_msg.utime in self.image_timestamps):
            cam_info = CameraInfo()
            cam_info.header = self.make_header(lcm_msg.utime)
            cam_info.height = 768
            cam_info.width = 1024
            cam_info.distortion_model = 'plumb_bob'
            cam_info.K = [
                665.107510106, 0., 511.5, 0., 665.107510106, 383.5, 0., 0., 1.
            ]
            cam_info.P = [
                665.107510106, 0., 511.5, 0., 0., 665.107510106, 383.5, 0., 0.,
                0., 1., 0
            ]

            for cam_topic in [
                    "/camera_l/camera_info", "/camera_r/camera_info",
                    "/camera_d/camera_info"
            ]:
                topics_to_publish.append(cam_topic)
                msgs_to_publish.append(cam_info)

        return topics_to_publish, msgs_to_publish
def go():
    """Stream newline-delimited JSON state records from (host, port) and
    republish every ``decimation``-th record as ROS Odometry, clock-error
    and tf messages.  Runs until the peer closes the connection.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))

    header_skipped = False
    pending = ''
    record_index = 0
    while True:
        chunk = sock.recv(2**12)
        if not chunk:
            break  # peer closed the connection
        pending += chunk

        # Complete lines are processed; the trailing partial line is kept.
        pieces = pending.split('\n')
        pending = pieces[-1]
        for raw_line in pieces[:-1]:
            # The very first line is a header and is discarded.
            if not header_skipped:
                header_skipped = True
                continue

            if record_index % decimation == 0:
                d = json.loads(raw_line)
                stamp = rospy.Time.from_sec(d['timestamp']*1e-9)

                # Shared 6x6 twist covariance: velocity block top-left,
                # angular-velocity block bottom-right, zero cross terms.
                twist_cov = numpy.vstack((
                    numpy.hstack((d['X_velocity_body_covariance'], numpy.zeros((3, 3)))),
                    numpy.hstack((numpy.zeros((3, 3)), d['X_angular_velocity_body_covariance'])),
                )).flatten()

                def make_twist():
                    # Fresh body-frame twist for each outgoing Odometry.
                    return TwistWithCovariance(
                        twist=Twist(
                            linear=Vector3(*d['velocity_body']),
                            angular=Vector3(*d['angular_velocity_body']),
                        ),
                        covariance=twist_cov,
                    )

                # Absolute ECEF odometry: pick position (0:3) and
                # orientation (6:9) blocks out of the 9x9 covariance.
                ecef_cov = numpy.array(d['X_position_relative_position_orientation_ecef_covariance'])
                ecef_pose_cov = numpy.vstack((
                    numpy.hstack((ecef_cov[0:3, 0:3], ecef_cov[0:3, 6:9])),
                    numpy.hstack((ecef_cov[6:9, 0:3], ecef_cov[6:9, 6:9])),
                )).flatten()
                absodom_pub.publish(Odometry(
                    header=Header(stamp=stamp, frame_id='/ecef'),
                    child_frame_id=child_frame_id,
                    pose=PoseWithCovariance(
                        pose=Pose(
                            position=Point(*d['position_ecef']),
                            orientation=Quaternion(**d['orientation_ecef']),
                        ),
                        covariance=ecef_pose_cov,
                    ),
                    twist=make_twist(),
                ))

                # Relative ENU odometry.
                odom_pub.publish(Odometry(
                    header=Header(stamp=stamp, frame_id='/enu'),
                    child_frame_id=child_frame_id,
                    pose=PoseWithCovariance(
                        pose=Pose(
                            position=Point(*d['relative_position_enu']),
                            orientation=Quaternion(**d['orientation_enu']),
                        ),
                        covariance=numpy.array(d['X_relative_position_orientation_enu_covariance']).flatten(),
                    ),
                    twist=make_twist(),
                ))

                clock_error_pub.publish(Float64(d['X_clock_error']))

                # ENU -> body transform.  The translation is built from a
                # Point (as in the original code); rospy serializes it the
                # same way as a Vector3.
                tf_pub.publish(tfMessage(
                    transforms=[
                        TransformStamped(
                            header=Header(stamp=stamp, frame_id='/enu'),
                            child_frame_id=child_frame_id,
                            transform=Transform(
                                translation=Point(*d['relative_position_enu']),
                                rotation=Quaternion(**d['orientation_enu']),
                            ),
                        ),
                    ],
                ))

            record_index += 1
Beispiel #31
0
    fr_name = frame_name +str(i)
    msg.child_frame_id  = fr_name
    msg.header.frame_id = fr_name[:-(1 + idepth*bush)]

    tfs.append(msg)

    #recurse
    if depth > 1:
      tfs.extend(get_tree(F, depth - 1, step / 2.0,
                          fr_name, bush, idepth + 1))

  return tfs



# ---- node configuration, read once at startup ----
depth                  = rospy.get_param('depth', 3)            # recursion depth of the synthetic tf tree
step                   = rospy.get_param('step', 0.4)           # translation step passed to get_tree
get_tree.branch_factor = rospy.get_param('branch_factor', 5)    # children per node
get_tree.twig_height   = rospy.get_param('twig_height', 0.1)
# NOTE(review): 'bush_topoloy' looks like a typo for 'bush_topology'.
# Kept as-is because renaming it would change which parameter is read.
bush_topo              = rospy.get_param('bush_topoloy', False)
r                      = rospy.get_param('rate', 10)            # publish rate in Hz

rate = rospy.Rate(r)

# Rebuild and publish the whole tf tree every cycle until shutdown.
while not rospy.is_shutdown():
  msg = tfMessage()
  msg.transforms = get_tree(kdl.Frame(), depth, step, '/fr', bush_topo)
  tf_pub.publish(msg)
  rate.sleep()
def _str_to_bool(value):
    """argparse converter for textual booleans.

    The original used ``type=bool``, which is broken: ``bool('False')`` is
    True because any non-empty string is truthy.
    """
    lowered = value.lower()
    if lowered in ('true', 't', 'yes', 'y', '1'):
        return True
    if lowered in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError("expected True or False, got %r" % value)


def _load_imu_transform(path):
    """Load a comma-separated, row-major 3x4 [R|t] matrix from *path* and
    return it as a geometry_msgs Transform (translation + quaternion)."""
    mat = np.loadtxt(path, delimiter=',').reshape(3, 4)
    mat = np.vstack((mat, [0, 0, 0, 1]))  # homogeneous 4x4 for quaternion extraction
    quaternion = transformations.quaternion_from_matrix(mat)
    transform_msg = Transform()
    transform_msg.translation.x = mat[0, 3]
    transform_msg.translation.y = mat[1, 3]
    transform_msg.translation.z = mat[2, 3]
    transform_msg.rotation.x = quaternion[0]
    transform_msg.rotation.y = quaternion[1]
    transform_msg.rotation.z = quaternion[2]
    transform_msg.rotation.w = quaternion[3]
    return transform_msg


def _stamped_transform(child_frame_id, transform, seq, ts):
    """Wrap *transform* in a TransformStamped from /tango/imu to
    *child_frame_id*, stamped with *ts* (seconds) and sequence *seq*."""
    tf_stamped = TransformStamped()
    tf_stamped.header.frame_id = "/tango/imu"
    tf_stamped.header.seq = seq
    tf_stamped.header.stamp = rospy.Time.from_seconds(ts)
    tf_stamped.child_frame_id = child_frame_id
    tf_stamped.transform = transform
    return tf_stamped


def main():
    """Create a rosbag of static tf messages from the Tango calibration logs.

    Reads the three *_to_imu_transformation.txt calibration files once, then
    republishes them as a /tf message for every line of images.txt (or
    images_adjusted.txt), stamped with that line's android timestamp.
    """
    parser = argparse.ArgumentParser(description="Create the tf messages from "
                                                 "the Tango logs.")
    parser.add_argument('-d', '--directory',
                        help="The folder must have images.txt, "
                             "depth_to_imu_transformation.txt, "
                             "fisheye_to_imu_transformation.txt, "
                             "narrow_to_imu_transformation.txt",
                        required=True)
    parser.add_argument('-o', '--output',
                        help='output bag file (with location)',
                        metavar='bag_filename',
                        type=str,
                        required=True
                        )
    parser.add_argument('-y',
                        help='if images_adjusted.txt is found in the same folder'
                             'as the supplied filename, then use it without'
                             'asking',
                        metavar='True/False',
                        type=_str_to_bool,  # was type=bool: bool('False') == True
                        default=False,
                        choices=[True, False]
                        )

    arguments = parser.parse_args()
    folder_name = arguments.directory
    standard_file = os.path.join(folder_name, 'images.txt')
    alt_file = os.path.join(folder_name, 'images_adjusted.txt')

    # Prefer the adjusted file when present; ask unless -y was given.
    if os.path.exists(alt_file):
        if arguments.y:
            images_file = open(alt_file)
        else:
            reply = None
            while reply not in ['y', 'n']:
                print("The images_adjusted.txt file is in the folder {}, do you"
                      " want to use that instead? [y/n]".format(folder_name))
                reply = raw_input()
            if reply == 'y':
                images_file = open(alt_file)
            else:
                images_file = open(standard_file)
    else:
        images_file = open(standard_file)
    print("Processing data from {}...".format(images_file.name))

    # Static camera->imu calibrations, loaded once.
    depth_transform_msg = _load_imu_transform(
        os.path.join(folder_name, "depth_to_imu_transformation.txt"))
    fisheye_transform_msg = _load_imu_transform(
        os.path.join(folder_name, "fisheye_to_imu_transformation.txt"))
    narrow_transform_msg = _load_imu_transform(
        os.path.join(folder_name, "narrow_to_imu_transformation.txt"))

    #for each entry in images.txt, add a transformation with the values above
    #TODO skip some values? we don't need high frame rate!
    with rosbag.Bag(arguments.output, 'w') as outputbag:
        for lineno, line in enumerate(images_file):
            #lines must be of the form:
            #image_basename,t_android,t_movidius,r11,r12,r12,t1,r21,r22,r23,t2,r31,r32,r33,t3
            tokens = line.strip('\n').split(',')

            if len(tokens) != 15:
                print("Line {0} appears to be wrong: \"{1}\" ".format(lineno, tokens))
                continue

            #using the android_ts as the timestamp
            ts = float(tokens[1])

            msg = tfMessage()
            msg.transforms.append(
                _stamped_transform("/tango/depth", depth_transform_msg, lineno, ts))
            msg.transforms.append(
                _stamped_transform("/tango/fisheye", fisheye_transform_msg, lineno, ts))
            msg.transforms.append(
                _stamped_transform("/tango/narrow", narrow_transform_msg, lineno, ts))

            outputbag.write("tf", msg, rospy.Time.from_seconds(ts))

    # The with-statement already closed the bag (the original called
    # outputbag.close() a second time here, which was redundant).
    print("Bag creation complete, bagfile: {}".format(arguments.output))
Beispiel #33
0
    q.y = sinR * cosP * cosY + cosR * sinP * sinY
    q.z = cosR * sinP * cosY + sinR * cosP * sinY
    q.w = cosR * cosP * sinY + sinR * sinP * cosY
    return q


class Tracker:
    """Caches the most recent message received on a ROS topic.

    The latest message is always available as ``self.msg``; until the first
    callback fires it holds a default-constructed ``Msg()``.
    """

    def __init__(self, topic, Msg):
        # Subscribe first (same construction order as before), then seed
        # the cache with an empty message instance.
        self.sub = rospy.Subscriber(topic, Msg, self.callback)
        self.msg = Msg()

    def callback(self, msg):
        """Remember the newest message; overwrites the previous one."""
        self.msg = msg


# Static transform from base_link to a fake camera frame: 1.25 m above the
# base and pitched by 1.3 rad (xyz/rpy helpers are defined earlier in this
# file).
t = TransformStamped()
t.header.frame_id = 'base_link'
t.header.seq = 0
t.child_frame_id = 'high_def_frame'
t.transform.translation = xyz(0, 0, 1.25)
t.transform.rotation = rpy(0, 1.3, 0)

rospy.init_node('fake_camera')
# NOTE(review): publishes on '/tf_message' rather than the conventional
# '/tf' topic -- confirm downstream listeners expect this name.
pub_tf = rospy.Publisher('/tf_message', tfMessage)

# Re-stamp the otherwise constant transform and publish it at ~10 Hz.
while not rospy.is_shutdown():
    t.header.stamp = rospy.get_rostime()
    msg = tfMessage([t])
    pub_tf.publish(msg)
    time.sleep(0.1)
from waymo_viewer.msg import LidarArray
from waymo_viewer.msg import VehicleInfo
from sensor_msgs.msg import Image
from sensor_msgs.msg import PointCloud
from visualization_msgs.msg import MarkerArray
from visualization_msgs.msg import Marker
from geometry_msgs.msg import TransformStamped
from tf.msg import tfMessage
import math
from tf.transformations import quaternion_from_euler
import tf

# Module-level caches holding the latest data received by the subscriber
# callbacks defined elsewhere in this file.
camera_images = CameraImageArray()
lidars = LidarArray()
br = CvBridge()          # converts between OpenCV images and sensor_msgs/Image
transforms = tfMessage()

# Flags marking whether each message type has been received at least once.
get_camera_image = False
get_lidars = False
get_vehicle = False

# Label colours per object class -- presumably BGR (OpenCV order), TODO confirm.
vehicle = (0, 0, 255)
pedestrian = (0, 255, 0)
cyclist = (255, 0, 0)
sign = (0, 255, 255)

#camera_front_labelimage_pub = rospy.Publisher("camera_front_labelimage", Image, queue_size = 100)
#camera_frontleft_labelimage_pub = rospy.Publisher("camera_frontleft_labelimage", Image, queue_size = 100)
#camera_frontright_labelimage_pub = rospy.Publisher("camera_frontright_labelimage", Image, queue_size = 100)
#camera_sideleft_labelimage_pub = rospy.Publisher("camera_sideleft_labelimage", Image, queue_size = 100)
#camera_sideright_labelimage_pub = rospy.Publisher("camera_sideright_labelimage", Image, queue_size = 100)
Beispiel #35
0
def convert(inputfile, outputfile, csail=False):
    """Convert a CARMEN log file into a rosbag.

    Parameters
    ----------
    inputfile : str
        Path to the CARMEN log; ``~`` is expanded.
    outputfile : str
        Path of the rosbag to create; ``~`` is expanded.
    csail : bool, optional
        Treat FLASER lines as CSAIL-style logs and additionally derive an
        odometry/tf entry from each of them.  This was a hard-coded False
        local in the original implementation; it is now exposed as a
        backward-compatible parameter.
    """
    seq = 0

    # Open the input log.
    try:
        inCompleteName = os.path.expanduser(inputfile)
        carmenlog = open(inCompleteName, "r")
    except (IOError, ValueError):
        rospy.logerr("Couldn't open %s", inputfile)  # was "%": broken format string
        exit(-1)

    # Create the output bag.
    try:
        outCompleteName = os.path.expanduser(outputfile)
        bag = rosbag.Bag(outCompleteName, "w")
    except (IOError, ValueError):
        rospy.logerr("Couldn't open %s", outputfile)
        exit(-1)

    try:
        # Translate each log line into the corresponding bag entries.
        for line in carmenlog:

            words = line.split(" ")
            # A fresh (empty) tf message for every line; the original reset
            # the shared instance at the end of each iteration instead.
            tfmsg = tfMessage()

            if words[0] == "ODOM":
                values = fillUpOdomMessage(tfmsg, words, seq)
                tfmsg = values[0]
                pose = values[1]
                bag.write("/base_odometry/odom", pose, pose.header.stamp)
                bag.write("/tf", tfmsg, pose.header.stamp)
                seq += 1

            if words[0] == "FLASER" and not csail:
                laser_msg = fillUpOldLaserMessage(words)
                laser_msg.header.seq = laser_msg.header.seq + 1
                bag.write("/base_scan", laser_msg, laser_msg.header.stamp)

            if words[0] == "ROBOTLASER1":
                values = fillUpLaserMessage(tfmsg, words, seq)
                laser_msg = values[2]
                laser_msg.header.seq = laser_msg.header.seq + 1
                pose = values[1]
                tfmsg = values[0]
                #bag.write("/base_odometry/odom", pose , pose.header.stamp)
                #bag.write("/tf", tfmsg , pose.header.stamp)
                bag.write("/base_scan", laser_msg, laser_msg.header.stamp)
                seq += 1

            if words[0] == "FLASER" and csail:
                laser_msg = fillUpOldLaserMessage(words)
                laser_msg.header.seq = laser_msg.header.seq + 1
                bag.write("/base_scan", laser_msg, laser_msg.header.stamp)
                values = fillUpOdomFromFlaser(tfmsg, words, seq)
                tfmsg = values[0]
                pose = values[1]
                bag.write("/base_odometry/odom", pose, pose.header.stamp)
                bag.write("/tf", tfmsg, pose.header.stamp)
                seq += 1
    finally:
        # Close both handles even if a conversion step raises; the original
        # leaked the input file and left the bag unterminated on error.
        carmenlog.close()
        bag.close()
Beispiel #36
0
    def _pose_in_camera(self, object_h, t):
        """Look up *object_h* at time *t* and return its pose as a
        PoseStamped expressed in the /camera_link frame."""
        (pos, rot) = self.tf_listener.lookupTransform(
            "/camera_link", object_h, t)
        obj_pos = geometry_msgs.msg.PoseStamped()
        obj_pos.header.frame_id = "/camera_link"
        obj_pos.pose.position.x = pos[0]
        obj_pos.pose.position.y = pos[1]
        obj_pos.pose.position.z = pos[2]
        obj_pos.pose.orientation.x = rot[0]
        obj_pos.pose.orientation.y = rot[1]
        obj_pos.pose.orientation.z = rot[2]
        obj_pos.pose.orientation.w = rot[3]
        return obj_pos

    def _tf_callback(self, msg):
        """ called whenever a new tfMessage is received. Creates a list of detected objects
        and iterates through these to return their type and location w.r.t. map and odometry frames."""
        # Objects whose frames currently exist in the tf tree.
        detected_objects = [item for item in self.item_list
                            if self.tf_listener.frameExists(item)]
        rospy.loginfo(detected_objects)
        for object_h in detected_objects:  # type: str
            obj_type = self.get_type(object_h)
            t = self.tf_listener.getLatestCommonTime(
                "/camera_link", object_h)
            # Pose in the camera frame (duplicated code now shared with the
            # map-logging branch below via _pose_in_camera).
            obj_pos = self._pose_in_camera(object_h, t)
            # Re-express in the robot base frame and tag with the object type.
            obj_pose = self.tf_listener.transformPose(
                "/base_link", obj_pos)
            obj_pose.header.frame_id = obj_type

            transform = Transform()
            transform.translation = obj_pose.pose.position
            transform.rotation = obj_pose.pose.orientation
            # Insert new Transform into a TransformStamped object and add to the tf tree
            new_tfstamped = TransformStamped()
            new_tfstamped.child_frame_id = obj_type
            new_tfstamped.header.frame_id = "/base_link"
            new_tfstamped.transform = transform
            new_tfstamped.header.stamp = t
            # add to tf list
            self.tf_message = tfMessage(transforms=[new_tfstamped])
            self.tf_publisher.publish(self.tf_message)
            self.or_pub.publish(obj_pose)

            if obj_type in self.in_map_log:
                # First sighting of this type only: record it relative to
                # the map, then stop logging it.
                t = self.tf_listener.getLatestCommonTime("/map", object_h)
                obj_pos = self._pose_in_camera(object_h, t)
                self.object_to_map(obj_pos, obj_type, t)
                self.in_map_log.remove(obj_type)
Beispiel #37
0
	def __init__(self, _num, _mapTopic, _algorithmName):
		"""Set up the particle-filter localisation node's state.

		_num           -- numeric identifier stored on the instance
		_mapTopic      -- floor/map name (stored as self.floorName)
		_algorithmName -- "sync" or "async"; anything else exits the process

		NOTE(review): reads self.INIT_X/INIT_Y/INIT_Z/INIT_HEADING, which are
		assumed to be class-level constants defined elsewhere -- confirm.
		"""
		# Initialise fields
		self.estimatedpose =  PoseWithCovarianceStamped()
		self.occupancy_map = OccupancyGrid()
		self.particlecloud =  PoseArray()
		self.tf_message = tfMessage()
		self.weights = []         # per-particle weights
		self.maxWeight = 0
		self.totalWeight = 0
		self.num = _num
		self.clusterTask = ClusterTask()
		self.floorName = _mapTopic
		self.exploded = False

		# Initialise objects
		self.cloud = UpdateParticleCloud()
		self.estimate = EstimatePose()
		self.init = InitialiseCloud()

		self._weighted_particle_publisher = rospy.Publisher("/weightedParticles", WeightedParticles)

		# Parameters
		self.ODOM_ROTATION_NOISE = 0 		# Odometry model rotation noise
		self.ODOM_TRANSLATION_NOISE = 0 	# Odometry x axis (forward) noise
		self.ODOM_DRIFT_NOISE = 0 			# Odometry y axis (side-side) noise
		self.NUMBER_PREDICTED_READINGS = 20

		# Set 'previous' translation to origin
		# All Transforms are given relative to 0,0,0, not in absolute coords.
		self.prev_odom_x  = 0.0 # Previous odometry translation from origin
		self.prev_odom_y = 0.0  # Previous odometry translation from origin
		self.prev_odom_heading = 0.0 # Previous heading from odometry data
		self.last_odom_pose = None

		# Request robot's initial odometry values to be recorded in prev_odom
		self.odom_initialised = False
		self.sensor_model_initialised = False

		# Set default initial pose to initial position and orientation.
		self.estimatedpose.pose.pose.position.x = self.INIT_X
		self.estimatedpose.pose.pose.position.y = self.INIT_Y
		self.estimatedpose.pose.pose.position.z = self.INIT_Z
		self.estimatedpose.pose.pose.orientation = rotateQuaternion(Quaternion(w=1.0),self.INIT_HEADING)

		# NOTE: Currently not making use of covariance matrix

		self.estimatedpose.header.frame_id = "/map"
		self.particlecloud.header.frame_id = "/map"

		# Sensor model
		self.sensor_model =  sensor_model.SensorModel()

		print(_algorithmName)

		# What algorithm do we use?
		if _algorithmName == "async":
			self.asynchronous = True
		elif _algorithmName == "sync":
			self.asynchronous = False
		else:
			print("Invalid argument 3: expected \"sync\" or \"async\"")
			sys.exit(1)

		# Free Point where Robot can be
		self.listFreePoints = []
#!/usr/bin/env python
import roslib
import rospy
from nav_msgs.msg import Odometry
from nav_msgs.msg import OccupancyGrid
from tf.msg import tfMessage
from geometry_msgs.msg import Twist

# Module-level caches for the latest /tf message and occupancy grid,
# updated by the subscriber callbacks defined below.
itf = tfMessage()
igrid = OccupancyGrid()


def callback2(msg):
    """Cache the latest /tf message in the module-level ``itf``.

    Bug fix: the original assigned to a local variable named ``itf``, which
    shadowed the module global and silently discarded every message.
    """
    global itf
    itf = msg


def callback4(msg):
    """Cache the latest occupancy grid in the module-level ``igrid``.

    Bug fix: the original assignment only bound a function-local name,
    leaving the module-level ``igrid`` permanently stale.

    :param msg: incoming ``nav_msgs.msg.OccupancyGrid``
    """
    global igrid  # rebind the module-level cache, not a throwaway local
    igrid = msg


def main():
    """Initialize the NavigationNode and wire up its pub/sub endpoints.

    Subscribes to /tf and republishes under the simulation_robot namespace.

    NOTE(review): this function appears truncated in this view -- ``pub4``
    is created but never used here and no subscriber/spin loop follows;
    confirm against the full file.
    """
    rospy.init_node('NavigationNode', anonymous=True)

    # Republish incoming /tf messages under the simulation namespace;
    # callback2 caches each message as it arrives.
    pub2 = rospy.Publisher('/simulation_robot/tf', tfMessage, queue_size=10)
    rospy.Subscriber('/tf', tfMessage, callback2)

    # Forwarding publisher for the occupancy grid.
    pub4 = rospy.Publisher('/simulation_robot/OccupancyGrid',
                           OccupancyGrid,
                           queue_size=10)
Beispiel #39
0
        x_, y_, z_ = transform.TransformPoint(fData['longitude'][i],
                                              fData['latitude'][i],
                                              fData['height'][i])
        geoMsg.transform.translation.x = x_
        geoMsg.transform.translation.y = y_
        geoMsg.transform.translation.z = z_

        roll_imu_ = float(fData['roll'][i]) * math.pi / 180.0
        pitch_imu_ = float(fData['pitch'][i]) * math.pi / 180.0
        azimuth_imu_ = float(fData['azimuth'][i]) * math.pi / 180.0
        quat1 = tf.transformations.quaternion_about_axis(roll_imu_, (1, 0, 0))
        quat2 = tf.transformations.quaternion_about_axis(
            pitch_imu_, (0, -1, 0))
        quat3 = tf.transformations.quaternion_about_axis(
            -azimuth_imu_, (0, 0, 1))
        tf_quat = tf.transformations.quaternion_multiply(
            tf.transformations.quaternion_multiply(quat1, quat2), quat3)

        geoMsg.transform.rotation.x = tf_quat[0]
        geoMsg.transform.rotation.y = tf_quat[1]
        geoMsg.transform.rotation.z = tf_quat[2]
        geoMsg.transform.rotation.w = tf_quat[3]

        tfMsg = tfMessage()
        tfMsg.transforms.append(geoMsg)
        bag.write('/tf', tfMsg, t=rosTime)

    bag.close()
    print('Finished')
def go():
    """Stream newline-delimited JSON nav solutions over TCP and republish as ROS topics.

    Connects to the module-level ``(host, port)``, accumulates received data
    into a line buffer, and for every ``decimation``-th complete line publishes:
      * absolute ECEF odometry on ``absodom_pub``
      * relative ENU odometry on ``odom_pub``
      * receiver clock error on ``clock_error_pub``
      * an /enu -> ``child_frame_id`` transform on ``tf_pub``

    Returns when the remote side closes the connection.

    NOTE(review): ``buf`` is a str while ``socket.recv`` returns bytes on
    Python 3; together with ``xrange`` elsewhere in this file, this code
    assumes Python 2 -- confirm before porting.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))

    # The first line after connecting may be partial, so it is discarded.
    first = True

    buf = ''
    count = 0
    while True:
        d = s.recv(2**12)
        if not d:
            break  # remote side closed the connection
        buf += d

        # Split off complete lines; keep the trailing partial line in buf.
        lines = buf.split('\n')
        buf = lines[-1]
        for line in lines[:-1]:
            if first:
                first = False
                continue

            if count % decimation == 0:
                # NOTE: 'd' is reused here -- it now holds the decoded JSON
                # record, no longer the raw socket chunk.
                d = json.loads(line)

                # 9x9 covariance over [position(0:3), rel-position(3:6),
                # orientation(6:9)]; the pose message needs only the
                # position/orientation 6x6 sub-blocks, extracted below.
                ecef_cov = numpy.array(d[
                    'X_position_relative_position_orientation_ecef_covariance']
                                       )
                absodom_pub.publish(
                    Odometry(
                        header=Header(
                            # timestamps arrive in nanoseconds
                            stamp=rospy.Time.from_sec(d['timestamp'] * 1e-9),
                            frame_id='/ecef',
                        ),
                        child_frame_id=child_frame_id,
                        pose=PoseWithCovariance(
                            pose=Pose(
                                position=Point(*d['position_ecef']),
                                orientation=Quaternion(
                                    **d['orientation_ecef']),
                            ),
                            # Assemble the 6x6 pose covariance from the
                            # position (0:3) and orientation (6:9) blocks.
                            covariance=numpy.vstack((
                                numpy.hstack(
                                    (ecef_cov[0:3, 0:3], ecef_cov[0:3, 6:9])),
                                numpy.hstack(
                                    (ecef_cov[6:9, 0:3], ecef_cov[6:9, 6:9])),
                            )).flatten(),
                        ),
                        twist=TwistWithCovariance(
                            twist=Twist(
                                linear=Vector3(*d['velocity_body']),
                                angular=Vector3(*d['angular_velocity_body']),
                            ),
                            # Block-diagonal 6x6: linear and angular velocity
                            # covariances, no cross terms.
                            covariance=numpy.vstack((
                                numpy.hstack((d['X_velocity_body_covariance'],
                                              numpy.zeros((3, 3)))),
                                numpy.hstack(
                                    (numpy.zeros((3, 3)),
                                     d['X_angular_velocity_body_covariance'])),
                            )).flatten(),
                        ),
                    ))
                # Same twist, but pose expressed relative to the ENU frame.
                odom_pub.publish(
                    Odometry(
                        header=Header(
                            stamp=rospy.Time.from_sec(d['timestamp'] * 1e-9),
                            frame_id='/enu',
                        ),
                        child_frame_id=child_frame_id,
                        pose=PoseWithCovariance(
                            pose=Pose(
                                position=Point(*d['relative_position_enu']),
                                orientation=Quaternion(**d['orientation_enu']),
                            ),
                            covariance=numpy.array(d[
                                'X_relative_position_orientation_enu_covariance']
                                                   ).flatten(),
                        ),
                        twist=TwistWithCovariance(
                            twist=Twist(
                                linear=Vector3(*d['velocity_body']),
                                angular=Vector3(*d['angular_velocity_body']),
                            ),
                            covariance=numpy.vstack((
                                numpy.hstack((d['X_velocity_body_covariance'],
                                              numpy.zeros((3, 3)))),
                                numpy.hstack(
                                    (numpy.zeros((3, 3)),
                                     d['X_angular_velocity_body_covariance'])),
                            )).flatten(),
                        ),
                    ))
                clock_error_pub.publish(Float64(d['X_clock_error']))
                # Broadcast the ENU-relative pose as a tf transform too.
                # NOTE(review): Transform.translation is declared as a
                # Vector3 in geometry_msgs; a Point is passed here -- rospy
                # serializes it duck-typed, but confirm this is intended.
                tf_pub.publish(
                    tfMessage(transforms=[
                        TransformStamped(
                            header=Header(
                                stamp=rospy.Time.from_sec(d['timestamp'] *
                                                          1e-9),
                                frame_id='/enu',
                            ),
                            child_frame_id=child_frame_id,
                            transform=Transform(
                                translation=Point(*d['relative_position_enu']),
                                rotation=Quaternion(**d['orientation_enu']),
                            ),
                        ),
                    ], ))

            # Every line counts toward decimation, published or not.
            count += 1
Beispiel #41
0
# Benchmark configuration: iteration count and a transformer instance.
iterations = 10000

t = tf.Transformer()

def mkm():
    """Create the benchmark's canonical PARENT->THISFRAME transform.

    The transform is a pure translation of 5.0 along y with an identity
    rotation (Euler 0, 0, 0).
    """
    stamped = geometry_msgs.msg.TransformStamped()
    stamped.header.frame_id = "PARENT"
    stamped.child_frame_id = "THISFRAME"
    stamped.transform.translation.y = 5.0
    identity = tf.transformations.quaternion_from_euler(0, 0, 0)
    stamped.transform.rotation = geometry_msgs.msg.Quaternion(*identity)
    return stamped


# Fixture: a tfMessage carrying 20 identical PARENT->THISFRAME transforms.
tm = tfMessage([mkm() for i in range(20)])


def deserel_to_string(o):
    """Return the wire-format serialization of ROS message *o* as a string."""
    sink = StringIO.StringIO()
    o.serialize(sink)
    return sink.getvalue()


mstr = deserel_to_string(tm)  # serialized fixture, reused by the benchmarks


class Timer:
    # Lightweight benchmarking helper wrapping a zero-argument callable.
    # NOTE(review): the class body appears truncated in this chunk -- the
    # timing method(s) are not visible here; confirm against the full file.
    def __init__(self, func):
        self.func = func
from tf.msg import tfMessage

import tf

# Benchmark configuration: iteration count and a transformer instance.
iterations = 10000

t = tf.Transformer()
def mkm():
    """Return a PARENT->THISFRAME TransformStamped: identity rotation, y = 5.0."""
    msg = geometry_msgs.msg.TransformStamped()
    msg.header.frame_id = "PARENT"
    msg.child_frame_id = "THISFRAME"
    msg.transform.translation.y = 5.0
    quat = tf.transformations.quaternion_from_euler(0, 0, 0)
    msg.transform.rotation = geometry_msgs.msg.Quaternion(*quat)
    return msg

tm = tfMessage([mkm() for i in range(20)])  # fixture: 20 identical test transforms

def deserel_to_string(o):
    """Serialize ROS message *o* into an in-memory buffer and return its contents."""
    buf = StringIO.StringIO()
    o.serialize(buf)
    return buf.getvalue()

mstr = deserel_to_string(tm)  # serialized fixture, reused by the timing runs

class Timer:
    def __init__(self, func):
        self.func = func
    def mean(self, iterations = 1000000):
        started = time.time()
        for i in xrange(iterations):
            self.func()