Code example #1
    def get_ros_transform(self,
                          transform=None,
                          frame_id=None,
                          child_frame_id=None):
        """
        Function to provide the current ROS transform

        :return: the ROS transform
        :rtype: geometry_msgs.msg.TransformStamped
        """
        tf_msg = TransformStamped()
        if frame_id:
            tf_msg.header = self.get_msg_header(frame_id)
        else:
            tf_msg.header = self.get_msg_header("map")
        if child_frame_id:
            tf_msg.child_frame_id = child_frame_id
        else:
            tf_msg.child_frame_id = self.get_prefix()
        if transform:
            tf_msg.transform = transform
        else:
            tf_msg.transform = trans.carla_transform_to_ros_transform(
                self.carla_actor.get_transform())
        return tf_msg
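
The snippet above only builds the message. For orientation, here is a minimal, hypothetical sketch of how such a TransformStamped is typically broadcast; tf2_ros.TransformBroadcaster is the standard publisher, while the bridge argument and the 10 Hz rate are illustrative assumptions (rospy.init_node is assumed to have been called elsewhere), not part of the original example.

import rospy
import tf2_ros


def broadcast_actor_tf(bridge):
    # Re-publish the actor transform at a fixed rate; in ROS 1, sendTransform()
    # accepts a single TransformStamped or a list of them.
    broadcaster = tf2_ros.TransformBroadcaster()
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        broadcaster.sendTransform(bridge.get_ros_transform())
        rate.sleep()
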
Code example #2
    def get_tfs(self, sample, ego_pose=None):
        if ego_pose is None:
            sample_lidar = self.nusc.get('sample_data',
                                         sample['data']['LIDAR_TOP'])
            ego_pose = self.nusc.get('ego_pose',
                                     sample_lidar['ego_pose_token'])
        stamp = self.get_time(ego_pose)

        transforms = []

        # create ego transform
        ego_tf = TransformStamped()
        ego_tf.header.frame_id = 'map'
        ego_tf.header.stamp = stamp
        ego_tf.child_frame_id = 'base_link'
        ego_tf.transform = self.get_transform(ego_pose)
        transforms.append(ego_tf)

        for (sensor_id, sample_token) in sample['data'].items():
            sample_data = self.nusc.get('sample_data', sample_token)

            # create sensor transform
            sensor_tf = TransformStamped()
            sensor_tf.header.frame_id = 'base_link'
            sensor_tf.header.stamp = stamp
            sensor_tf.child_frame_id = sensor_id
            sensor_tf.transform = self.get_transform(
                self.nusc.get('calibrated_sensor',
                              sample_data['calibrated_sensor_token']))
            transforms.append(sensor_tf)

        return transforms
Code example #3
def save_dynamic_tf(bag, kitti_type, kitti, initial_time):
    print("Exporting time dependent transformations")
    if kitti_type.find("raw") != -1:
        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
            tf_oxts_msg = TFMessage()
            tf_oxts_transform = TransformStamped()
            tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
            tf_oxts_transform.header.frame_id = 'world'
            tf_oxts_transform.child_frame_id = 'base_link'

            transform = (oxts.T_w_imu)

            T_imu_to_base_link = np.eye(4, 4)
            T_imu_to_base_link[0:3, 3] = [2.71/2.0+0.05, -0.32, -0.93]
            transform = np.dot(transform,T_imu_to_base_link)

            t = transform[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(transform)
            oxts_tf = Transform()

            oxts_tf.translation.x = t[0]
            oxts_tf.translation.y = t[1]
            oxts_tf.translation.z = t[2]

            oxts_tf.rotation.x = q[0]
            oxts_tf.rotation.y = q[1]
            oxts_tf.rotation.z = q[2]
            oxts_tf.rotation.w = q[3]

            tf_oxts_transform.transform = oxts_tf
            tf_oxts_msg.transforms.append(tf_oxts_transform)

            bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp)

    elif kitti_type.find("odom") != -1:
        timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)
        for timestamp, tf_matrix in zip(timestamps, kitti.poses):
            tf_msg = TFMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
            tf_stamped.header.frame_id = 'camera_init'
            tf_stamped.child_frame_id = 'camera_gray_left'
            
            t = tf_matrix[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(tf_matrix)
            transform = Transform()

            transform.translation.x = t[0]
            transform.translation.y = t[1]
            transform.translation.z = t[2]

            transform.rotation.x = q[0]
            transform.rotation.y = q[1]
            transform.rotation.z = q[2]
            transform.rotation.w = q[3]

            tf_stamped.transform = transform
            tf_msg.transforms.append(tf_stamped)

            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)
Code example #4
    def publish_transforms(self, ts):
        ego_t = Transform()
        ego_t.translation.x = self.ego_pose[0]
        ego_t.translation.y = self.ego_pose[1]
        ego_t.translation.z = 0.0
        ego_quat = quaternion_from_euler(0.0, 0.0, self.ego_pose[2])
        ego_t.rotation.x = ego_quat[0]
        ego_t.rotation.y = ego_quat[1]
        ego_t.rotation.z = ego_quat[2]
        ego_t.rotation.w = ego_quat[3]

        ego_ts = TransformStamped()
        ego_ts.transform = ego_t
        ego_ts.header.stamp = ts
        ego_ts.header.frame_id = '/map'
        ego_ts.child_frame_id = 'ego_racecar/base_link'

        opp_t = Transform()
        opp_t.translation.x = self.opp_pose[0]
        opp_t.translation.y = self.opp_pose[1]
        opp_t.translation.z = 0.0
        opp_quat = quaternion_from_euler(0.0, 0.0, self.opp_pose[2])
        opp_t.rotation.x = opp_quat[0]
        opp_t.rotation.y = opp_quat[1]
        opp_t.rotation.z = opp_quat[2]
        opp_t.rotation.w = opp_quat[3]

        opp_ts = TransformStamped()
        opp_ts.transform = opp_t
        opp_ts.header.stamp = ts
        opp_ts.header.frame_id = '/map'
        opp_ts.child_frame_id = 'opp_racecar/base_link'

        self.br.sendTransform(ego_ts)
        self.br.sendTransform(opp_ts)
Code example #5
    def TFThread(self):
        while not rospy.is_shutdown():
            small_frame_list = []
            big_frame_list = []
            self_frame = self.robot_id + "/map"
            for tf_frame, tf in self.tf_between_robots_dict.items():
                if tf_frame[:len(self_frame)] == self_frame:
                    big_frame_list.append(tf.child_frame_id)
                elif tf_frame[-len(self_frame):] == self_frame:
                    small_frame_list.append(tf.header.frame_id)

            small_frame_list.sort()
            for i in range(1, len(small_frame_list)):
                tf1 = self.tf_between_robots_dict[small_frame_list[i - 1] +
                                                  ' to ' + self_frame]
                tf2 = self.tf_between_robots_dict[small_frame_list[i] +
                                                  ' to ' + self_frame]
                pose1 = posemath.fromMsg(trans2pose(tf1))
                pose2 = posemath.fromMsg(trans2pose(tf2))
                tf = TransformStamped()
                tf.header.stamp = rospy.Time.now()
                tf.header.frame_id = tf1.header.frame_id
                tf.child_frame_id = tf2.header.frame_id
                tf.transform = pose2trans(
                    posemath.toMsg(pose1 * pose2.Inverse()))
                print("publish tf: " + tf.header.frame_id + " to " +
                      tf.child_frame_id)
                self.br.sendTransformMessage(tf)
            if len(small_frame_list) > 0:
                print("publish tf: " + small_frame_list[-1] + ' to ' +
                      self_frame)
                self.br.sendTransformMessage(
                    self.tf_between_robots_dict[small_frame_list[-1] + ' to ' +
                                                self_frame])

            big_frame_list.sort()
            if len(big_frame_list) > 0:
                print("publish tf: " + self_frame + ' to ' + big_frame_list[0])
                self.br.sendTransformMessage(
                    self.tf_between_robots_dict[self_frame + ' to ' +
                                                big_frame_list[0]])
            for i in range(1, len(big_frame_list)):
                tf1 = self.tf_between_robots_dict[self_frame + ' to ' +
                                                  big_frame_list[i - 1]]
                tf2 = self.tf_between_robots_dict[self_frame + ' to ' +
                                                  big_frame_list[i]]
                pose1 = posemath.fromMsg(trans2pose(tf1))
                pose2 = posemath.fromMsg(trans2pose(tf2))
                tf = TransformStamped()
                tf.header.stamp = rospy.Time.now()
                tf.header.frame_id = tf1.child_frame_id
                tf.child_frame_id = tf2.child_frame_id
                tf.transform = pose2trans(
                    posemath.toMsg(pose1.Inverse() * pose2))
                print("publish tf: " + tf.header.frame_id + " to " +
                      tf.child_frame_id)
                self.br.sendTransformMessage(tf)
            time.sleep(1)
Code example #6
File: searcher.py  Project: Reo-Take/rovi_utils
def cb_load(msg):
  global Model,tfReg
#load point cloud
  for n,l in enumerate(Config["scenes"]):
    pcd=o3d.read_point_cloud(Config["path"]+"/"+l+".ply")
    Model[n]=np.reshape(np.asarray(pcd.points),(-1,3))
  rospy.Timer(rospy.Duration(Config["tf_delay"]),cb_master,oneshot=True)
  tfReg=[]
#load TF such as master/camera...
  for n,m in enumerate(Config["master_frame_id"]):
    path=Config["path"]+"/"+m.replace('/','_')+".yaml"
    try:
      f=open(path, "r+")
    except Exception:
      pub_msg.publish("searcher::file error "+path)
    else:
      yd=yaml.load(f)
      f.close()
      trf=tflib.dict2tf(yd)
      tf=TransformStamped()
      tf.header.stamp=rospy.Time.now()
      tf.header.frame_id="world"
      tf.child_frame_id=m
      tf.transform=trf
      print "world->",m
      print trf
      tfReg.append(tf)
  broadcaster.sendTransform(tfReg)
  Param.update(rospy.get_param("~param"))
  solver.learn(Model,Param)
  pub_msg.publish("searcher::model learning completed")
Code example #7
    def _compute_transform(self, sensor_data, cur_time):
        parent_frame_id = "base_link"
        child_frame_id = self.name

        t = TransformStamped()
        t.header.stamp = cur_time
        t.header.frame_id = parent_frame_id
        t.child_frame_id = child_frame_id

        # for camera we reorient it to look at the same axis as the opencv projection
        # in order to get easy depth cloud for RGBD camera
        t.transform = carla_transform_to_ros_transform(
            self.carla_object.get_transform())

        rotation = t.transform.rotation
        quat = [rotation.x, rotation.y, rotation.z, rotation.w]
        roll, pitch, yaw = tf.transformations.euler_from_quaternion(quat)

        roll -= math.pi / 2.0
        yaw -= math.pi / 2.0

        quat = tf.transformations.quaternion_from_euler(roll, pitch, yaw)

        t.transform.rotation.x = quat[0]
        t.transform.rotation.y = quat[1]
        t.transform.rotation.z = quat[2]
        t.transform.rotation.w = quat[3]

        self.process_msg_fun('tf', t)
Code example #8
def publish_transform_stamped_relative(model_name, parent_name, struct_x, struct_y, pub):
    ts = TransformStamped()
    ts.child_frame_id = model_name

    # Header
    ts.header.stamp = rospy.Time.now()
    ts.header.frame_id = parent_name

    # Translation
    translation = Vector3()
    translation.x = struct_x
    translation.y = struct_y
    translation.z = 0

    # Rotation
    quat = Quaternion()
    quat.x = 0
    quat.y = 0
    quat.z = 0
    quat.w = 1

    # Message
    transform = Transform()
    transform.translation = translation
    transform.rotation = quat
    ts.transform = transform

    # Publish a transform stamped message
    pub.sendTransform(ts)
Code example #9
def wjx_save_dynamic_tf(bag, kitti, initial_time):
    timestamps = map(lambda x: initial_time + x.total_seconds(),
                     kitti.timestamps)
    for timestamp, tf_matrix in zip(timestamps, kitti.poses):
        print len(kitti.poses)
        tf_msg = TFMessage()
        tf_stamped = TransformStamped()
        tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
        tf_stamped.header.frame_id = 'world'
        tf_stamped.child_frame_id = 'camera_left'

        t = tf_matrix[0:3, 3]
        q = tf.transformations.quaternion_from_matrix(tf_matrix)
        transform = Transform()

        transform.translation.x = t[0]
        transform.translation.y = t[1]
        transform.translation.z = t[2]

        transform.rotation.x = q[0]
        transform.rotation.y = q[1]
        transform.rotation.z = q[2]
        transform.rotation.w = q[3]

        tf_stamped.transform = transform
        tf_msg.transforms.append(tf_stamped)

        bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)
Code example #10
def pub_object_transform(foonet_msg):               
    for _found_id in foonet_msg.found_id:
        for msg in marker_msg:
            if _found_id == msg.id:
                tem_dic = compute_transfom(msg.pose.pose,robot_endpoint_msg,table_pose)
                T_object_gripper = TransformStamped()
                T_object_gripper.header = msg.header  
                T_object_gripper.transform = tem_dic["marker_gripper"]
                T_object_gripper.child_frame_id = str(msg.id)

                T_object_table = TransformStamped()
                T_object_table.transform = tem_dic["marker_table"]
                T_object_table.child_frame_id = str(msg.id)
                foonet_msg.object_gripper_transfrom.append(T_object_gripper)
                foonet_msg.object_table_transfrom.append(T_object_table)                        
    return foonet_msg
Code example #11
File: __init__.py  Project: valgur/kitti2bag
def save_dynamic_tf(bag, timestamps, tf_matrices, child_frame_id):
    print("Exporting time dependent transformations")
    for timestamp, tf_matrix in zip(timestamps, tf_matrices):
        tf_msg = TFMessage()
        tf_stamped = TransformStamped()
        tf_stamped.header.stamp = to_rostime(timestamp)
        tf_stamped.header.frame_id = 'world'
        tf_stamped.child_frame_id = child_frame_id

        t = tf_matrix[0:3, 3]
        q = quaternion_from_matrix(tf_matrix)
        transform = Transform()

        transform.translation.x = t[0]
        transform.translation.y = t[1]
        transform.translation.z = t[2]

        transform.rotation.x = q[0]
        transform.rotation.y = q[1]
        transform.rotation.z = q[2]
        transform.rotation.w = q[3]

        tf_stamped.transform = transform
        tf_msg.transforms.append(tf_stamped)

        bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)
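
As an illustrative counterpart (not part of the original project), transforms written to a bag this way can be read back with the standard rosbag API; the file name kitti.bag below is a placeholder.

import rosbag

with rosbag.Bag('kitti.bag') as bag:
    # Iterate over the /tf messages written by save_dynamic_tf() above.
    for topic, msg, t in bag.read_messages(topics=['/tf']):
        for tf_stamped in msg.transforms:
            print("%s -> %s at %.3f" % (tf_stamped.header.frame_id,
                                        tf_stamped.child_frame_id, t.to_sec()))
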
Code example #12
def transform_cloud_to_map(cloud):
    rospy.loginfo("to map from " + cloud.header.frame_id)

    transformation_store = TransformListener()
    rospy.sleep(2)

    t = transformation_store.getLatestCommonTime("map", cloud.header.frame_id)
    tr_r = transformation_store.lookupTransform("map", cloud.header.frame_id,
                                                t)

    tr = Transform()
    tr.translation = Vector3(tr_r[0][0], tr_r[0][1], tr_r[0][2])
    tr.rotation = Quaternion(tr_r[1][0], tr_r[1][1], tr_r[1][2], tr_r[1][3])
    tr_s = TransformStamped()
    tr_s.header = std_msgs.msg.Header()
    tr_s.header.stamp = rospy.Time.now()
    tr_s.header.frame_id = "map"
    tr_s.child_frame_id = cloud.header.frame_id
    tr_s.transform = tr
    t_kdl = transform_to_kdl(tr_s)
    points_out = []
    for p_in in pc2.read_points(cloud, field_names=["x", "y", "z", "rgb"]):
        p_out = t_kdl * PyKDL.Vector(p_in[0], p_in[1], p_in[2])
        points_out.append([p_out[0], p_out[1], p_out[2]])

    cloud.header.frame_id = "map"
    res = pc2.create_cloud(cloud.header, cloud.fields, points_out)
    rospy.loginfo(cloud.header)
    return res
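
A hedged aside: the same reprojection is often done with the tf2 API directly instead of PyKDL. The sketch below assumes tf2_sensor_msgs is available and that a tf2_ros.Buffer fed by a TransformListener is passed in; the 2-second timeout is an arbitrary choice.

import rospy
from tf2_sensor_msgs.tf2_sensor_msgs import do_transform_cloud


def transform_cloud_to_map_tf2(cloud, tf_buffer):
    # Look up the latest map <- cloud frame transform and let tf2_sensor_msgs
    # apply it to every point of the PointCloud2.
    tr_s = tf_buffer.lookup_transform("map", cloud.header.frame_id,
                                      rospy.Time(0), rospy.Duration(2.0))
    return do_transform_cloud(cloud, tr_s)
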
Code example #13
 def convert_pose_to_ros_msg(pose, name):
     transform_msg = TransformStamped()
     transform_msg.header.stamp = rospy.Time.now()
     transform_msg.header.frame_id = 'map'
     transform_msg.child_frame_id = name
     transform_msg.transform = convert_pose_to_tf_msg(pose)
     return transform_msg
Code example #14
    def _compute_transform(self, sensor_data, cur_time):
        parent_frame_id = "base_link"
        child_frame_id = self.name

        t = TransformStamped()
        t.header.stamp = cur_time
        t.header.frame_id = parent_frame_id
        t.child_frame_id = child_frame_id

        # for camera we reorient it to look at the same axis as the opencv projection
        # in order to get easy depth cloud for RGBD camera
        t.transform = carla_transform_to_ros_transform(
            self.carla_object.get_transform())

        rotation = t.transform.rotation
        quat = [rotation.x, rotation.y, rotation.z, rotation.w]
        quat_swap = tf.transformations.quaternion_from_matrix([[0, 0, 1, 0],
                                                               [-1, 0, 0, 0],
                                                               [0, -1, 0, 0],
                                                               [0, 0, 0, 1]])
        quat = tf.transformations.quaternion_multiply(quat, quat_swap)

        t.transform.rotation.x = quat[0]
        t.transform.rotation.y = quat[1]
        t.transform.rotation.z = quat[2]
        t.transform.rotation.w = quat[3]

        self.process_msg_fun('tf', t)
Code example #15
File: imu_to_pose.py  Project: ori-drs/imu_tools
def callback(msg):
    print "got imu"
    # Offset to put the cloud in a different place than the map:
    x_offset = 120
    y_offset = 0
    p = PoseWithCovarianceStamped()
    p.header = msg.header
    p.header.frame_id = "map"

    p.pose.pose.orientation = msg.orientation
    p.pose.pose.position.x = x_offset
    p.pose.pose.position.y = y_offset

    pub.publish(p)

    tfmsg = TFMessage()
    transformS = TransformStamped()

    transformS.header = msg.header
    transform = Transform()
    transform.rotation = msg.orientation
    transform.translation.x = x_offset
    transform.translation.y = y_offset
    transformS.transform = transform
    transformS.child_frame_id = "base"
    tfmsg.transforms.append(transformS)
    pub_tf.publish(tfmsg)
Code example #16
    def filter_transform(self, transform, K):
        # TODO: Kalman filter, for now fixed kalman gain

        Kx, Ky, K_rot = K.diagonal()
        #Kx, Ky, K_rot = 0.5, 0.5, 1
        t = transform

        new_t = TransformStamped()
        new_t.header.stamp = t.header.stamp
        new_t.header.frame_id = "map"
        new_t.child_frame_id = "map_filtered"
        new_t.transform = transform.transform  # TODO: pass-through; remove once the filter below is enabled
        return new_t  # TODO: remove; the gain-based filtering below is currently unreachable
        new_t.transform.translation.x = Kx * t.transform.translation.x
        new_t.transform.translation.y = Ky * t.transform.translation.y
        new_t.transform.translation.z = (
            Kx + Ky
        ) / 2 * t.transform.translation.z  # Keep z in case aruco markers are ill placed irl

        q = [
            t.transform.rotation.x, t.transform.rotation.y,
            t.transform.rotation.z, t.transform.rotation.w
        ]

        a1, a2, a3 = euler_from_quaternion(q)
        a1 = K_rot * a1  #*0 # Multiply by zero, no risk of drift
        a2 = K_rot * a2  #*0 # Multiply by zero, no risk of drift
        a3 = K_rot * a3
        q_new = quaternion_from_euler(a1, a2, a3)
        new_t.transform.rotation.x = q_new[0]
        new_t.transform.rotation.y = q_new[1]
        new_t.transform.rotation.z = q_new[2]
        new_t.transform.rotation.w = q_new[3]

        return new_t
Code example #17
 def calculate_joints(self, position):
    
     self.joint_angle_matricies = []
     for angle in position:
         self.joint_angle_matricies.append(transformations.euler_matrix(0, 0, angle))
     T_c = [np.identity(4)]
     tf_msg = TFMessage() 
     for index in xrange(len(self.urdf_transform_matricies)):
         urdf_transform_matrix = self.urdf_transform_matricies[index]
         joint_angle_matrix = self.joint_angle_matricies[index]
         transform_matrix = np.dot(urdf_transform_matrix, joint_angle_matrix)
         
         tf_stamp = TransformStamped()
         tf_stamp.child_frame_id = self.urdf_transforms[index].child_frame_id
         tf_stamp.header.frame_id = self.urdf_transforms[index].header.frame_id
         if index in (8, 10): #sets parent of fingers to link6
             index = 6
         T_c.append(np.dot(T_c[index], transform_matrix))
         
         
         tf_stamp.transform = msgify(Transform, T_c[-1])
         #tf_stamp.transform = msgify(Transform, transform_matrix)
         tf_msg.transforms.append(tf_stamp)
         #print transforms.header
         #print link_states.name[-1]
         #print link_states.pose[-1]
         #print '-----------------------------------------------'
     
     return tf_msg
Code example #18
File: sensors.py  Project: cyy1991/carla
    def _compute_transform(self, sensor_data, cur_time):
        parent_frame_id = "base_link"
        child_frame_id = self.name

        t = TransformStamped()
        t.header.stamp = cur_time
        t.header.frame_id = parent_frame_id
        t.child_frame_id = child_frame_id

        # for camera we reorient it to look at the same axis as the opencv projection
        # in order to get easy depth cloud for RGBD camera
        t.transform = carla_transform_to_ros_transform(
            self.carla_object.get_transform())

        rotation = t.transform.rotation
        quat = [rotation.x, rotation.y, rotation.z, rotation.w]
        quat_swap = tf.transformations.quaternion_from_matrix(
            [[0, 0, 1, 0],
            [-1, 0, 0, 0],
            [0, -1, 0, 0],
            [0, 0, 0, 1]])
        quat = tf.transformations.quaternion_multiply(quat, quat_swap)

        t.transform.rotation.x = quat[0]
        t.transform.rotation.y = quat[1]
        t.transform.rotation.z = quat[2]
        t.transform.rotation.w = quat[3]

        self.process_msg_fun('tf', t)
Code example #19
def transform_to_transform_stamped(trans, name, child_name):
    trans_stamped = TransformStamped()
    trans_stamped.header.stamp = rospy.Time.now()
    trans_stamped.header.frame_id = name
    trans_stamped.child_frame_id = child_name
    trans_stamped.transform = trans
    return trans_stamped
Code example #20
File: test_rospy.py  Project: pwais/psegs-ros-ext
def gen_transform(nanostamp):
    import tf
    from tf2_msgs.msg import TFMessage
    from geometry_msgs.msg import Transform
    from geometry_msgs.msg import TransformStamped

    tf_msg = TFMessage()
    tf_transform = TransformStamped()
    tf_transform.header.stamp = nanostamp_to_rostime(nanostamp)

    tf_transform.header.frame_id = 'src_frame'
    tf_transform.child_frame_id = 'child_frame'

    transform = Transform()
    r_4x4 = np.ones((4, 4))
    q = tf.transformations.quaternion_from_matrix(r_4x4)
    transform.rotation.x = q[0]
    transform.rotation.y = q[1]
    transform.rotation.z = q[2]
    transform.rotation.w = q[3]

    transform.translation.x = 1
    transform.translation.y = 2
    transform.translation.z = 3

    tf_transform.transform = transform
    tf_msg.transforms.append(tf_transform)
    return tf_msg
Code example #21
 def publish_transform(self, point):
     t = TransformStamped()
     t.header.stamp = rospy.Time.now()
     t.header.frame_id = self.map_frame
     t.child_frame_id = self.odom_frame
     t.transform = pf_utils.cvt_point2ros_transform(point)
     self.br.sendTransform(t)
Code example #22
def publish_transform(br, point, parent_frame, child_frame):
    t = TransformStamped()
    t.header.stamp = rospy.Time.now()
    t.header.frame_id = parent_frame
    t.child_frame_id = child_frame
    t.transform = cvt_point2ros_transform(point)
    br.sendTransform(t)
Code example #23
    def tag_callback(self, msg_tag):
        # Listen for the transform of the tag in the world
        avg = PoseAverage.PoseAverage()
        for tag in msg_tag.detections:
            try:
                Tt_w = self.tfbuf.lookup_transform(self.world_frame, "tag_{id}".format(id=tag.id), rospy.Time(), rospy.Duration(1))
                Mtbase_w=self.transform_to_matrix(Tt_w.transform)
                Mt_tbase = tr.concatenate_matrices(tr.translation_matrix((0,0,0.17)), tr.euler_matrix(0,0,np.pi))
                Mt_w = tr.concatenate_matrices(Mtbase_w,Mt_tbase)
                Mt_r=self.pose_to_matrix(tag.pose)
                Mr_t=np.linalg.inv(Mt_r)
                Mr_w=np.dot(Mt_w,Mr_t)
                Tr_w = self.matrix_to_transform(Mr_w)
                avg.add_pose(Tr_w)
                self.publish_sign_highlight(tag.id)
            except(tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as ex:
                rospy.logwarn("Error looking up transform for tag_%s", tag.id)
                rospy.logwarn(ex.message)

        Tr_w =  avg.get_average() # Average of the opinions

        # Broadcast the robot transform
        if Tr_w is not None:
            # Set the z translation, and x and y rotations to 0
            Tr_w.translation.z = 0
            rot = Tr_w.rotation
            rotz=tr.euler_from_quaternion((rot.x, rot.y, rot.z, rot.w))[2]
            (rot.x, rot.y, rot.z, rot.w) = tr.quaternion_from_euler(0, 0, rotz)
            T = TransformStamped()
            T.transform = Tr_w
            T.header.frame_id = self.world_frame
            T.header.stamp = rospy.Time.now()
            T.child_frame_id = self.duckiebot_frame
            self.pub_tf.publish(TFMessage([T]))
            self.lifetimer = rospy.Time.now()
Code example #24
def publish_transform_stamped(model_name, x, pub):
    ts = TransformStamped()
    ts.child_frame_id = model_name

    # Header
    ts.header.stamp = rospy.Time.now()
    ts.header.frame_id = "world"

    # Translation
    translation = Vector3()
    translation.x = x[0]
    translation.y = x[1]
    translation.z = x[2]

    # Rotation
    quat = Quaternion()
    quat.x = x[6]
    quat.y = x[7]
    quat.z = x[8]
    quat.w = x[9]

    # Message
    transform = Transform()
    transform.translation = translation
    transform.rotation = quat
    ts.transform = transform

    # Publish a transform stamped message
    pub.sendTransform(ts)
Code example #25
def maintain_tf_tree(odom_to_scene_before=None, odom_to_scene_after=None):
    global br, listener, self_ID
    base_to_scene = TransformStamped()
    base_to_scene.transform = trackutils.pose2trans(
        posemath.toMsg(
            kdl.Frame(kdl.Rotation.Quaternion(x=0.5, y=-0.5, z=0.5, w=-0.5))))
    base_to_scene.header.stamp = rospy.Time.now()
    base_to_scene.header.frame_id = "base_link{}".format(self_ID)
    base_to_scene.child_frame_id = "scene{}".format(self_ID)
    br.sendTransformMessage(base_to_scene)

    if odom_to_scene_before == None or odom_to_scene_after == None:
        map_to_odom = TransformStamped()
        map_to_odom.header.stamp = rospy.Time.now()
        map_to_odom.transform.rotation.w = 1.
        map_to_odom.header.frame_id = "/map{}".format(self_ID)
        map_to_odom.child_frame_id = "/odom{}".format(self_ID)
        print(map_to_odom)
        br.sendTransformMessage(map_to_odom)  # publish the map->odom transform on the TF tree
    elif odom_to_scene_before == odom_to_scene_after:
        listener.waitForTransform("map{}".format(self_ID),
                                  "odom{}".format(self_ID), rospy.Time(),
                                  rospy.Duration(4.0))
        map_to_odom = listener._buffer.lookup_transform(
            "map{}".format(self_ID), "odom{}".format(self_ID), rospy.Time(0))
        map_to_odom.header.stamp = rospy.Time.now()
        br.sendTransformMessage(map_to_odom)  # publish the map->odom transform on the TF tree
    else:
        listener.waitForTransform("map{}".format(self_ID),
                                  "odom{}".format(self_ID), rospy.Time(),
                                  rospy.Duration(4.0))
        map_to_odom = listener._buffer.lookup_transform(
            "map{}".format(self_ID), "odom{}".format(self_ID), rospy.Time(0))
        map_to_odom.header.stamp = rospy.Time.now()
        map_to_odom_kdl = posemath.fromMsg(
            trackutils.trans2pose(map_to_odom.transform))
        init_pose = kdl.Frame(
            kdl.Rotation.Quaternion(x=0.5, y=-0.5, z=0.5, w=-0.5))
        odom_to_scene_after_kdl = init_pose * posemath.fromMsg(
            odom_to_scene_after)
        odom_to_scene_before_kdl = init_pose * posemath.fromMsg(
            odom_to_scene_before)
        map_to_odom_kdl = map_to_odom_kdl * odom_to_scene_before_kdl * odom_to_scene_after_kdl.Inverse(
        )
        map_to_odom.transform = trackutils.pose2trans(
            posemath.toMsg(map_to_odom_kdl))
        br.sendTransformMessage(map_to_odom)  # publish the map->odom transform on the TF tree
Code example #26
    def timer_cb(self, event):
        # move robot (calculate new pose)
        dt = rospy.Time.now() - self.timestamp_last_update
        self.timestamp_last_update = rospy.Time.now()
        time_since_last_twist = rospy.Time.now() - self.timestamp_last_twist

        #print "dt =", dt.to_sec(), ". duration since last twist =", time_since_last_twist.to_sec()
        # we assume we're not moving any more if there is no new twist after 0.1 sec
        if time_since_last_twist < rospy.Duration(0.1):
            new_pose = copy.deepcopy(self.odom.pose.pose)
            yaw = tf.transformations.euler_from_quaternion([
                self.odom.pose.pose.orientation.x,
                self.odom.pose.pose.orientation.y,
                self.odom.pose.pose.orientation.z,
                self.odom.pose.pose.orientation.w
            ])[2] + self.twist.angular.z * dt.to_sec()
            quat = tf.transformations.quaternion_from_euler(0, 0, yaw)
            new_pose.orientation.x = quat[0]
            new_pose.orientation.y = quat[1]
            new_pose.orientation.z = quat[2]
            new_pose.orientation.w = quat[3]
            new_pose.position.x += self.twist.linear.x * dt.to_sec() * math.cos(
                yaw) - self.twist.linear.y * dt.to_sec() * math.sin(yaw)
            new_pose.position.y += self.twist.linear.x * dt.to_sec() * math.sin(
                yaw) + self.twist.linear.y * dt.to_sec() * math.cos(yaw)
            self.odom.pose.pose = new_pose

            # we're moving, so we set a non-zero twist
            self.odom.twist.twist.linear.x = self.twist.linear.x
            self.odom.twist.twist.linear.y = self.twist.linear.y
            self.odom.twist.twist.angular.z = self.twist.angular.z
        else:
            # reset twist as we're not moving anymore
            self.odom.twist.twist = Twist()

        # publish odometry
        odom = copy.deepcopy(self.odom)
        odom.header.stamp = rospy.Time.now()  # update to current time stamp
        self.pub_odom.publish(odom)

        # publish tf
        # pub base_footprint --> odom_combined
        t_loc = TransformStamped()
        t_loc.header.stamp = rospy.Time.now()
        t_loc.header.frame_id = "odom_combined"
        t_loc.child_frame_id = "base_footprint"
        t_loc.transform.translation = self.odom.pose.pose.position
        t_loc.transform.rotation = self.odom.pose.pose.orientation

        # pub odom_combined --> map
        t_odom = TransformStamped()
        t_odom.header.stamp = rospy.Time.now()
        t_odom.header.frame_id = "map"
        t_odom.child_frame_id = "odom_combined"
        t_odom.transform = self.initial_pose

        transforms = [t_loc, t_odom]

        self.br.sendTransform(transforms)
Code example #27
    def get_icv_transform(self, transform=None):
        """
        Function to provide the current icv transform

        :return: the icv transform
        :rtype: geometry_msgs.msg.TransformStamped
        """
        tf_msg = TransformStamped()
        tf_msg.header = self.get_msg_header("map")
        tf_msg.child_frame_id = self.get_prefix()
        if transform:
            tf_msg.transform = trans.carla_transform_to_icv_transform(
                transform)
        else:
            tf_msg.transform = trans.carla_transform_to_icv_transform(
                self.carla_actor.get_transform())
        return tf_msg
Code example #28
def transform_helper(vec3, frame):
    pos = TransformStamped()
    pos.child_frame_id = frame
    pos.header = Header()
    pos.transform = Transform()
    pos.transform.translation = vec3

    return pos
Code example #29
File: scan_scene.py  Project: HLP-R/hlpr_lookat
def transform_helper(vec3, frame):
    pos = TransformStamped()
    pos.child_frame_id = frame
    pos.header = Header()
    pos.transform = Transform()
    pos.transform.translation = vec3

    return pos
Code example #30
    def poseCB(self, p):
        self.odom_calibrating = True
        robot_pose = Pose()
        robot_pose = p.pose.pose
        robot_pose.position.x, robot_pose.position.y = self.projection(
            p.pose.pose.position.x, p.pose.pose.position.y)
        robot_pose.orientation.x = -p.pose.pose.orientation.x
        robot_pose.orientation.y = -p.pose.pose.orientation.y
        robot_pose.orientation.z = -p.pose.pose.orientation.z
        robot_pose.orientation = quat_rot(robot_pose.orientation, 0, 0, 90)
        debug_info(self.debug_output, "Odom calib tf received")

        # odom to reference
        while 1:
            try:
                odo_ref_trans = self.tfBuffer.lookup_transform(
                    self.robot_frame, self.odom_frame, rospy.Time())
                tf_odo_ref = TransformStamped()
                tf_odo_ref.header.frame_id = "odom_utm_calib"
                tf_odo_ref.child_frame_id = self.odom_frame
                tf_odo_ref.header.stamp = rospy.Time.now()
                tf_odo_ref.transform = odo_ref_trans.transform
                tfmsg_odo_ref = tf2_msgs.msg.TFMessage([tf_odo_ref])
                self.tf_pub.publish(tfmsg_odo_ref)
            except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                    tf2_ros.ExtrapolationException):
                continue

            # reference to utm
            tf_ref_utm = TransformStamped()
            tf_ref_utm.header.frame_id = self.gps_frame
            tf_ref_utm.child_frame_id = "odom_utm_calib"
            tf_ref_utm.header.stamp = rospy.Time.now()
            tf_ref_utm.transform.translation.x = robot_pose.position.x
            tf_ref_utm.transform.translation.y = robot_pose.position.y
            tf_ref_utm.transform.translation.z = robot_pose.position.z
            tf_ref_utm.transform.rotation = robot_pose.orientation
            tfmsg_ref_utm = tf2_msgs.msg.TFMessage([tf_ref_utm])
            self.tf2_pub.publish(tfmsg_ref_utm)
            self.rate.sleep()

            # Check the tf exists correctly
            try:
                trans = self.tfBuffer.lookup_transform(self.gps_frame,
                                                       "odom_utm_calib",
                                                       rospy.Time())
                trans2 = self.tfBuffer.lookup_transform(
                    "odom_utm_calib", self.odom_frame, rospy.Time())
                if math.fabs(trans.transform.translation.x -
                             robot_pose.position.x) > 0.1 or math.fabs(
                                 trans.transform.translation.y -
                                 robot_pose.position.y) > 0.1:
                    continue
                self.odom_calibrating = False
                debug_info(self.debug_output, "Odometry calibrated")
                break
            except:
                continue
Code example #31
def fid_tf_cb(msg):
    global all_fids
    tfs = msg.transforms
    f_id_list = []
    update = False
    if tfs:
        bcaster = tf2.TransformBroadcaster()
        for tf in tfs:
            # broadcast tf from camera to fid. This lets us look up the tf later and it does all the hairy math for us.
            t2 = TransformStamped()
            t2.transform = tf.transform
            t2.header.stamp = rospy.Time.now()
            t2.header.frame_id = cam_frame
            t2.child_frame_id = "fid{}".format(tf.fiducial_id)
            bcaster.sendTransform(t2)
            f_id_list.append("fid{}".format(tf.fiducial_id))
        for fid in f_id_list:
            try:
                # get the tf from the fiducial to the camera or odom (if available)
                fid_shift, fid_rot = listener.lookupTransform(
                    frame, fid, rospy.Time(0))
                fid_rot = euler_from_quaternion(fid_rot)
                if fid not in all_fids.keys():  # automatically add new fids
                    all_fids[fid] = {
                        "fid": fid.strip("fid"),
                        "pose": {
                            "location": fid_shift,
                            "orientation": fid_rot
                        }
                    }  # update list of all seen fiducials
                    update = True
                else:  # only update a pose if it is significantly different
                    old_shift = all_fids[fid]["pose"]["location"]
                    old_rot = all_fids[fid]["pose"]["orientation"]
                    shift_diff = sum(
                        [abs(i - j) for i, j in zip(old_shift, fid_shift)])
                    rot_diff = sum(
                        [abs(i - j) for i, j in zip(old_rot, fid_rot)])
                    if shift_diff + rot_diff > pose_update_thresh:
                        all_fids[fid] = {
                            "fid": fid.strip("fid"),
                            "pose": {
                                "location": fid_shift,
                                "orientation": fid_rot
                            }
                        }  # update list of all seen fiducials
                        update = True
            except:
                pass
    if update:
        package = json.dumps(
            OrderedDict([("fid_count", len(all_fids.keys())),
                         ("dict", fid_dict), ("frame", frame),
                         ("data", [all_fids[key]
                                   for key in all_fids.keys()])]))
        # push to redis and save most recent json to disk
        redis.set(redis_key, str(package))
        bt.save_json_file("fiddump.json", json.loads(package))
Code example #32
File: common.py  Project: jdewaen/ros_vive
    def func(frame_id, child_frame_id, transform):
        transform_msg = TransformStamped()
        transform_msg.header.stamp = rospy.Time.now()
        transform_msg.header.seq = len(transforms)
        transform_msg.header.frame_id = frame_id
        transform_msg.child_frame_id = child_frame_id
        transform_msg.transform = transform

        transforms.append(transform_msg)
        broadcaster.sendTransform(transforms)
Code example #33
 def publish_transform(self):
     tf_msg = TFMessage()
     tf = TransformStamped()
     with self.lock:
         tf.transform = self.transform
     tf.header.frame_id = self.parent_frame
     tf.header.stamp = rospy.get_rostime()
     tf.child_frame_id = self.child_frame
     tf_msg.transforms.append(tf)
     self.tf_pub.publish(tf_msg)
Code example #34
    def run(self):
        marker_map = rospy.get_param('~marker_map', {})
        args = {
            'device': rospy.get_param('~device'),
            'marker_map': marker_map,
            'callback_global': self.callback_global,
            'callback_local': self.callback_local,
        }
        parameters = self.get_options()

        self.fixed_frame_id = rospy.get_param('~fixed_frame_id', 'map')
        self.robot_frame_id = rospy.get_param('~robot_frame_id', 'base_link')
        self.stargazer_frame_id = rospy.get_param('~stargazer_frame_id', 'stargazer')
        self.map_frame_prefix = rospy.get_param('~map_frame_prefix', 'stargazer/map_')
        self.marker_frame_prefix = rospy.get_param('~marker_frame_prefix', 'stargazer/marker_')

        self.covariance = rospy.get_param('~covariance', None)
        if self.covariance is None:
            raise Exception('The "covariance" parameter is required.')
        elif len(self.covariance) != 36:
            raise Exception('The "covariance" parameter must be a 36 element vector.')

        # Publish static TF frames for the Stargazer map.
        stamp_now = rospy.Time.now()
        map_tf_msg = TFMessage()

        for marker_id, Tmap_marker in marker_map.iteritems():
            marker_tf_msg = TransformStamped()
            marker_tf_msg.header.stamp = stamp_now
            marker_tf_msg.header.frame_id = self.fixed_frame_id
            marker_tf_msg.child_frame_id = self.map_frame_prefix + marker_id
            marker_tf_msg.transform = matrix_to_transform(Tmap_marker)
            map_tf_msg.transforms.append(marker_tf_msg)

        self.tf_static_pub = rospy.Publisher('tf_static', TFMessage, latch=True)
        self.tf_static_pub.publish(map_tf_msg)
        
        # Start publishing Stargazer data.
        with StarGazer(**args) as stargazer:
            # The StarGazer might be streaming data. Turn off streaming mode.
            stargazer.stop_streaming()

            # Set all parameters, possibly to their default values. This is the
            # safest option because the parameters can be corrupted when the
            # StarGazer is powered off.
            for name, value in parameters.iteritems():
                stargazer.set_parameter(name, value)

            # Start streaming. ROS messages will be published in callbacks.
            stargazer.start_streaming()
            rospy.spin()

            # Stop streaming. Try to clean up after ourselves.
            stargazer.stop_streaming()
Code example #35
 def make_transform_stamped(self, stamp, parent_frame, child_frame):
     try:
         (trans,rot) = self.listener.lookupTransform(parent_frame, child_frame, rospy.Time(0)) # current map->base transform
     except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
         rospy.logwarn("[%s]" + " Failed to solve tf: %s to %s", rospy.get_name(), parent_frame, child_frame)
         return None
     tf_stamped = TransformStamped()
     tf_stamped.header.stamp = stamp
     tf_stamped.header.frame_id = parent_frame
     tf_stamped.child_frame_id = child_frame
     tf_stamped.transform = Transform(Vector3(*trans), Quaternion(*rot))
     return tf_stamped
Code example #36
File: markers.py  Project: cyy1991/carla
 def process_msg(self, data, cur_time):
     t = TransformStamped()
     t.header.stamp = cur_time
     t.header.frame_id = self.world_link
     t.child_frame_id = "base_link"
     t.transform = carla_transform_to_ros_transform(
         carla_Transform(data.transform))
     header = Header()
     header.stamp = cur_time
     header.frame_id = self.world_link
     marker = get_vehicle_marker(
         data, header=header, marker_id=0, is_player=True)
     self.process_msg_fun(self.name, marker)
     self.process_msg_fun('tf', t)
Code example #37
File: pose_converter.py  Project: gt-ros-pkg/hrl
 def to_tf_stamped_msg(*args):
     header, homo_mat, quat_rot, _ = PoseConverter._make_generic(args)
     if homo_mat is None:
         rospy.logwarn("[pose_converter] Unknown pose type.")
         return None, None, None, None
     tf_stamped = TransformStamped()
     if header is None:
         tf_stamped.header.stamp = rospy.Time.now()
     else:
         tf_stamped.header.seq = header[0]
         tf_stamped.header.stamp = header[1]
         tf_stamped.header.frame_id = header[2]
     tf_stamped.transform = Transform(Vector3(*homo_mat[:3,3].T.A[0]), Quaternion(*quat_rot))
     return tf_stamped
Code example #38
File: pf_base.py  Project: snowhong/backup
    def recalculate_transform(self, currentTime):
        """
        Creates updated transform from /odom to /map given recent odometry and
        laser data.
        
        :Args:
            | currentTime (rospy.Time()): Time stamp for this update
         """
        
        transform = Transform()

        T_est = transformations.quaternion_matrix([self.estimatedpose.pose.pose.orientation.x,
                                                   self.estimatedpose.pose.pose.orientation.y,
                                                   self.estimatedpose.pose.pose.orientation.z,
                                                   self.estimatedpose.pose.pose.orientation.w])
        T_est[0, 3] = self.estimatedpose.pose.pose.position.x
        T_est[1, 3] = self.estimatedpose.pose.pose.position.y
        T_est[2, 3] = self.estimatedpose.pose.pose.position.z
        
        T_odom = transformations.quaternion_matrix([self.last_odom_pose.pose.pose.orientation.x,
                                                   self.last_odom_pose.pose.pose.orientation.y,
                                                   self.last_odom_pose.pose.pose.orientation.z,
                                                   self.last_odom_pose.pose.pose.orientation.w])
        T_odom[0, 3] = self.last_odom_pose.pose.pose.position.x
        T_odom[1, 3] = self.last_odom_pose.pose.pose.position.y
        T_odom[2, 3] = self.last_odom_pose.pose.pose.position.z
        T = np.dot(T_est, np.linalg.inv(T_odom))
        q = transformations.quaternion_from_matrix(T) #[:3, :3])

        transform.translation.x = T[0, 3] 
        transform.translation.y = T[1, 3] 
        transform.translation.z = T[2, 3] 
        transform.rotation.x = q[0]
        transform.rotation.y = q[1]
        transform.rotation.z = q[2]
        transform.rotation.w = q[3]
        

        # Insert new Transform into a TransformStamped object and add to the
        # tf tree
        new_tfstamped = TransformStamped()
        new_tfstamped.child_frame_id = "/odom"
        new_tfstamped.header.frame_id = "/map"
        new_tfstamped.header.stamp = currentTime
        new_tfstamped.transform = transform

        # Add the transform to the list of all transforms
        self.tf_message = tfMessage(transforms=[new_tfstamped])
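
For intuition, the correction computed above is T_map_odom = T_map_base * inv(T_odom_base), so composing it with the odometry pose recovers the estimated pose. A tiny self-contained check with made-up poses (numpy and tf.transformations only; the numbers are arbitrary):

import numpy as np
from tf import transformations

# Made-up poses of the robot base expressed in the map and odom frames.
T_est = transformations.euler_matrix(0, 0, 1.0)
T_est[:3, 3] = [2.0, 1.0, 0.0]
T_odom = transformations.euler_matrix(0, 0, 0.2)
T_odom[:3, 3] = [0.5, 0.3, 0.0]

# Same algebra as recalculate_transform(): the map->odom correction.
T_correction = np.dot(T_est, np.linalg.inv(T_odom))
assert np.allclose(np.dot(T_correction, T_odom), T_est)
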
Code example #39
File: estimate.py  Project: Paethon/camera_pose
def pose_to_transform(pose, name, time):
    tr = Transform()
    tr.translation.x = pose.position.x
    tr.translation.y = pose.position.y
    tr.translation.z = pose.position.z
    tr.rotation.x = pose.orientation.x
    tr.rotation.y = pose.orientation.y
    tr.rotation.z = pose.orientation.z
    tr.rotation.w = pose.orientation.w

    trs = TransformStamped()
    trs.transform = tr
    trs.header.stamp = time
    trs.header.frame_id = 'world'
    trs.child_frame_id = name
    return trs
Code example #40
  def run(self):
    rospy.init_node('calibrate_bundle')
    next_pub = rospy.Publisher('next_poses', String, queue_size = 1)
    tf_b = tf.TransformBroadcaster()
    tf_l = tf.TransformListener()
    
    rate = rospy.Rate(10)
    
    rospy.Service('commit_next_poses', CommitNextPoses, self.commit_next_poses )
    rospy.Service('save_known_poses', SaveKnownPoses, self.save_known_poses )

    main_frame_avg = self.main_marker + '_avg'
    main_frame_bundle = self.main_marker + '_bundle'
    
    while not rospy.is_shutdown():
      self.next_poses = {}
      for bundle_marker in self.bundle_markers:
        marker_avg_frame = bundle_marker + '_avg'
        marker_bundle_frame = bundle_marker + '_bundle'
        if marker_bundle_frame not in self.known_poses.keys():
          try:
            now = rospy.Time.now()
            if (now - tf_l.getLatestCommonTime(main_frame_avg, marker_avg_frame)) < TIME_THRESH:
              T,R = tf_l.lookupTransform(main_frame_avg, marker_avg_frame, rospy.Time(0))
              tfs = TransformStamped()
              tfs.header.stamp = now
              tfs.header.frame_id = main_frame_bundle
              tfs.child_frame_id = marker_bundle_frame
              tfs.transform = numpy_to_tf_msg(T,R)
              self.next_poses[tfs.child_frame_id] = tfs

          except (
            tf.Exception, tf.LookupException, tf.ConnectivityException,
            tf.ExtrapolationException) as e :
            continue

      tf_b.sendTransform([0,0,0],[0,0,0,1],now, main_frame_bundle, main_frame_avg)
      
      for kp in self.known_poses.values():
        base_frame = kp.header.frame_id
        child_frame = kp.child_frame_id
        T,R = tf_msg_to_numpy(kp)
        tf_b.sendTransform(T,R,now,child_frame,base_frame)
      
      next_pub.publish(str(self.next_poses.keys()))
      
      rate.sleep()
Code example #41
def tf_from_tfmat(mat):
    tf_stamped = TransformStamped()
    _tf = Transform()

    t_vec = tf.transformations.translation_from_matrix(mat)
    _tf.translation.x = t_vec[0]
    _tf.translation.y = t_vec[1]
    _tf.translation.z = t_vec[2]

    q_vec = tf.transformations.quaternion_from_matrix(mat)
    _tf.rotation.x = q_vec[0]
    _tf.rotation.y = q_vec[1]
    _tf.rotation.z = q_vec[2]
    _tf.rotation.w = q_vec[3]
    tf_stamped.transform = _tf
    # header and child_frame_id needs to be defined
    return tf_stamped
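
As the comment above notes, the caller still has to fill in the header and child_frame_id before the message can be used. A short illustrative sketch of that step; the frame names, the broadcaster argument and broadcast_from_matrix itself are placeholders, not part of the original code.

import rospy


def broadcast_from_matrix(mat, broadcaster):
    # Stamp the bare transform returned by tf_from_tfmat() and send it out;
    # "world" and "marker" are placeholder frame names.
    tf_stamped = tf_from_tfmat(mat)
    tf_stamped.header.stamp = rospy.Time.now()
    tf_stamped.header.frame_id = "world"
    tf_stamped.child_frame_id = "marker"
    broadcaster.sendTransform(tf_stamped)
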
Code example #42
File: sensors.py  Project: cyy1991/carla
    def _compute_transform(self, sensor_data, cur_time):
        parent_frame_id = "base_link"
        child_frame_id = self.name

        t = TransformStamped()
        t.header.stamp = cur_time
        t.header.frame_id = parent_frame_id
        t.child_frame_id = child_frame_id
        t.transform = carla_transform_to_ros_transform(
            self.carla_object.get_transform())

        # for some reasons lidar sends already rotated cloud,
        # so it is need to ignore pitch and roll
        r = t.transform.rotation
        quat = [r.x, r.y, r.z, r.w]
        roll, pitch, yaw = tf.transformations.euler_from_quaternion(quat)
        quat = tf.transformations.quaternion_from_euler(0, 0, yaw)
        t.transform.rotation.x = quat[0]
        t.transform.rotation.y = quat[1]
        t.transform.rotation.z = quat[2]
        t.transform.rotation.w = quat[3]
        self.process_msg_fun('tf', t)
Code example #43
def main():
    rospy.init_node('helmet')
    pub = rospy.Publisher('helmet', TransformStamped, queue_size=10)

    tx, ty, tz = 0, 0, 0
    w0, w1, w2 = 0, 1, 0
    theta = 0 #-5 * pi / 180
    r = rospy.Rate(100)
    while True:
        tx, ty, tz = tx+delt(), ty+delt(), tz+delt()
        w0, w1, w2, = w0+delt(), w1+delt(), w2+delt()
        theta = (theta+delt()) % (2*pi)
        
        quat = ks.rot_to_quaternion(ks.rotation_3d(np.array([w0, w1, w2]), theta))
        translation = Vector3(tx, ty, tz)
        rotation = Quaternion(*quat)
        transform = Transform(translation, rotation)
        ts = TransformStamped()
        ts.transform = transform

        pub.publish(ts)
        r.sleep()
Code example #44
def avg_transforms(tf_groups):
  avgs = []
  for tf_group in tf_groups:
    trans = numpy.empty((len(tf_group),3))
    rots = numpy.empty((len(tf_group),4))
    for i in range(len(tf_group)):
      trans[i,:], rots[i,:] = tf_msg_to_numpy(tf_group[i])
  
    avg_trans = trans.mean(axis=0)
    avg_rot = rots.mean(axis=0)
    # Assuming the quaternions in each group are close to one another, averaging
    # x, y, z and recomputing w so the result stays a unit quaternion is a good
    # approximation of the average rotation.
    avg_rot[3] = (1.0-(avg_rot[:3]**2).sum())**0.5

    avg_tf = TransformStamped()
    avg_tf.header.frame_id = tf_group[0].header.frame_id
    avg_tf.child_frame_id = tf_group[0].child_frame_id
    avg_tf.transform = numpy_to_tf_msg(avg_trans, avg_rot)

    avgs.append(avg_tf)
  
  return avgs


def main():

    parser = argparse.ArgumentParser(description="Create the tf messages from "
                                                 "the Tango logs.")
    parser.add_argument('-d', '--directory',
                        help="The folder must have images.txt, "
                             "depth_to_imu_transformation.txt, "
                             "fisheye_to_imu_transformation.txt, "
                             "narrow_to_imu_transformation.txt",
                        required=True)
    parser.add_argument('-o', '--output',
                        help='output bag file (with location)',
                        metavar='bag_filename',
                        type=str,
                        required=True
                        )
    parser.add_argument('-y',
                        help='if images_adjusted.txt is found in the same folder '
                             'as the supplied filename, then use it without '
                             'asking',
                        metavar='True/False',
                        type=bool,
                        default=False,
                        choices=[True, False]
                        )

    arguments = parser.parse_args()
    folder_name = arguments.directory
    standard_file = os.path.join(folder_name, 'images.txt')
    alt_file = os.path.join(folder_name, 'images_adjusted.txt')
    if os.path.exists(alt_file):
        if arguments.y:
            images_file = open(alt_file)
        else:
            reply = None
            while reply not in ['y', 'n']:
                print("The images_adjusted.txt file is in the folder {}, do you"
                      " want to use that instead? [y/n]".format(folder_name))
                reply = raw_input()
            if reply == 'y':
                images_file = open(alt_file)
            else:
                images_file = open(standard_file)
    else:
        images_file = open(standard_file)
    print("Processing data from {}...".format(images_file.name))

    #depth to imu
    depth_np = np.loadtxt(os.path.join(folder_name,
                                       "depth_to_imu_transformation.txt",
                                       ),
                          delimiter=',').reshape(3, 4)
    rotation_matrix = np.vstack((depth_np, [0, 0, 0, 1]))
    quaternion = transformations.quaternion_from_matrix(rotation_matrix)
    depth_transform_msg = Transform()
    depth_transform_msg.translation.x = rotation_matrix[0, 3]
    depth_transform_msg.translation.y = rotation_matrix[1, 3]
    depth_transform_msg.translation.z = rotation_matrix[2, 3]
    depth_transform_msg.rotation.x = quaternion[0]
    depth_transform_msg.rotation.y = quaternion[1]
    depth_transform_msg.rotation.z = quaternion[2]
    depth_transform_msg.rotation.w = quaternion[3]

    #fisheye to imu
    depth_np = np.loadtxt(os.path.join(folder_name,
                                       "fisheye_to_imu_transformation.txt"),
                          delimiter=',').reshape(3, 4)
    rotation_matrix = np.vstack((depth_np, [0, 0, 0, 1]))
    quaternion = transformations.quaternion_from_matrix(rotation_matrix)
    fisheye_transform_msg = Transform()
    fisheye_transform_msg.translation.x = rotation_matrix[0, 3]
    fisheye_transform_msg.translation.y = rotation_matrix[1, 3]
    fisheye_transform_msg.translation.z = rotation_matrix[2, 3]
    fisheye_transform_msg.rotation.x = quaternion[0]
    fisheye_transform_msg.rotation.y = quaternion[1]
    fisheye_transform_msg.rotation.z = quaternion[2]
    fisheye_transform_msg.rotation.w = quaternion[3]

    #narrow to imu
    depth_np = np.loadtxt(os.path.join(folder_name,
                                       "narrow_to_imu_transformation.txt"),
                          delimiter=',').reshape(3, 4)
    rotation_matrix = np.vstack((depth_np, [0, 0, 0, 1]))
    quaternion = transformations.quaternion_from_matrix(rotation_matrix)
    narrow_transform_msg = Transform()
    narrow_transform_msg.translation.x = rotation_matrix[0, 3]
    narrow_transform_msg.translation.y = rotation_matrix[1, 3]
    narrow_transform_msg.translation.z = rotation_matrix[2, 3]
    narrow_transform_msg.rotation.x = quaternion[0]
    narrow_transform_msg.rotation.y = quaternion[1]
    narrow_transform_msg.rotation.z = quaternion[2]
    narrow_transform_msg.rotation.w = quaternion[3]

    #for each entry in images.txt, add a transformation with the values above
    #TODO skip some values? we don't need high frame rate!
    with rosbag.Bag(arguments.output, 'w') as outputbag:
        for lineno, line in enumerate(images_file):
            #lines must be of the form:
            #image_basename,t_android,t_movidius,r11,r12,r12,t1,r21,r22,r23,t2,r31,r32,r33,t3
            tokens = line.strip('\n').split(',')

            if (len(tokens)) != 15:
                print("Line {0} appears to be wrong: \"{1}\" ".format(lineno, tokens))
                continue

            msg = tfMessage()

            #using the android_ts as the timestamp
            ts = float(tokens[1])

            #depth
            tf_stamped = TransformStamped()
            tf_stamped.header.frame_id = "/tango/imu"
            tf_stamped.header.seq = lineno
            tf_stamped.header.stamp = rospy.Time.from_seconds(ts)
            tf_stamped.child_frame_id = "/tango/depth"
            tf_stamped.transform = depth_transform_msg
            msg.transforms.append(tf_stamped)

            #fisheye
            tf_stamped = TransformStamped()
            tf_stamped.header.frame_id = "/tango/imu"
            tf_stamped.header.seq = lineno
            tf_stamped.header.stamp = rospy.Time.from_seconds(ts)
            tf_stamped.child_frame_id = "/tango/fisheye"
            tf_stamped.transform = fisheye_transform_msg
            msg.transforms.append(tf_stamped)

            #narrow
            tf_stamped = TransformStamped()
            tf_stamped.header.frame_id = "/tango/imu"
            tf_stamped.header.seq = lineno
            tf_stamped.header.stamp = rospy.Time.from_seconds(ts)
            tf_stamped.child_frame_id = "/tango/narrow"
            tf_stamped.transform = narrow_transform_msg
            msg.transforms.append(tf_stamped)

            outputbag.write("tf", msg, rospy.Time.from_seconds(ts))

    outputbag.close()
    print("Bag creation complete, bagfile: {}".format(outputbag.filename))