def test_transformer_wait_for_transform(self):
    """waitForTransform raises with no data, then succeeds once transforms
    bracketing the query time have been set."""
    transformer = tf.Transformer()
    transformer.setUsingDedicatedThread(1)

    # No data yet: waiting for a transform at t=4.0 must raise.
    try:
        transformer.waitForTransform("PARENT", "THISFRAME",
                                     rospy.Time().from_sec(4.0),
                                     rospy.Duration(3.0))
        self.assertFalse("This should throw")  # only reached if no exception
    except tf.Exception as ex:
        pass

    # Insert transforms at t=3.0 and t=5.0 so that t=4.0 is interpolable.
    msg = geometry_msgs.msg.TransformStamped()
    msg.header.stamp = rospy.Time().from_sec(3.0)
    msg.header.frame_id = "PARENT"
    msg.child_frame_id = "THISFRAME"
    msg.transform.translation.y = 5.0
    msg.transform.rotation.x = 0.04997917
    msg.transform.rotation.y = 0
    msg.transform.rotation.z = 0
    msg.transform.rotation.w = 0.99875026
    transformer.setTransform(msg)
    msg.header.stamp = rospy.Time().from_sec(5.0)
    transformer.setTransform(msg)

    # The same wait must now complete without raising.
    try:
        transformer.waitForTransform("PARENT", "THISFRAME",
                                     rospy.Time().from_sec(4.0),
                                     rospy.Duration(3.0))
    except tf.Exception as ex:
        self.assertFalse("This should not throw")
def test_wait_for_transform(self):
    """Exercise waitForTransform with and without a dedicated thread.

    Fixes over the previous version:
      * the helper's assertions were inverted (both only bounded ``delta``
        from above instead of checking ``|t - delta| < epsilon``);
      * the *start* timestamp was passed where the *elapsed* time was
        expected, so the timing check never measured anything;
      * ``time.clock()`` was removed in Python 3.8 — use ``time.time()``.
    """

    def elapsed_time_within_epsilon(t, delta, epsilon):
        # Measured duration t must lie within epsilon of the expected delta.
        self.assertLess(t - epsilon, delta)
        self.assertGreater(t + epsilon, delta)

    t = tf.Transformer()
    self.common(t)
    timeout = rospy.Duration(0.1)
    epsilon = 0.05

    # Check for dedicated thread exception, existing frames
    self.assertRaises(
        tf.Exception,
        lambda: t.waitForTransform("PARENT", "THISFRAME", rospy.Time(), timeout))
    # Check for dedicated thread exception, non-existing frames
    self.assertRaises(
        tf.Exception,
        lambda: t.waitForTransform("MANDALAY", "JUPITER", rospy.Time(), timeout))

    t.setUsingDedicatedThread(True)

    # This will no longer throw
    self.assertEqual(
        t.waitForTransform("PARENT", "THISFRAME", rospy.Time(), timeout), None)

    # Verify exception still thrown with non-existing frames, and that the
    # call blocked for roughly the requested timeout.
    start = time.time()
    self.assertRaises(
        tf.Exception,
        lambda: t.waitForTransform("MANDALAY", "JUPITER", rospy.Time(), timeout))
    elapsed_time_within_epsilon(time.time() - start, timeout.to_sec(), epsilon)
def create_transformer(rostime_txyz_qxyzw_list, target_frame, source_frame):
    """Create a ROS transformer from a pose list representing W_T_B.

    :param rostime_txyz_qxyzw_list: each row [rostime, tx, ty, tz, qx, qy, qz, qw]
    :param target_frame: label of the world frame {W}
    :param source_frame: label of the reference body frame {B}
    :return: a transformer that can be used for querying interpolated poses
    """
    # Cache up to an hour of transforms.
    max_cache_duration = rospy.Duration(3600.0)
    transformer = tf.Transformer(True, max_cache_duration)
    for row in rostime_txyz_qxyzw_list:
        msg = geometry_msgs.msg.TransformStamped()
        msg.header.stamp = row[0]
        msg.header.frame_id = target_frame
        msg.child_frame_id = source_frame
        # translation tx, ty, tz
        msg.transform.translation.x = row[1]
        msg.transform.translation.y = row[2]
        msg.transform.translation.z = row[3]
        # quaternion qx, qy, qz, qw
        msg.transform.rotation.x = row[4]
        msg.transform.rotation.y = row[5]
        msg.transform.rotation.z = row[6]
        msg.transform.rotation.w = row[7]
        transformer.setTransform(msg)
    return transformer
def __init__(self, mask_topic, cam_imfo_topic, cam_tf_link):
    # TODO check whether it is good to init the node in the constructor
    rospy.init_node("mask_processing_node")
    rospy.loginfo("Starting MaskProcessingNode.")

    transformer = tf.Transformer()
    # TODO check the arguments
    (trans, rot) = transformer.lookupTransform(cam_tf_link, 'base_link',
                                               rospy.Time(0))
    # translation vector from base_link to cam1_link
    self.base_to_cam_trans_ = trans
    # rotation from base_link to cam1_link, as roll/pitch/yaw
    self.base_to_cam_rot_ = euler_from_quaternion(rot)

    # sensor_msgs.msg.CameraInfo camera_info_msg
    camera_info_msg = rospy.wait_for_message(cam_imfo_topic,
                                             sensor_msgs.msg.CameraInfo)
    self.mask_height_ = camera_info_msg.height
    self.mask_width_ = camera_info_msg.width
    # the full projection matrix of the camera
    self.p_matrix_ = camera_info_msg.P
    # assumes focal lengths for x and y are equal, in pixels
    self.focal_length_ = camera_info_msg.P[0]
    # [principal_point_x, principal_point_y] in pixels
    self.principal_point_ = [camera_info_msg.P[2], camera_info_msg.P[6]]

    # TODO check self.base_to_cam_rot_ is in rad and assert pitch is the
    # axis-down direction
    image_shape = [self.mask_height_, self.mask_width_, 1]
    camera_height = self.base_to_cam_trans_[2]
    camera_pitch_deg = np.rad2deg(self.base_to_cam_rot_[1])
    # a functor that analyzes the mask
    self.mask_analyzer = orto_shape_analysis(image_shape,
                                             cam_h_fov_deg / 2,
                                             cam_v_fov_deg / 2,
                                             axis_down_direction=camera_pitch_deg,
                                             H=camera_height,
                                             Y_dist_limit=Y_distance_lim)
def get_datas(path):
    """Read a rosbag and return (odom_pos, real_pos) position histories.

    odom_pos comes from the /tf map->base_footprint chain; real_pos from
    /gazebo/model_states (model index 2 — presumably the robot; confirm
    against the simulation setup).
    """
    bag = rosbag.Bag(path)
    transformer = tf.Transformer(True, rospy.Duration(3600.0))

    odom_pos = []
    for _topic, msg, stamp in bag.read_messages(topics=['/tf']):
        for transform_msg in msg.transforms:
            transformer.setTransform(transform_msg)
        try:
            xy, _ = transformer.lookupTransform('map', 'base_footprint',
                                                rospy.Time(0))
            odom_pos.append({'time': stamp.to_nsec(), 'x': xy[0], 'y': xy[1]})
        except (tf.LookupException, tf.ConnectivityException,
                tf.ExtrapolationException):
            # Chain not complete yet for this message — skip it.
            continue

    real_pos = []
    for _topic, msg, stamp in bag.read_messages(topics=['/gazebo/model_states']):
        real_pos.append({
            'time': stamp.to_nsec(),
            'x': msg.pose[2].position.x,
            'y': msg.pose[2].position.y,
        })

    bag.close()
    return odom_pos, real_pos
def __init__(self, name, topics, root_frame, measured_frame, groundtruth,
             mode, series_mode):
    """
    Class for calculating the acceleration of the given frame in relation
    to a given root frame. The tf data is sent over the tf topics given in
    the test_config.yaml.
    :param root_frame: name of the first frame
    :type root_frame: string
    :param measured_frame: name of the second frame. The acceleration will
        be measured in relation to the root_frame.
    :type measured_frame: string
    """
    # identification and lifecycle flags
    self.name = name
    self.started = False
    self.finished = False
    self.active = False
    # configuration
    self.groundtruth = groundtruth
    self.topics = topics
    self.root_frame = root_frame
    self.measured_frame = measured_frame
    self.mode = mode
    self.series_mode = series_mode
    # collected results
    self.series = []
    self.data = DataStamped()
    # previous samples kept for differentiation
    self.trans_old = []
    self.rot_old = []
    self.time_old = None
    self.velocity_old = None
    # transformer with a 10 s cache
    self.t = tf.Transformer(True, rospy.Duration(10.0))
def __init__(self, name, testblock_name, topics, root_frame, measured_frame,
             groundtruth, mode, series_mode, unit):
    """
    Class for calculating the jerk of the given frame in relation to a
    given root frame. The tf data is sent over the tf topics given in the
    test_config.yaml.
    :param root_frame: name of the first frame
    :type root_frame: string
    :param measured_frame: name of the second frame. The jerk will be
        measured in relation to the root_frame.
    :type measured_frame: string
    """
    # identification and status
    self.name = name
    self.testblock_name = testblock_name
    self.status = TestblockStatus()
    self.status.name = testblock_name
    # configuration
    self.groundtruth = groundtruth
    self.mode = mode
    self.series_mode = series_mode
    self.series = []
    self.unit = unit
    self.topics = topics
    self.root_frame = root_frame
    self.measured_frame = measured_frame
    # previous samples kept for repeated differentiation up to jerk
    self.trans_old = []
    self.rot_old = []
    self.time_old = None
    self.velocity_old = None
    self.acceleration_old = None
    # transformer with a 10 s cache
    self.t = tf.Transformer(True, rospy.Duration(10.0))
def __init__(self):
    rospy.init_node('imu_filter', anonymous=True)
    rospy.Subscriber('/imu_raw', Imu, self.imuCallback, queue_size=1)
    self.pub_imuOdom = rospy.Publisher('/imu_odom', Odometry, queue_size=1)
    self.tf = tf.Transformer(True, rospy.Duration(10.0))

    # IMU state vector (17x1):
    #   1:4   orientation (quaternion parts)          N/A
    #   5:7   position (NED or ENU)                   m
    #   8:10  velocity (NED or ENU)                   m/s
    #   11:13 gyroscope bias (XYZ)                    rad/s
    #   14:16 accelerometer bias (XYZ)                m/s^2
    #   17    visual odometry scale (XYZ)             N/A
    self.imuState = np.zeros((17, 1))
    self.imuState[0] = 1   # first quaternion component initialized to 1
    self.imuState[16] = 1  # unit visual-odometry scale
    self.last_stamp = Time()
    self.stamp = 0
    self.flag = 0
    rospy.spin()
def test_cache_time(self):
    # Vary cache_time and confirm its effect on ExtrapolationException
    # raised from lookupTransform().
    for cache_time in range(2, 98):
        transformer = tf.Transformer(True, rospy.Duration(cache_time))
        msg = geometry_msgs.msg.TransformStamped()
        msg.header.frame_id = "PARENT"
        msg.child_frame_id = "THISFRAME"
        msg.transform.translation.y = 5.0
        msg.transform.rotation = geometry_msgs.msg.Quaternion(
            *tf.transformations.quaternion_from_euler(0, 0, 0))
        transformer.setTransform(msg)

        afs = transformer.allFramesAsString()
        self.assert_(len(afs) != 0)
        self.assert_("PARENT" in afs)
        self.assert_("THISFRAME" in afs)
        self.assert_(
            transformer.getLatestCommonTime("THISFRAME", "PARENT").to_sec() == 0)

        # Set transforms for time 0..100 inclusive
        for ti in range(101):
            msg.header.stamp = rospy.Time(ti)
            transformer.setTransform(msg)
            self.assert_(
                transformer.getLatestCommonTime("THISFRAME", "PARENT").to_sec() == ti)
        self.assertEqual(
            transformer.getLatestCommonTime("THISFRAME", "PARENT").to_sec(), 100)

        # Times older than the cache window must extrapolate-fail; newer
        # ones must succeed. (avoid time of 0 because that means 'latest')
        for ti in range(1, 100 - cache_time):
            self.assertRaises(
                tf.ExtrapolationException,
                lambda: transformer.lookupTransform("THISFRAME", "PARENT",
                                                    rospy.Time(ti)))
        for ti in range(100 - cache_time, 100):
            transformer.lookupTransform("THISFRAME", "PARENT", rospy.Time(ti))
def __init__(self, topics, root_frame, measured_frame, groundtruth,
             groundtruth_epsilon, series_mode):
    """
    Class for calculating the distance covered by the given frame in
    relation to a given root frame. The tf data is sent over the tf topics
    given in the test_config.yaml.
    :param root_frame: name of the first frame
    :type root_frame: string
    :param measured_frame: name of the second frame. The distance will be
        measured in relation to the root_frame.
    :type measured_frame: string
    """
    # identification and lifecycle flags
    self.name = 'tf_length_rotation'
    self.started = False
    self.finished = False
    self.active = False
    # configuration
    self.groundtruth = groundtruth
    self.groundtruth_epsilon = groundtruth_epsilon
    self.topics = topics
    self.root_frame = root_frame
    self.measured_frame = measured_frame
    self.series_mode = series_mode
    # collected results
    self.series = []
    self.data = DataStamped()
    self.first_value = True
    # previous samples kept for accumulating covered distance
    self.trans_old = []
    self.rot_old = []
    # transformer with a 10 s cache
    self.t = tf.Transformer(True, rospy.Duration(10.0))
def setup_tf_transformer_from_ros_bag(bag, cache_time_secs=3600, verbose=False):
    """Create a tf.Transformer for querying tf, populated from a log.

    Static transforms (/tf_static) are collected once per (child, parent)
    pair, then re-stamped and re-inserted at the timestamp of every /tf
    message so that lookups anywhere in the cache window can resolve them.

    Fix over the previous version: the Python-2-only ``print`` statements
    (which are syntax errors on Python 3) were replaced with the function
    form, and dead commented-out code was removed.

    :param bag: opened rosbag.Bag to read /tf and /tf_static from
    :param cache_time_secs: transformer cache length in seconds
    :param verbose: print progress information while loading
    :return: the populated tf.Transformer
    """
    tf_t = tf.Transformer(True, rospy.Duration(secs=cache_time_secs))

    # Collect each static transform once, keyed by (child, parent).
    tf_static_msgs = []
    timestamps = []
    tf_set = set()
    for topic, msg, t in bag.read_messages(topics=['/tf_static']):
        for msg_tf in msg.transforms:
            key = (msg_tf.child_frame_id, msg_tf.header.frame_id)
            if key in tf_set:
                continue
            tf_set.add(key)
            tf_t.setTransform(msg_tf)
            tf_static_msgs.append(msg_tf)
            timestamps.append(t)

    def addStaticMessagesToTransformer(stamp):
        # Re-stamp the cached static transforms and push them in again.
        if verbose:
            print("len(tf_static_msgs): %d" % len(tf_static_msgs))
        for msg in tf_static_msgs:
            msg.header.stamp = stamp
            tf_t.setTransform(msg)

    counter = 0
    for topic, msg, t in bag.read_messages(topics=['/tf']):
        stamp = None
        counter += 1
        if verbose:
            print("processing tf message %d" % counter)
        for msg_tf in msg.transforms:
            tf_t.setTransform(msg_tf)
            stamp = msg_tf.header.stamp
        addStaticMessagesToTransformer(stamp)
    return tf_t
def test_transformer_wait_for_transform_dedicated_thread(self):
    """waitForTransform without setUsingDedicatedThread() must raise.

    Fix: the Python-2-only ``except tf.Exception, ex`` and ``print``
    statement syntax (syntax errors on Python 3) were replaced with forms
    valid on both Python 2 and 3.
    """
    tr = tf.Transformer()
    try:
        tr.waitForTransform("PARENT", "THISFRAME",
                            rospy.Time().from_sec(4.0), rospy.Duration(3.0))
        self.assertFalse("This should throw")  # only reached if no exception
    except tf.Exception as ex:
        print("successfully caught")
def _handle_transform(self, cone, x_cord, y_cord):
    # Look up the latest map -> camera transform, offset the detected cone
    # coordinates by its translation, and broadcast the result as the
    # 'cone_loc' frame in 'map'.
    transformer = tf.Transformer()
    w2c_pos, w2c_quat = transformer.lookupTransform(
        camera_frame, 'map',
        transformer.getLatestCommonTime('map', camera_frame))
    broadcaster = tf.TransformBroadcaster()
    broadcaster.sendTransform(
        (x_cord + w2c_pos[0], y_cord + w2c_pos[1], 0),
        w2c_quat,
        rospy.Time.now(),
        'cone_loc',
        'map')
def __init__(self):
    rospy.init_node('people_publisher')
    # Republish Gazebo model states as people-tracker measurements.
    self.sub = rospy.Subscriber('/simulation_state',
                                gazebo_msgs.msg.ModelStates,
                                self.model_state_cb)
    self.pub = rospy.Publisher('/people_tracker_measurements',
                               PositionMeasurementArray)
    self.tf = tf.Transformer()
def cog2baselink_cb(data):
    """Feed the received transform into a throwaway transformer, look up
    the fc -> cog transform, and rebroadcast it with the original header."""
    transformer = tf.Transformer(True, rospy.Duration(10.0))
    transformer.setTransform(data)
    inv_trans, inv_rot = transformer.lookupTransform('fc', 'cog', rospy.Time(0))
    broadcaster = tf.TransformBroadcaster()
    broadcaster.sendTransform(inv_trans,
                              inv_rot,
                              data.header.stamp,
                              data.header.frame_id,
                              data.child_frame_id)
def fill_transformer(bag):
    """Load every /tf message from the bag into a tf.Transformer (1 h cache)."""
    print("Loading tfs into transformer...")
    tf_t = tf.Transformer(True, rospy.Duration(3600))
    for _topic, msg, _t in bag.read_messages(topics=["/tf"]):
        for raw_tf in msg.transforms:
            # Convert the bag-recorded type into a geometry_msgs transform.
            tf_t.setTransform(bag_type_to_geometry_msgs(raw_tf))
    print("Finished")
    return tf_t
def test_transformer_wait_for_transform(self):
    """waitForTransform with no data must raise even with a dedicated thread.

    Fix: the Python-2-only ``except tf.Exception, ex`` syntax (a syntax
    error on Python 3) was replaced with ``except tf.Exception as ex``,
    which is valid on both Python 2 and 3.
    """
    tr = tf.Transformer()
    tr.setUsingDedicatedThread(1)
    try:
        tr.waitForTransform("PARENT", "THISFRAME",
                            rospy.Time().from_sec(4.0), rospy.Duration(3.0))
        self.assertFalse("This should throw")  # only reached if no exception
    except tf.Exception as ex:
        pass
def __init__(self):
    rospy.init_node("refinement_node")

    # Helper objects.
    self.python2to3_function_caller = python2to3FunctionCaller()
    self.parser = trajectoryParser()
    self.spl_interp = splineInterpolation()
    self.br = tf.TransformBroadcaster()
    self.tf_listener = tf.TransformListener()
    self.tf_transformer = tf.Transformer(True, rospy.Duration(10.0))
    self.dmp_python = dmpPython()

    # State: master pose and button toggles.
    self.master_pose = Pose()
    self.refinementFlag = 0
    self.grey_button = 0
    self.grey_button_prev = 0
    self.grey_button_toggle = 0
    self.white_button = 0
    self.white_button_prev = 0
    self.white_button_toggle = 0

    # Publishers and subscribers.
    self.end_effector_goal_pub = rospy.Publisher(
        "/whole_body_kinematic_controller/arm_tool_link_goal",
        PoseStamped, queue_size=10)
    self.geo_button_sub = rospy.Subscriber(
        "geo_buttons_m", GeomagicButtonEvent, self._buttonCallbackToggle)
    self.end_effector_pose_sub = rospy.Subscriber(
        "/end_effector_pose", PoseStamped, self._end_effector_pose_callback)
    self.traj_pred_pub = rospy.Publisher(
        'trajectory_visualizer/trajectory_predicted',
        TrajectoryVisualization, queue_size=10)
    self.traj_ref_pub = rospy.Publisher(
        'trajectory_visualizer/trajectory_refined',
        TrajectoryVisualization, queue_size=10)
    self.master_pose_sub = rospy.Subscriber(
        'master_control_comm', ControlComm, self._masterPoseCallback)
    self.force_state_pub = rospy.Publisher(
        'geo_force_state_m', WrenchStamped, queue_size=10)
    self.effort_pub = rospy.Publisher(
        'refinement_force', WrenchStamped, queue_size=10)
    self.gripper_wrt_ee_sub = rospy.Subscriber(
        'gripper_wrt_ee', PoseStamped, self._gripper_wrt_ee_callback)

    self.frame_id = '/base_footprint'
    self.initMasterNormalizePose()
def __init__(self):
    self.pub_cmd = rospy.Publisher("controller_cmd", ControllerCmd,
                                   queue_size=10)
    self.dt = 0.02  # loop period in seconds
    # Latest pose estimate: position tuple and orientation quaternion.
    self.pos = (0, 0, 0)
    self.orientation = (0, 0, 0, 1)
    self.joy = None
    # tf access objects.
    self.tfl = tf.TransformListener()
    self.transformer = tf.Transformer()
    # Frame names.
    self.body = 'base_link'
    self.odom = 'odom'
    self.sub_joy = rospy.Subscriber('joy', Joy, self.joy_callback)
def test_twist(self):
    """lookupTwist of straight-line constant-velocity motion recovers the
    velocity, and lookupTwistFull with matching frames agrees."""
    transformer = tf.Transformer()
    vel = 3
    # THISFRAME moves along x at `vel` units per second for five seconds.
    for ti in range(5):
        m = geometry_msgs.msg.TransformStamped()
        m.header.frame_id = "PARENT"
        m.header.stamp = rospy.Time(ti)
        m.child_frame_id = "THISFRAME"
        m.transform.translation.x = ti * vel
        m.transform.rotation = geometry_msgs.msg.Quaternion(
            *tf.transformations.quaternion_from_euler(0, 0, 0))
        transformer.setTransform(m)

    tw0 = transformer.lookupTwist("THISFRAME", "PARENT", rospy.Time(0.0),
                                  rospy.Duration(4.001))
    self.assertAlmostEqual(tw0[0][0], vel, 2)
    tw1 = transformer.lookupTwistFull("THISFRAME", "PARENT", "PARENT",
                                      (0, 0, 0), "THISFRAME",
                                      rospy.Time(0.0), rospy.Duration(4.001))
    self.assertEqual(tw0, tw1)
def disabled_random(self):
    """Build random balanced trees of frames with unit offsets and check
    lookupTransform distances against graph shortest-path lengths."""
    import networkx as nx
    for branching, height in [(2, 2), (2, 5), (3, 5)]:
        G = nx.balanced_tree(branching, height)
        t = tf.Transformer(True, rospy.Duration(10.0))
        for n in G.nodes():
            if n != 0:
                # n hangs off its parent p (smallest-numbered neighbor)
                # with a unit x offset.
                p = min(G.neighbors(n))
                setT(t, str(p), str(n), rospy.Time(0), 1)
        # Distance from the root equals tree depth.
        for n in G.nodes():
            ((x, _, _), _) = t.lookupTransform("0", str(n), rospy.Time(0))
            self.assert_(x == nx.shortest_path_length(G, 0, n))
        # Distance between any two nodes equals the depth difference.
        for i in G.nodes():
            for j in G.nodes():
                ((x, _, _), _) = t.lookupTransform(str(i), str(j), rospy.Time())
                self.assert_(abs(x) == abs(nx.shortest_path_length(G, 0, i)
                                           - nx.shortest_path_length(G, 0, j)))
def __init__(self):
    rospy.init_node('odometry2tfpose')
    self.sub = rospy.Subscriber('/gps/rtkfix', nav_msgs.msg.Odometry,
                                callback=self.odometry_cb)
    # Publishes vehicle position in the map frame.
    # QUESTION: Does this have to be in the map frame, or can we give it in
    # GPS frame? I looked at ndt_estimation, and didn't see it even
    # checking the frame in the header...
    self.pose_pub = rospy.Publisher('/gnss_pose',
                                    geometry_msgs.msg.PoseStamped,
                                    queue_size=1)
    # Publishes whether fix is valid.
    # QUESTION: Is this required? The autoware config files seem to expect
    # it, but I couldn't find it anywhere in the code.
    self.stat_pub = rospy.Publisher('/gnss_stat', std_msgs.msg.Bool,
                                    queue_size=1)
    self.tf = tf.Transformer(True, rospy.Duration(5.0))
def __init__(self, base_frame, flow_frame, fixed_frame):
    """Flow-based odometry node state.

    Fix: the Python-2-only ``print a, b`` statement (a syntax error on
    Python 3) was replaced with an equivalent single-string print that
    produces the same output on both versions.
    """
    self.base_frame = base_frame
    self.flow_frame = flow_frame
    self.fixed_frame = fixed_frame
    self.flow_ready = False
    self.last_flow_time = rospy.Time()

    self.broadcaster = tf.TransformBroadcaster()
    self.listener = tf.TransformListener()
    self.transformer = tf.Transformer(interpolating=True)

    self.odom_msg = Odometry()
    self.odom_msg.header.frame_id = fixed_frame
    self.odom_tf = TransformStamped()
    self.odom_tf.header.frame_id = self.fixed_frame
    self.odom_tf.child_frame_id = self.base_frame
    self.odom_tf.transform.rotation.w = 1  # identity rotation (x=y=z=0)

    # linear velocity from flow
    self.v_t = np.array((0, 0, 0))
    # angular velocity from IMU
    self.omega_t = np.array((0, 0, 0))
    # x,y,z position state vector
    self.x_t = np.array((0, 0, 0))
    self.theta_t = 0

    print("%s %s" % (self.base_frame, self.flow_frame))

    # Seed the position from the odom_temp -> flow frame offset.
    self.listener.waitForTransform('odom_temp', self.flow_frame,
                                   rospy.Time(), rospy.Duration(5))
    offset = self.listener.lookupTransform('odom_temp', self.flow_frame,
                                           rospy.Time())[0]
    self.x_t = np.array(offset)

    rospy.Subscriber('flow', OpticalFlow, self.flow_cb)
    rospy.Subscriber('/imu/data', Imu, self.imu_cb)
    rospy.Timer(rospy.Duration(0.1), self.publish_odom)
def __init__(self):
    # Time-synchronized RGB + depth streams from the head camera.
    self.rgb_image_sub = message_filters.Subscriber(
        '/head_camera/rgb/image_raw/', Image)
    self.depth_image_sub = message_filters.Subscriber(
        '/head_camera/depth_registered/image_raw/', Image)
    self.ts = message_filters.TimeSynchronizer(
        [self.rgb_image_sub, self.depth_image_sub], 10)
    self.ts.registerCallback(self.callback)

    # Control input and detection outputs.
    self.enableSub = rospy.Subscriber("/enableVision", Int8,
                                      self.callbackForInt)
    self.locPub = rospy.Publisher("/apple_loc", numpy_msg(Floats),
                                  queue_size=10)
    self.baseLocPub = rospy.Publisher("/fetch_fruit_harvest/apple_pose",
                                      Pose, queue_size=10)

    self.bridge = CvBridge()
    self.applesSeenMap = []
    self.tf_listener_ = TransformListener()
    self.tx = tf.Transformer(True, rospy.Duration(10.))
    self._capture_and_pub = 0
def test_chain(self):
    """chain() through A -> B -> C contains the intermediate frames.

    Fix: a copy-pasted duplicate of the B -> C ``setTransform`` block was
    removed; inserting the identical transform twice was redundant and
    had no effect on the result.
    """
    t = tf.Transformer()
    self.common(t)
    m = geometry_msgs.msg.TransformStamped()
    m.header.frame_id = "A"
    m.child_frame_id = "B"
    m.transform.translation.y = 5.0
    m.transform.rotation = geometry_msgs.msg.Quaternion(
        *tf.transformations.quaternion_from_euler(0, 0, 0))
    t.setTransform(m)
    m.header.frame_id = "B"
    m.child_frame_id = "C"
    t.setTransform(m)
    chain = t.chain("A", rospy.Time(0), "C", rospy.Time(0), "B")
    print("Chain is {}".format(chain))
    self.assert_("C" in chain)
    self.assert_("B" in chain)
def __init__(self):
    rospy.loginfo("Initializing Baxter Vision Interface")

    # Visualization windows.
    cv2.namedWindow(PROCESS_WINDOW, WINDOW_AUTOSIZE)
    cv2.namedWindow(RGB_WINDOW, WINDOW_AUTOSIZE)
    cv2.namedWindow(DEPTH_WINDOW, WINDOW_AUTOSIZE)

    self.bridge = CvBridge()
    self.br = tf.TransformBroadcaster()
    self.tf = tf.Transformer(True, rospy.Duration(10.0))
    self.tf_listener = TransformListener()

    # Hand state values, updated by the gesture callbacks.
    self.l_hand_state_sub = rospy.Subscriber("/gesture/hand_state/left",
                                             Float64, self.l_hand_state_cb)
    self.r_hand_state_sub = rospy.Subscriber("/gesture/hand_state/right",
                                             Float64, self.r_hand_state_cb)
    self.l_hand_state = 50
    self.r_hand_state = 50

    # Kinect image buffers and subscriptions.
    self.has_rgb_data = False
    self.has_depth_data = False
    self.img_kinect_depth = None
    self.img_kinect_rgb = None
    self.img_kinect_gray = None
    self.img_calibration = None
    self.sub_kinect_depth = rospy.Subscriber("/camera/depth/image_raw",
                                             Image, self.kinect_depth_callback)
    self.sub_kinect_rgb = rospy.Subscriber("/camera/rgb/image_color",
                                           Image, self.kinect_rgb_callback)

    self.pub_viz_campose = rospy.Publisher("/viz/camera_marker", Marker)
    self.pub_kinect_tf = rospy.Publisher("/viz/camera_tf", TransformStamped)

    # camera pose averaging filters
    self.camera_pos_filters = [dataFilter(3), dataFilter(3), dataFilter(3)]
    self.camera_rot_filters = [dataFilter(3), dataFilter(3), dataFilter(3)]
def _get_approach_position(param):
    """Compute the approach pose from the target coordinates.

    Builds a small tf tree BASE_LINK -> OBJECT -> APPROACH, where APPROACH
    is offset by ``-down_dis`` along the object's z axis, and returns the
    approach pose expressed in BASE_LINK.
    """
    # Descent distance: fixed default, overridable via the parameters.
    down_dis = TOOL_MOVE_FIXED_Z
    if scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE in param:
        down_dis = param[scenario_key.PARAM_KEY_ARM_DOWN_DISTANCE]

    t = tf.Transformer(True, rospy.Duration(10.0))

    # target: BASE_LINK -> OBJECT
    tp = TransformStamped()
    tp.header.frame_id = 'BASE_LINK'
    tp.child_frame_id = 'OBJECT'
    tp.transform.translation = Vector3(param[scenario_key.PARAM_KEY_COMMON_X],
                                       param[scenario_key.PARAM_KEY_COMMON_Y],
                                       param[scenario_key.PARAM_KEY_COMMON_Z])
    tp.transform.rotation = common.degree_to_quaternion(
        param[scenario_key.PARAM_KEY_COMMON_RX],
        param[scenario_key.PARAM_KEY_COMMON_RY],
        param[scenario_key.PARAM_KEY_COMMON_RZ])
    t.setTransform(tp)

    # approach: OBJECT -> APPROACH, offset along the object's -z axis
    ap = TransformStamped()
    ap.header.frame_id = 'OBJECT'
    ap.child_frame_id = 'APPROACH'
    ap.transform.translation = Vector3(0, 0, -down_dis)
    ap.transform.rotation = common.degree_to_quaternion(0, 0, 0)
    t.setTransform(ap)

    position, quaternion = t.lookupTransform('BASE_LINK', 'APPROACH',
                                             rospy.Time(0))
    return position, quaternion
def _reverse_tf(tanslation, euler):
    """Given the QR code pose in the camera frame, return the pose of the
    robot base (base_footprint) expressed in the QR-code frame.

    Note: the parameter name ``tanslation`` (sic) is kept as-is for
    backward compatibility with keyword callers.

    :param tanslation: translation of the QR code in the camera frame
    :param euler: orientation of the QR code in the camera frame (Euler)
    :return: (t, euler_angle) translation and Euler orientation of
        base_footprint in the qr_code frame
    """
    transform = tf.Transformer(True)

    # camera -> qr_code, from the detected pose.
    cam_to_qr = TransformStamped()
    cam_to_qr.header.frame_id = 'camera'
    cam_to_qr.child_frame_id = 'qr_code'
    cam_to_qr.transform.translation.x = tanslation[0]
    cam_to_qr.transform.translation.y = tanslation[1]
    cam_to_qr.transform.translation.z = tanslation[2]
    qx, qy, qz, qw = tf.transformations.quaternion_from_euler(*euler)
    cam_to_qr.transform.rotation.x = qx
    cam_to_qr.transform.rotation.y = qy
    cam_to_qr.transform.rotation.z = qz
    cam_to_qr.transform.rotation.w = qw
    transform.setTransform(cam_to_qr)

    # base_footprint -> camera, fixed mounting offset.
    base_to_cam = TransformStamped()
    base_to_cam.header.frame_id = 'base_footprint'
    base_to_cam.child_frame_id = 'camera'
    base_to_cam.transform.translation.x = 0.88
    base_to_cam.transform.translation.y = 0.72
    base_to_cam.transform.translation.z = 0
    qx, qy, qz, qw = tf.transformations.quaternion_from_euler(0, 0, -1.57)
    base_to_cam.transform.rotation.x = qx
    base_to_cam.transform.rotation.y = qy
    base_to_cam.transform.rotation.z = qz
    base_to_cam.transform.rotation.w = qw
    transform.setTransform(base_to_cam)

    t, q = transform.lookupTransform('qr_code', 'base_footprint',
                                     rospy.Time(0))
    euler_angle = tf.transformations.euler_from_quaternion(q)
    return t, euler_angle
import numpy
import unittest
import sys
import time
import StringIO

import tf.transformations
import geometry_msgs.msg
from tf.msg import tfMessage
import tf

# Benchmark parameters and the shared transformer under test.
iterations = 10000
t = tf.Transformer()


def mkm():
    """Build a canonical PARENT -> THISFRAME transform message."""
    msg = geometry_msgs.msg.TransformStamped()
    msg.header.frame_id = "PARENT"
    msg.child_frame_id = "THISFRAME"
    msg.transform.translation.y = 5.0
    msg.transform.rotation = geometry_msgs.msg.Quaternion(
        *tf.transformations.quaternion_from_euler(0, 0, 0))
    return msg


# A tfMessage carrying 20 identical transforms.
tm = tfMessage([mkm() for i in range(20)])
#! /usr/bin/env python import rospy import tf import math import time from numpy import interp, sign from nav_msgs.msg import Odometry from geometry_msgs.msg import Twist, TransformStamped rospy.init_node("rotate_and_move") rate = rospy.Rate(10.0) listener = tf.TransformListener() t = tf.Transformer(True, rospy.Duration(20.0)) pub = rospy.Publisher('cmd_vel', Twist, queue_size=1) def get_transform(from_a, to_b): (trans, rot) = t.lookupTransform(from_a, to_b, rospy.Time(0)) return (trans, rot) def set_transform(from_a, to_b, (trans, rot)): m = TransformStamped() m.header.frame_id = from_a m.child_frame_id = to_b m.transform.translation.x = trans[0]