def rotate_and_publish(pose_stamped):
    """Rotate *pose_stamped* by the module-level ``transform_frd`` transform,
    publish the result on ``vision_pose_pub``, and return it.

    NOTE(review): the orientation is "rotated" by treating the quaternion's
    x/y/z components as a plain vector while leaving w untouched (the original
    author flagged this with "WTF").  That is not a general quaternion frame
    change (which would be a quaternion composition) -- confirm it is
    intentional for this particular transform.
    """
    # Transform position (do_transform_vector3 applies rotation only, no
    # translation offset).
    position = Vector3Stamped()
    position.vector.x = pose_stamped.pose.position.x
    position.vector.y = pose_stamped.pose.position.y
    position.vector.z = pose_stamped.pose.position.z
    position_frd = tf2_geometry_msgs.do_transform_vector3(
        position, transform_frd)
    # Transform rotation (x/y/z components only -- see docstring note).
    quaternion_xyz = Vector3Stamped()
    quaternion_xyz.vector.x = pose_stamped.pose.orientation.x
    quaternion_xyz.vector.y = pose_stamped.pose.orientation.y
    quaternion_xyz.vector.z = pose_stamped.pose.orientation.z
    quaternion_xyz_frd = tf2_geometry_msgs.do_transform_vector3(
        quaternion_xyz, transform_frd)
    # Construct and publish FRD position
    pose_stamped_frd = PoseStamped()
    pose_stamped_frd.header = pose_stamped.header
    pose_stamped_frd.pose.position.x = position_frd.vector.x
    pose_stamped_frd.pose.position.y = position_frd.vector.y
    pose_stamped_frd.pose.position.z = position_frd.vector.z
    pose_stamped_frd.pose.orientation.x = quaternion_xyz_frd.vector.x
    pose_stamped_frd.pose.orientation.y = quaternion_xyz_frd.vector.y
    pose_stamped_frd.pose.orientation.z = quaternion_xyz_frd.vector.z
    pose_stamped_frd.pose.orientation.w = pose_stamped.pose.orientation.w  # WTF
    vision_pose_pub.publish(pose_stamped_frd)
    return pose_stamped_frd
def twist_in_frame(tf_buffer, twist_s, frame_id):
    """Re-express a TwistStamped in *frame_id*.

    Returns the transformed TwistStamped, or None when the transform from
    the twist's frame to *frame_id* is unavailable.
    """
    transform = get_transform(tf_buffer, frame_id, twist_s.header.frame_id)
    if not transform:
        return None

    header = Header(frame_id=frame_id, stamp=twist_s.header.stamp)
    linear_in = Vector3Stamped(header=header, vector=twist_s.twist.linear)
    angular_in = Vector3Stamped(header=header, vector=twist_s.twist.angular)

    result = TwistStamped(header=header)
    result.twist.linear = tf2_geometry_msgs.do_transform_vector3(
        linear_in, transform).vector
    result.twist.angular = tf2_geometry_msgs.do_transform_vector3(
        angular_in, transform).vector
    return result
def transform_vector(self, v, target_frame, source_frame, time=0.0):
    """Express a 3-vector given in *source_frame* in *target_frame*.

    Parameters
    ----------
    v : iterable, len 3
        Input vector in source frame.
    target_frame : str
        Name of the frame to transform into.
    source_frame : str
        Name of the input frame.
    time : float, default 0.0
        Time at which to get the transform (0 means "latest available").

    Returns
    -------
    tuple, len 3
        Transformed vector in target frame.
    """
    import rospy
    import tf2_geometry_msgs
    from .msg import make_vector_msg, unpack_vector_msg

    stamp = rospy.Time.from_sec(time)
    tf_msg = self._buffer.lookup_transform(target_frame, source_frame, stamp)
    msg_in = make_vector_msg(v, source_frame, time)
    msg_out = tf2_geometry_msgs.do_transform_vector3(msg_in, tf_msg)
    return unpack_vector_msg(msg_out, stamped=True)
def get_forward(self, frame):
    """Return the sonar "forward" direction (+y of base_link) expressed in *frame*.

    Blocks until the frame -> base_link transform is available, logging a
    warning each retry; exits the process cleanly when ``self.running`` goes
    false, or fatally when the transform lookup itself fails.

    :param frame: target TF frame name
    :return: geometry_msgs Vector3 (unit +y of base_link, rotated into *frame*)
    """
    try:
        time = rospy.Time.now()
        while not self.buffer.can_transform(frame, "base_link", time,
                                            rospy.Duration(0.5)):
            time = rospy.Time.now()
            rospy.logwarn("Waiting for " + frame + "...")
            if not self.running:
                sys.exit(0)
        tf = self.buffer.lookup_transform(frame, "base_link", time,
                                          rospy.Duration(1.0))
        v = Vector3Stamped()
        v.vector.x = 0
        v.vector.y = 1
        v.vector.z = 0
        # Rotation-only transform: we want the direction, not the offset.
        t = TransformStamped()
        t.transform.rotation.x = tf.transform.rotation.x
        t.transform.rotation.y = tf.transform.rotation.y
        t.transform.rotation.z = tf.transform.rotation.z
        t.transform.rotation.w = tf.transform.rotation.w
        return do_transform_vector3(v, t).vector
    # BUG FIX: the original bare `except:` also caught the SystemExit raised
    # by sys.exit(0) in the wait loop, mis-reporting a clean shutdown as
    # "Sonar transforms not found."  `except Exception` excludes SystemExit.
    except Exception:
        rospy.logfatal("Sonar transforms not found.")
        sys.exit(0)
def transform_velocity(self, msg):
    """Rotate the odometry's linear velocity from the global frame into the
    vehicle body frame (in place) and republish the message on ``pub_2``."""
    # Stamped copy of the linear velocity to feed the tf2 helper.
    velocity = Vector3Stamped()
    linear = msg.twist.twist.linear
    velocity.vector.x = linear.x
    velocity.vector.y = linear.y
    velocity.vector.z = linear.z

    # Ground truth reports the body frame's orientation, i.e. body -> global;
    # invert it (sign-flip of x, y, z) to map global velocity into the body.
    orientation = msg.pose.pose.orientation
    inv = tf.transformations.quaternion_inverse(
        [orientation.x, orientation.y, orientation.z, orientation.w])

    body_tf = TransformStamped()
    body_tf.transform.rotation.x = inv[0]
    body_tf.transform.rotation.y = inv[1]
    body_tf.transform.rotation.z = inv[2]
    body_tf.transform.rotation.w = inv[3]

    # Rotate, write back into the message, and republish.
    msg.twist.twist.linear = do_transform_vector3(velocity, body_tf).vector
    self.pub_2.publish(msg)
def filter_frame(self, frame, trans, stamp):
    """
    Applies a four-step algorithm to find all the roombas in a given
    BGR image and fold them into the tracked odometry array.

    :param frame: raw BGR image in which to find roombas
    :type frame: numpy.ndarray of shape 3 x WIDTH x HEIGHT with dtype uint8
    :param trans: camera -> map TransformStamped used to project detections
    :param stamp: timestamp written into updated/created odometry entries
    :return: None

    .. note:: Does not return data, only debugs the result
    .. note:: The image origin is in the upper-left hand corner
    """
    points = []
    for img_coords in self.bound_roombas(frame):
        cam_ray = pixel_to_ray(img_coords, frame.shape, self.daov)
        # Convert that camera ray to world space (map frame)
        # See note in script header about do_transform_vector3
        map_ray = tf2_geometry_msgs.do_transform_vector3(
            cam_ray, copy.deepcopy(trans))
        # Scale the direction to hit the ground (plane z=0)
        # Direction scale should always be positive
        cam_pos = tf2_geometry_msgs.do_transform_point(
            PointStamped(point=Point(0, 0, 0)), trans).point
        direction_scale = -(cam_pos.z - ROOMBA_HEIGHT) / map_ray.vector.z
        roomba_pos = Point()
        roomba_pos.x = cam_pos.x + map_ray.vector.x * direction_scale
        roomba_pos.y = cam_pos.y + map_ray.vector.y * direction_scale
        roomba_pos.z = 0
        # Debug the roomba line
        points.append(trans.transform.translation)
        points.append(roomba_pos)
        # Add the roomba to array and publish.
        # BUG FIX: release the lock in a finally block so an exception while
        # matching or publishing can no longer leave odom_lock held forever.
        self.odom_lock.acquire()
        try:
            sq_tolerance = 0.1 if len(self.odom_array.data) < 10 else 1000
            for i in xrange(len(self.odom_array.data)):
                pt = self.odom_array.data[i].pose.pose.position
                if (roomba_pos.x - pt.x)**2 + \
                        (roomba_pos.y - pt.y)**2 < sq_tolerance:
                    # Close enough to an existing track: refresh it.
                    self.odom_array.data[i].header.stamp = stamp
                    self.odom_array.data[i].pose.pose.position = roomba_pos
                    break
            else:
                # No match: start a new roomba track.
                item = Odometry()
                item.child_frame_id = "roomba%d" % len(self.odom_array.data)
                item.header.frame_id = "map"
                item.header.stamp = stamp
                item.pose.pose.position = roomba_pos
                item.pose.pose.orientation.z = 1
                self.odom_array.data.append(item)
            self.publisher.publish(self.odom_array)
        finally:
            self.odom_lock.release()
    self.debug.rviz_lines(points, 0)
def odom_received(odom):
    """Re-express the ZED camera odometry relative to base_link, broadcast the
    odom -> base_link transform, and republish the odometry.

    NOTE(review): the position and the quaternion's x/y/z components are each
    rotated with do_transform_vector3 (rotation only, no translation offset)
    and the quaternion's w is passed through unchanged -- this is not a full
    pose/quaternion frame change; confirm it is intentional.
    """
    global tfBroadcaster
    global tfBuffer
    global odom_publisher
    global counter
    base_link_to_zed_left_camera_optical_frame = tfBuffer.lookup_transform(
        'base_link', 'zed_left_camera_optical_frame', rospy.Time())
    # Rotate the reported position into base_link.
    translation = geometry_msgs.msg.Vector3Stamped()
    translation.header.frame_id = 'zed_left_camera_optical_frame'
    translation.vector = odom.pose.pose.position
    translation_base_link = tf2_geometry_msgs.do_transform_vector3(
        translation, base_link_to_zed_left_camera_optical_frame)
    # Rotate the quaternion's vector part (see docstring note).
    rotation_vector = geometry_msgs.msg.Vector3Stamped()
    rotation_vector.header.frame_id = 'zed_left_camera_optical_frame'
    rotation_vector.vector.x = odom.pose.pose.orientation.x
    rotation_vector.vector.y = odom.pose.pose.orientation.y
    rotation_vector.vector.z = odom.pose.pose.orientation.z
    rotation_vector_base_link = tf2_geometry_msgs.do_transform_vector3(
        rotation_vector, base_link_to_zed_left_camera_optical_frame)
    odom_pose_base_link = geometry_msgs.msg.PoseStamped()
    odom_pose_base_link.pose.position = translation_base_link.vector
    odom_pose_base_link.pose.orientation.x = rotation_vector_base_link.vector.x
    odom_pose_base_link.pose.orientation.y = rotation_vector_base_link.vector.y
    odom_pose_base_link.pose.orientation.z = rotation_vector_base_link.vector.z
    odom_pose_base_link.pose.orientation.w = odom.pose.pose.orientation.w
    # NOTE(review): deliberate per-callback y drift of 5 mm per message --
    # looks like test/debug code; confirm before shipping.
    odom_pose_base_link.pose.position.y += counter * 0.005
    counter += 1
    # Broadcast odom -> base_link from the computed pose.
    odom_to_base_link = geometry_msgs.msg.TransformStamped()
    odom_to_base_link.header.stamp = odom.header.stamp
    odom_to_base_link.header.frame_id = 'odom'
    odom_to_base_link.child_frame_id = 'base_link'
    odom_to_base_link.transform.translation = odom_pose_base_link.pose.position
    odom_to_base_link.transform.rotation = odom_pose_base_link.pose.orientation
    tfBroadcaster.sendTransform(odom_to_base_link)
    # Republish the incoming odometry, relabelled into the odom frame.
    odom_base_link = odom
    odom_base_link.header.frame_id = 'odom'
    odom_base_link.pose.pose = odom_pose_base_link.pose
    odom_publisher.publish(odom_base_link)
def transformVector(x, y, z, quat):
    """Rotate the vector (x, y, z) by the quaternion message *quat* and
    return the resulting Vector3."""
    vec = Vector3Stamped()
    vec.vector.x = x
    vec.vector.y = y
    vec.vector.z = z

    rot = TransformStamped()
    rot.transform.rotation = quat

    return do_transform_vector3(vec, rot).vector
def odometry_in_frame(tf_buffer, odom, frame_id, child_frame_id):
    """Re-express an Odometry message with pose in *frame_id* and twist in
    *child_frame_id*.

    Covariances are not transformed.  Returns None if either required
    transform is unavailable.
    """
    pose_tf = get_transform(tf_buffer, frame_id, odom.header.frame_id)
    twist_tf = get_transform(tf_buffer, child_frame_id, odom.child_frame_id)
    if not pose_tf or not twist_tf:
        return None

    header = Header(frame_id=frame_id, stamp=odom.header.stamp)
    out = Odometry(header=header, child_frame_id=child_frame_id)

    # Twist: rotate linear and angular parts into the new child frame.
    linear_in = Vector3Stamped(header=header, vector=odom.twist.twist.linear)
    angular_in = Vector3Stamped(header=header, vector=odom.twist.twist.angular)
    out.twist.twist.linear = tf2_geometry_msgs.do_transform_vector3(
        linear_in, twist_tf).vector
    out.twist.twist.angular = tf2_geometry_msgs.do_transform_vector3(
        angular_in, twist_tf).vector

    # Pose: full transform into the new parent frame.
    pose_in = PoseStamped(header=header, pose=odom.pose.pose)
    out.pose.pose = tf2_geometry_msgs.do_transform_pose(pose_in, pose_tf).pose
    return out
def transform_twist(twist = geometry_msgs.msg.Twist, transform_stamped = geometry_msgs.msg.TransformStamped):
    """Rotate a Twist by the inverse of *transform_stamped*'s rotation.

    The input transform is deep-copied and left untouched.  Negating the
    quaternion's w is equivalent (up to quaternion sign) to taking the
    conjugate, i.e. the inverse rotation for a unit quaternion.
    """
    inv_tf = copy.deepcopy(transform_stamped)
    # Inverse real-part of quaternion to inverse rotation
    inv_tf.transform.rotation.w = -inv_tf.transform.rotation.w

    linear_in = geometry_msgs.msg.Vector3Stamped()
    linear_in.vector = twist.linear
    angular_in = geometry_msgs.msg.Vector3Stamped()
    angular_in.vector = twist.angular

    # Populate the output twist from the rotated components.
    result = geometry_msgs.msg.Twist()
    result.linear = tf2_geometry_msgs.do_transform_vector3(linear_in, inv_tf).vector
    result.angular = tf2_geometry_msgs.do_transform_vector3(angular_in, inv_tf).vector
    return result
def callback_rear(data):
    """Rotate the rear-mouse reading into BASE_FRAME and cache the result in
    the module-level ``last_rear``; TF failures are printed and ignored."""
    global last_rear
    try:
        transform = tfBuffer.lookup_transform(BASE_FRAME, 'mouse_rear',
                                              rospy.Time())
        last_rear = tf2_geometry_msgs.do_transform_vector3(data, transform)
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
            tf2_ros.ExtrapolationException) as ex:
        # Best-effort: keep the previous value and report the TF error.
        print(ex)
def callback(self, msg):
    """Rotate the IMU's angular velocity and linear acceleration by the
    inverse of the reported orientation and republish as a new Imu message
    with identity orientation.

    :param msg: incoming sensor_msgs Imu
    """
    transform_inverted = TransformStamped()
    transform_inverted.child_frame_id = msg.header.frame_id
    # BUG FIX: copy the quaternion field-by-field.  The original assigned
    # msg.orientation directly, aliasing the incoming message, so negating
    # w below silently mutated msg.orientation as well.
    transform_inverted.transform.rotation.x = msg.orientation.x
    transform_inverted.transform.rotation.y = msg.orientation.y
    transform_inverted.transform.rotation.z = msg.orientation.z
    # Negating w yields the conjugate (up to quaternion sign), i.e. the
    # inverse rotation for a unit quaternion.
    transform_inverted.transform.rotation.w = -msg.orientation.w
    #
    ang_filtered = tf2_geometry_msgs.do_transform_vector3(
        Vector3Stamped(msg.header, msg.angular_velocity), transform_inverted)
    acc_filtered = tf2_geometry_msgs.do_transform_vector3(
        Vector3Stamped(msg.header, msg.linear_acceleration), transform_inverted)
    #
    msg_pub = Imu()
    msg_pub.header = msg.header
    msg_pub.orientation.w = 1.0
    msg_pub.angular_velocity = ang_filtered.vector
    msg_pub.linear_acceleration = acc_filtered.vector
    #
    self.publisher_.publish(msg_pub)
def tfMap2FrontLaser(self): target = self.workspaceRegion #self.tf_listener = tf.TransformListener() ts = TransformStamped() self.region = np.zeros((target.shape)) #try: if True: #self.tf_listener.waitForTransform("front_laser", "map",rospy.Time(0)-DT_ROBO, rospy.Duration(0.05)); #### VERYYY UNCLEAAAN!!!! --- SOLVE TIME ISSUE -- no time should be added at the end... now = rospy.Time.now() past = rospy.Time.now() - rospy.Duration.from_sec(0.3) start = 0 start = time.clock() self.tf_listener.waitForTransformFull("/front_laser", past, "/map", now, "/odom", rospy.Duration.from_sec(3.0)) self.t_transform = time.clock() - start (trans, rot) = self.tf_listener.lookupTransformFull( "/front_laser", past, "/map", now, "/odom") ts.transform.translation.x = trans[0] ts.transform.translation.y = trans[1] ts.transform.translation.z = trans[2] ts.transform.rotation.x = rot[0] ts.transform.rotation.y = rot[1] ts.transform.rotation.z = rot[2] ts.transform.rotation.w = rot[3] # Create array region = np.zeros((target.shape)) for pp in range(target.shape[1]): target_v3 = Vector3Stamped() target_v3.header.stamp = now target_v3.header.frame_id = "front_laser" target_v3.vector.x = target[0, pp] target_v3.vector.y = target[1, pp] target_v3.vector.z = 0 target_transformed = tf2_geometry_msgs.do_transform_vector3( target_v3, ts) region[0, pp] = target_transformed.vector.x + trans[0] region[1, pp] = target_transformed.vector.y + trans[1] rospy.loginfo( "TF front_laser to map finished. Waited for {} ms.".format( round(self.t_transform * 3, 3))) self.workspaceRegion_tf_baseLink = region
def transform(origin, destination, poseORodom):
    """Transform a PoseStamped, Pose, or Odometry from *origin* to *destination*.

    Arguments:
        origin: the starting frame
        destination: the frame to transform to
        poseORodom: the PoseStamped / Pose / Odometry message to transform

    Returns:
        The transformed message (same type as the input), or None for an
        unsupported input type.
    """
    # NOTE(review): a Buffer with no TransformListener attached never receives
    # TF data, so this lookup can only succeed if transforms are injected some
    # other way -- confirm the commented-out listener is intentional.
    tfBuffer = tf2_ros.Buffer()
    # listener = tf2_ros.TransformListener(tfBuffer)
    trans = tfBuffer.lookup_transform(destination, origin, rospy.Time(),
                                      rospy.Duration(0.5))
    if isinstance(poseORodom, PoseStamped):
        return tf2_geometry_msgs.do_transform_pose(poseORodom, trans)
    elif isinstance(poseORodom, Pose):
        temp_pose_stamped = PoseStamped()
        temp_pose_stamped.pose = poseORodom
        return tf2_geometry_msgs.do_transform_pose(temp_pose_stamped, trans).pose
    elif isinstance(poseORodom, Odometry):
        temp_odom = Odometry()
        # BUG FIX: do_transform_pose expects a PoseStamped; the original
        # passed the PoseWithCovariance field directly and stored the
        # resulting PoseStamped into temp_odom.pose (wrong message type).
        pose_stamped = PoseStamped()
        pose_stamped.pose = poseORodom.pose.pose
        temp_odom.pose.pose = tf2_geometry_msgs.do_transform_pose(
            pose_stamped, trans).pose
        # Twist: rotate the linear and angular parts.
        vec3 = Vector3Stamped()
        ang3 = Vector3Stamped()
        vec3.vector = poseORodom.twist.twist.linear
        ang3.vector = poseORodom.twist.twist.angular
        temp_odom.twist.twist.linear = tf2_geometry_msgs.do_transform_vector3(
            vec3, trans).vector
        temp_odom.twist.twist.angular = tf2_geometry_msgs.do_transform_vector3(
            ang3, trans).vector
        return temp_odom
    # Unsupported input type.
    return None
def transformVector(self, x, y, z, quat):
    """Rotate the vector (x, y, z) by the quaternion *quat* given as an
    [x, y, z, w] sequence; returns the rotated Vector3Stamped."""
    vec = Vector3Stamped()
    vec.vector.x = x
    vec.vector.y = y
    vec.vector.z = z

    rot = TransformStamped()
    (rot.transform.rotation.x,
     rot.transform.rotation.y,
     rot.transform.rotation.z,
     rot.transform.rotation.w) = quat

    return do_transform_vector3(vec, rot)
def transform_vector(target_frame, vector):
    """
    Transforms a stamped vector into a different target frame.

    :type target_frame: str
    :type vector: Vector3Stamped
    :return: Transformed vector, or None on extrapolation failure
    :rtype: Vector3Stamped
    """
    global tfBuffer
    # Lazily create the shared TF buffer on first use.
    if tfBuffer is None:
        init()
    try:
        transform = tfBuffer.lookup_transform(target_frame,
                                              vector.header.frame_id,  # source frame
                                              vector.header.stamp,
                                              rospy.Duration(5.0))
        new_pose = do_transform_vector3(vector, transform)
        return new_pose
    except ExtrapolationException as e:
        # Log and fall through: returns None implicitly.
        rospy.logwarn(e)
def do_transform_msg(
        msg, target_frame, transform=None
):
    # type: (TransformableMessage, str, Optional[TransformStamped]) -> TransformableMessage
    """Transform a point, vector, wrench, or pose into *target_frame*.

    When *transform* is omitted, the required transform is looked up
    automatically from the message's own frame.
    """
    if transform is None:
        transform = lookup_transform_simple(target_frame, msg.header.frame_id)
    # Dispatch on the concrete stamped message type.
    dispatch = (
        (PointStamped, tf2_geometry_msgs.do_transform_point),
        (Vector3Stamped, tf2_geometry_msgs.do_transform_vector3),
        (WrenchStamped, tf2_geometry_msgs.do_transform_wrench),
        (PoseStamped, tf2_geometry_msgs.do_transform_pose),
    )
    for msg_type, transform_fn in dispatch:
        if isinstance(msg, msg_type):
            return transform_fn(msg, transform)
    raise TypeError("Unexpected message type {}".format(type(msg)))
def inverse_transform(transform):
    """Return the inverse of *transform* as a new TransformStamped.

    For T = (R, t) the inverse is (R^-1, -(R^-1) t): the rotation is inverted
    by negating the quaternion's w (conjugate, up to quaternion sign) and the
    negated translation is rotated into the new frame.

    BUG FIX: the original aliased the input's rotation and translation
    objects and mutated them in place (negating rotation.w and the
    translation components on the caller's message).  All fields are now
    copied, so *transform* is left untouched.
    """
    inversed_transform = geometry_msgs.msg.TransformStamped()

    # Inverse rotation on a copy of the input quaternion.
    inversed_rotation = geometry_msgs.msg.TransformStamped()
    inversed_rotation.transform.translation = geometry_msgs.msg.Vector3(
        0, 0, 0)
    inversed_rotation.transform.rotation.x = transform.transform.rotation.x
    inversed_rotation.transform.rotation.y = transform.transform.rotation.y
    inversed_rotation.transform.rotation.z = transform.transform.rotation.z
    inversed_rotation.transform.rotation.w = -transform.transform.rotation.w

    # Inverse translation: rotate -t by the inverse rotation.
    inversed_translation = geometry_msgs.msg.Vector3Stamped()
    inversed_translation.vector.x = -transform.transform.translation.x
    inversed_translation.vector.y = -transform.transform.translation.y
    inversed_translation.vector.z = -transform.transform.translation.z
    inversed_translation = tf2_geometry_msgs.do_transform_vector3(
        inversed_translation, inversed_rotation)

    inversed_transform.transform.translation = inversed_translation.vector
    inversed_transform.transform.rotation = inversed_rotation.transform.rotation
    return inversed_transform
def TransformVectorToBody(self, vect, q):
    """Rotate *vect* from the world frame into the body frame described by
    quaternion *q* (message with x/y/z/w fields); returns a Vector3."""
    stamped = Vector3Stamped()
    stamped.vector.x = vect.x
    stamped.vector.y = vect.y
    stamped.vector.z = vect.z

    # Quaternion inverse = conjugate / squared norm.
    q_arr = np.array((q.x, q.y, q.z, q.w))
    q_inv = np.array((-q_arr[0], -q_arr[1], -q_arr[2], q_arr[3])) \
        / np.dot(q_arr, q_arr)

    body_tf = TransformStamped()
    body_tf.transform.rotation.x = q_inv[0]
    body_tf.transform.rotation.y = q_inv[1]
    body_tf.transform.rotation.z = q_inv[2]
    body_tf.transform.rotation.w = q_inv[3]

    return do_transform_vector3(stamped, body_tf).vector
    #np.array([vt.vector.x, vt.vector.y, vt.vector.z ])
def controlCallback(self, event):
    """Periodic control-loop tick: drives the search / approach / catch
    state machine and publishes the current state name.

    NOTE(review): the object position is computed by first overwriting
    ``w2obj_tf``'s rotation with the camera orientation and then rotating the
    camera->cog offset with do_transform_vector3 (rotation only) -- confirm
    this composition is intentional.
    """
    rospy.loginfo("counter : %d", self.count)
    # Every 5th..9th tick: command hover at origin, then reset the counter.
    if self.count >= 5:
        self.goPos([0, 0, 0], 1)
        self.count += 1
        if self.count >= 10:
            self.count = 0
        return
    if not self.task_start_:
        return
    rospy.loginfo(self.uav_global_xy_pos_)
    #rospy.loginfo (self.target_xy_pos_)
    #navigation
    if self.state_machine_ == self.GO_FOR_STATE_:
        self.goPos(self.hantei(self.uav_global_xy_pos_))
    if self.state_machine_ == self.END_STATE_:
        #rospy.loginfo ("finish!!")
        self.goPos([0, 0, 0], 1)
    #state machine
    if self.state_machine_ == self.INITIAL_STATE_:
        self.initial_uav_global_xy_pos_ = self.uav_global_xy_pos_
        self.uav_start_pub_.publish()
        rospy.loginfo("task start")
        time.sleep(1)
        self.old_hantei = self.hantei(self.uav_global_xy_pos_)
        self.state_machine_ = self.GO_FOR_STATE_
    elif self.state_machine_ == self.GO_FOR_STATE_:
        if self.isConvergent(self.target_xy_pos_):
            self.state_machine_ = self.END_STATE_
    elif self.state_machine_ == self.END_STATE_:
        #tf
        try:
            w2obj_tf = self.tfBuffer.lookup_transform(
                'world', 'target_object', rospy.Time())
            cam2cog_tf = self.tfBuffer.lookup_transform(
                'downward_camera_optical_frame', 'cog', rospy.Time(0))
            w2cam_tf = self.tfBuffer.lookup_transform(
                'world', 'downward_camera_optical_frame', rospy.Time(0))
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException,
                tf2_ros.ExtrapolationException):
            #rospy.loginfo("no object")
            return
        self.delay = self.delay + 1
        # Wait one control period before trusting the detection.
        if self.delay == self.control_rate_:
            rospy.loginfo("object detect")
            cam2cog_tf_pos = Vector3Stamped()
            cam2cog_tf_pos.vector = cam2cog_tf.transform.translation
            w2obj_tf.transform.rotation = w2cam_tf.transform.rotation
            target_object_global_xy_pos_temp = tf2_geometry_msgs.do_transform_vector3(
                cam2cog_tf_pos, w2obj_tf)
            rospy.loginfo("target cam position: %f %f %f",
                          w2obj_tf.transform.translation.x,
                          w2obj_tf.transform.translation.y,
                          w2obj_tf.transform.translation.z)
            rospy.loginfo("cam 2 cog: %f %f %f",
                          cam2cog_tf.transform.translation.x,
                          cam2cog_tf.transform.translation.y,
                          cam2cog_tf.transform.translation.z)
            self.target_object_global_xy_pos_[
                0] = target_object_global_xy_pos_temp.vector.x + w2obj_tf.transform.translation.x
            self.target_object_global_xy_pos_[
                1] = target_object_global_xy_pos_temp.vector.y + w2obj_tf.transform.translation.y
            rospy.loginfo("target: %f %f",
                          self.target_object_global_xy_pos_[0],
                          self.target_object_global_xy_pos_[1])
            time.sleep(5)
            self.state_machine_ = self.OBJECT_APPROACHING_STATE_
    elif self.state_machine_ == self.OBJECT_APPROACHING_STATE_:
        self.object_info_update(self.target_object_global_xy_pos_[0],
                                self.target_object_global_xy_pos_[1],
                                "detect_object", "/world")
        if self.isConvergent(self.target_frame_, self.target_xy_pos_,
                             self.target_z_pos_):
            self.state_machine_ = self.OBJECT_CATCHING_STATE_
            w2obj_tf = self.tfBuffer.lookup_transform(
                'world', 'target_object', rospy.Time())
            cam2cog_tf = self.tfBuffer.lookup_transform(
                'downward_camera_optical_frame', 'cog', rospy.Time(0))
            cam2cog_tf_pos = Vector3Stamped()
            w2cam_tf = self.tfBuffer.lookup_transform(
                'world', 'downward_camera_optical_frame', rospy.Time(0))
            cam2cog_tf_pos.vector = cam2cog_tf.transform.translation
            w2obj_tf.transform.rotation = w2cam_tf.transform.rotation
            target_object_global_xy_pos_temp = tf2_geometry_msgs.do_transform_vector3(
                cam2cog_tf_pos, w2obj_tf)
            rospy.loginfo("target cam position: %f %f %f",
                          w2obj_tf.transform.translation.x,
                          w2obj_tf.transform.translation.y,
                          w2obj_tf.transform.translation.z)
            rospy.loginfo("cam 2 cog: %f %f %f",
                          cam2cog_tf.transform.translation.x,
                          cam2cog_tf.transform.translation.y,
                          cam2cog_tf.transform.translation.z)
            self.target_object_global_xy_pos_[
                0] = w2obj_tf.transform.translation.x # + 0.1 target_object_global_xy_pos_temp.vector.x +
            self.target_object_global_xy_pos_[
                1] = w2obj_tf.transform.translation.y #- 0.3target_object_global_xy_pos_temp.vector.y +
            rospy.loginfo("target: %f %f",
                          self.target_object_global_xy_pos_[0],
                          self.target_object_global_xy_pos_[1])
    elif self.state_machine_ == self.OBJECT_CATCHING_STATE_:
        self.object_info_update(self.target_object_global_xy_pos_[0],
                                self.target_object_global_xy_pos_[1],
                                "detect_object", "/world")
    self.state_machine_pub_.publish(self.state_name_[self.state_machine_])
    self.count += 1
# NOTE(review): fragment of a test script -- x, y, z, v and test_similar are
# defined earlier, outside this view.
print ('expected:', x, y, z)
print ('actual :', v.x, v.y, v.z)

# a translation should not modify a Vector3 (only rotation should)
# testing with a transform that is a pure translation
t = TransformStamped()
t.transform.translation.z = 1
t.transform.rotation.w = 1
v = Vector3Stamped()
v.vector.x = 1
v.vector.y = 0
v.vector.z = 0
vt = tf2_geometry_msgs.do_transform_vector3(v, t)
test_similar('translation should not be applied', vt.vector,
             v.vector.x, v.vector.y, v.vector.z)

# making sure the rotation is applied properly
t = TransformStamped()
t.transform.translation.x = 0
# 90 degrees about z: (1, 0, 0) should map to (0, 1, 0).
q = tf.transformations.quaternion_from_euler(0, 0, math.radians(90))
t.transform.rotation.x = q[0]
t.transform.rotation.y = q[1]
t.transform.rotation.z = q[2]
t.transform.rotation.w = q[3]
v = Vector3Stamped()
v.vector.x = 1
v.vector.y = 0
def test_transform(self):
    """Exercise tf2_ros.Buffer round-trips and do_transform_vector3 for the
    rclpy (ROS 2) API: lookup, point/pose transforms, and the rotation-only
    semantics of Vector3 transforms."""
    b = tf2_ros.Buffer()
    # 180-degree rotation about x plus unit x translation, frame a -> b.
    t = TransformStamped()
    t.transform.translation.x = 1.0
    t.transform.rotation = Quaternion(w=0.0, x=1.0, y=0.0, z=0.0)
    t.header.stamp = rclpy.time.Time(seconds=2.0).to_msg()
    t.header.frame_id = 'a'
    t.child_frame_id = 'b'
    b.set_transform(t, 'eitan_rocks')
    out = b.lookup_transform('a', 'b',
                             rclpy.time.Time(seconds=2.0).to_msg(),
                             rclpy.time.Duration(seconds=2))
    self.assertEqual(out.transform.translation.x, 1)
    self.assertEqual(out.transform.rotation.x, 1)
    self.assertEqual(out.header.frame_id, 'a')
    self.assertEqual(out.child_frame_id, 'b')

    # Point: full transform (rotation + translation) into frame b.
    v = PointStamped()
    v.header.stamp = rclpy.time.Time(seconds=2.0).to_msg()
    v.header.frame_id = 'a'
    v.point.x = 1.0
    v.point.y = 2.0
    v.point.z = 3.0
    # b.registration.add(PointStamped)
    out = b.transform(v, 'b', new_type=PointStamped)
    self.assertEqual(out.point.x, 0)
    self.assertEqual(out.point.y, -2)
    self.assertEqual(out.point.z, -3)

    # Pose: position and orientation both transformed.
    v = PoseStamped()
    v.header.stamp = rclpy.time.Time(seconds=2.0).to_msg()
    v.header.frame_id = 'a'
    v.pose.position.x = 1.0
    v.pose.position.y = 2.0
    v.pose.position.z = 3.0
    v.pose.orientation = Quaternion(w=0.0, x=1.0, y=0.0, z=0.0)
    out = b.transform(v, 'b')
    self.assertEqual(out.pose.position.x, 0)
    self.assertEqual(out.pose.position.y, -2)
    self.assertEqual(out.pose.position.z, -3)

    # Translation shouldn't affect Vector3
    t = TransformStamped()
    t.transform.translation.x = 1.0
    t.transform.translation.y = 2.0
    t.transform.translation.z = 3.0
    t.transform.rotation = Quaternion(w=1.0, x=0.0, y=0.0, z=0.0)
    v = Vector3Stamped()
    v.vector.x = 1.0
    v.vector.y = 0.0
    v.vector.z = 0.0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, 1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)

    # Rotate Vector3 180 deg about y
    t = TransformStamped()
    t.transform.translation.x = 1.0
    t.transform.translation.y = 2.0
    t.transform.translation.z = 3.0
    t.transform.rotation = Quaternion(w=0.0, x=0.0, y=1.0, z=0.0)
    v = Vector3Stamped()
    v.vector.x = 1.0
    v.vector.y = 0.0
    v.vector.z = 0.0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, -1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)
def callback(self, msg):
    '''Callback for a detection result: Kalman predict, optional transform to
    the global frame, Hungarian association, Kalman update, track birth and
    death, then publish the filtered tracks and rviz markers.'''
    # update delta time
    dt = (msg.header.stamp.sec - self.sec) + \
        (msg.header.stamp.nanosec - self.nanosec) / 1e9
    self.sec = msg.header.stamp.sec
    self.nanosec = msg.header.stamp.nanosec

    # get detection
    detections = msg.obstacles
    num_of_detect = len(detections)
    num_of_obstacle = len(self.obstacle_list)

    # kalman predict
    for obj in self.obstacle_list:
        obj.predict(dt)

    # transform to global frame
    if self.global_frame is not None:
        try:
            trans = self.tf_buffer.lookup_transform(
                self.global_frame, msg.header.frame_id, rclpy.time.Time())
            msg.header.frame_id = self.global_frame
            for i in range(len(detections)):
                # transform position (point)
                p = PointStamped()
                p.point = detections[i].position
                detections[i].position = do_transform_point(p, trans).point
                # transform velocity (vector3)
                v = Vector3Stamped()
                v.vector = detections[i].velocity
                detections[i].velocity = do_transform_vector3(v, trans).vector
                # transform size (vector3)
                s = Vector3Stamped()
                s.vector = detections[i].size
                detections[i].size = do_transform_vector3(s, trans).vector
        except LookupException:
            self.get_logger().info('fail to get tf from {} to {}'.format(
                msg.header.frame_id, self.global_frame))
            return

    # hungarian matching
    cost = np.zeros((num_of_obstacle, num_of_detect))
    for i in range(num_of_obstacle):
        for j in range(num_of_detect):
            cost[i, j] = self.obstacle_list[i].distance(detections[j])
    obs_ind, det_ind = linear_sum_assignment(cost)

    # filter assignment according to cost
    new_obs_ind = []
    new_det_ind = []
    for o, d in zip(obs_ind, det_ind):
        if cost[o, d] < self.cost_filter:
            new_obs_ind.append(o)
            new_det_ind.append(d)
    obs_ind = new_obs_ind
    det_ind = new_det_ind

    # kalman update
    for o, d in zip(obs_ind, det_ind):
        self.obstacle_list[o].correct(detections[d])

    # birth of new detection obstacles and death of disappear obstacle
    self.birth(det_ind, num_of_detect, detections)
    dead_object_list = self.death(obs_ind, num_of_obstacle)

    # apply velocity and height filter
    filtered_obstacle_list = []
    for obs in self.obstacle_list:
        obs_vel = np.linalg.norm(np.array(
            [obs.msg.velocity.x, obs.msg.velocity.y, obs.msg.velocity.z]))
        obs_height = obs.msg.position.z
        if obs_vel > self.vel_filter[0] and obs_vel < self.vel_filter[1] and \
                obs_height > self.height_filter[0] and \
                obs_height < self.height_filter[1]:
            filtered_obstacle_list.append(obs)

    # construct ObstacleArray
    if self.tracker_obstacle_pub.get_subscription_count() > 0:
        obstacle_array = ObstacleArray()
        obstacle_array.header = msg.header
        track_list = []
        for obs in filtered_obstacle_list:
            # do not publish obstacles with low speed
            track_list.append(obs.msg)
        obstacle_array.obstacles = track_list
        self.tracker_obstacle_pub.publish(obstacle_array)

    # rviz visualization
    if self.tracker_marker_pub.get_subscription_count() > 0:
        marker_array = MarkerArray()
        marker_list = []
        # add current active obstacles
        for obs in filtered_obstacle_list:
            obstacle_uuid = uuid.UUID(bytes=bytes(obs.msg.uuid.uuid))
            # encode id with rgb color
            (r, g, b) = colorsys.hsv_to_rgb(
                obstacle_uuid.int % 360 / 360., 1., 1.)
            # make a cube
            marker = Marker()
            marker.header = msg.header
            marker.ns = str(obstacle_uuid)
            marker.id = 0
            marker.type = 1  # CUBE
            marker.action = 0
            marker.color.a = 0.5
            marker.color.r = r
            marker.color.g = g
            marker.color.b = b
            marker.pose.position = obs.msg.position
            angle = np.arctan2(obs.msg.velocity.y, obs.msg.velocity.x)
            # BUG FIX: np.float was removed in NumPy 1.24; the builtin float
            # is the documented replacement.
            marker.pose.orientation.z = float(np.sin(angle / 2))
            marker.pose.orientation.w = float(np.cos(angle / 2))
            marker.scale = obs.msg.size
            marker_list.append(marker)
            # make an arrow
            arrow = Marker()
            arrow.header = msg.header
            arrow.ns = str(obstacle_uuid)
            arrow.id = 1
            arrow.type = 0
            arrow.action = 0
            arrow.color.a = 1.0
            arrow.color.r = r
            arrow.color.g = g
            arrow.color.b = b
            arrow.pose.position = obs.msg.position
            arrow.pose.orientation.z = float(np.sin(angle / 2))
            arrow.pose.orientation.w = float(np.cos(angle / 2))
            arrow.scale.x = np.linalg.norm(
                [obs.msg.velocity.x, obs.msg.velocity.y, obs.msg.velocity.z])
            arrow.scale.y = 0.05
            arrow.scale.z = 0.05
            marker_list.append(arrow)
        # add dead obstacles to delete in rviz
        # BUG FIX: the loop variable was named `uuid`, which made `uuid` a
        # function local and broke the `uuid.UUID(...)` call above with an
        # UnboundLocalError.
        for dead_uuid in dead_object_list:
            marker = Marker()
            marker.header = msg.header
            marker.ns = str(dead_uuid)
            marker.id = 0
            marker.action = 2  # delete
            arrow = Marker()
            arrow.header = msg.header
            arrow.ns = str(dead_uuid)
            arrow.id = 1
            arrow.action = 2
            marker_list.append(marker)
            marker_list.append(arrow)
        marker_array.markers = marker_list
        self.tracker_marker_pub.publish(marker_array)
def test_transform(self):
    """Exercise tf2_ros.Buffer round-trips and do_transform_vector3 for the
    rospy (ROS 1) API: lookup, point/pose transforms, and the rotation-only
    semantics of Vector3 transforms."""
    b = tf2_ros.Buffer()
    # 180-degree rotation about x plus unit x translation, frame a -> b.
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.rotation.x = 1
    t.header.stamp = rospy.Time(2.0)
    t.header.frame_id = 'a'
    t.child_frame_id = 'b'
    b.set_transform(t, 'eitan_rocks')
    out = b.lookup_transform('a', 'b', rospy.Time(2.0), rospy.Duration(2.0))
    self.assertEqual(out.transform.translation.x, 1)
    self.assertEqual(out.transform.rotation.x, 1)
    self.assertEqual(out.header.frame_id, 'a')
    self.assertEqual(out.child_frame_id, 'b')

    # Point: full transform (rotation + translation).
    v = PointStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.point.x = 1
    v.point.y = 2
    v.point.z = 3
    out = b.transform(v, 'b')
    self.assertEqual(out.point.x, 0)
    self.assertEqual(out.point.y, -2)
    self.assertEqual(out.point.z, -3)

    # Pose: position and orientation both transformed.
    v = PoseStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.pose.position.x = 1
    v.pose.position.y = 2
    v.pose.position.z = 3
    v.pose.orientation.x = 1
    out = b.transform(v, 'b')
    self.assertEqual(out.pose.position.x, 0)
    self.assertEqual(out.pose.position.y, -2)
    self.assertEqual(out.pose.position.z, -3)

    # Translation shouldn't affect Vector3
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.translation.y = 2
    t.transform.translation.z = 3
    t.transform.rotation.w = 1
    v = Vector3Stamped()
    v.vector.x = 1
    v.vector.y = 0
    v.vector.z = 0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, 1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)

    # Rotate Vector3 180 deg about y
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.translation.y = 2
    t.transform.translation.z = 3
    t.transform.rotation.y = 1
    v = Vector3Stamped()
    v.vector.x = 1
    v.vector.y = 0
    v.vector.z = 0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, -1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)
def test_transform(self):
    """Exercise tf2_ros.Buffer round-trips and do_transform_vector3 (rospy
    API, Quaternion keyword-constructor variant)."""
    b = tf2_ros.Buffer()
    # 180-degree rotation about x plus unit x translation, frame a -> b.
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.rotation = Quaternion(w=0, x=1, y=0, z=0)
    t.header.stamp = rospy.Time(2.0)
    t.header.frame_id = 'a'
    t.child_frame_id = 'b'
    b.set_transform(t, 'eitan_rocks')
    out = b.lookup_transform('a', 'b', rospy.Time(2.0), rospy.Duration(2.0))
    self.assertEqual(out.transform.translation.x, 1)
    self.assertEqual(out.transform.rotation.x, 1)
    self.assertEqual(out.header.frame_id, 'a')
    self.assertEqual(out.child_frame_id, 'b')

    v = PointStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.point.x = 1
    v.point.y = 2
    v.point.z = 3
    out = b.transform(v, 'b')
    self.assertEqual(out.point.x, 0)
    self.assertEqual(out.point.y, -2)
    self.assertEqual(out.point.z, -3)

    v = PoseStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.pose.position.x = 1
    v.pose.position.y = 2
    v.pose.position.z = 3
    v.pose.orientation = Quaternion(w=0, x=1, y=0, z=0)
    out = b.transform(v, 'b')
    self.assertEqual(out.pose.position.x, 0)
    self.assertEqual(out.pose.position.y, -2)
    self.assertEqual(out.pose.position.z, -3)

    # Translation shouldn't affect Vector3
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.translation.y = 2
    t.transform.translation.z = 3
    # BUG FIX: "Quaterion" was misspelled (twice below), raising NameError
    # as soon as this test ran.
    t.transform.rotation = Quaternion(w=1, x=0, y=0, z=0)
    v = Vector3Stamped()
    v.vector.x = 1
    v.vector.y = 0
    v.vector.z = 0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, 1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)

    # Rotate Vector3 180 deg about y
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.translation.y = 2
    t.transform.translation.z = 3
    t.transform.rotation = Quaternion(w=0, x=0, y=1, z=0)
    v = Vector3Stamped()
    v.vector.x = 1
    v.vector.y = 0
    v.vector.z = 0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, -1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)
def test_transform(self):
    """Exercise tf2_ros.Buffer round-trips plus twist, point, pose, vector
    and wrench transforms (rospy API)."""
    b = tf2_ros.Buffer()
    # 180-degree rotation about x plus unit x translation, frame a -> b.
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.rotation.x = 1
    t.header.stamp = rospy.Time(2.0)
    t.header.frame_id = 'a'
    t.child_frame_id = 'b'
    b.set_transform(t, 'eitan_rocks')
    out = b.lookup_transform('a', 'b', rospy.Time(2.0), rospy.Duration(2.0))
    self.assertEqual(out.transform.translation.x, 1)
    self.assertEqual(out.transform.rotation.x, 1)
    self.assertEqual(out.header.frame_id, 'a')
    self.assertEqual(out.child_frame_id, 'b')

    # Twist: linear and angular parts rotated.
    v = TwistStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.twist.linear.x = 0
    v.twist.linear.y = 1
    v.twist.linear.z = 0
    v.twist.angular.x = 0
    v.twist.angular.y = 1
    v.twist.angular.z = 0
    # out = do_transform_twist(v, t)
    out = b.transform(v, 'b')
    self.assertEqual(out.twist.linear.y, -1)
    self.assertEqual(out.twist.angular.y, -1)

    # Point: full transform (rotation + translation).
    v = PointStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.point.x = 1
    v.point.y = 2
    v.point.z = 3
    out = b.transform(v, 'b')
    self.assertEqual(out.point.x, 0)
    self.assertEqual(out.point.y, -2)
    self.assertEqual(out.point.z, -3)

    # Pose: position and orientation both transformed.
    v = PoseStamped()
    v.header.stamp = rospy.Time(2)
    v.header.frame_id = 'a'
    v.pose.position.x = 1
    v.pose.position.y = 2
    v.pose.position.z = 3
    v.pose.orientation.x = 1
    out = b.transform(v, 'b')
    self.assertEqual(out.pose.position.x, 0)
    self.assertEqual(out.pose.position.y, -2)
    self.assertEqual(out.pose.position.z, -3)

    # Translation shouldn't affect Vector3
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.translation.y = 2
    t.transform.translation.z = 3
    t.transform.rotation.w = 1
    v = Vector3Stamped()
    v.vector.x = 1
    v.vector.y = 0
    v.vector.z = 0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, 1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)

    # Rotate Vector3 180 deg about y
    t = TransformStamped()
    t.transform.translation.x = 1
    t.transform.translation.y = 2
    t.transform.translation.z = 3
    t.transform.rotation.y = 1
    v = Vector3Stamped()
    v.vector.x = 1
    v.vector.y = 0
    v.vector.z = 0
    out = tf2_geometry_msgs.do_transform_vector3(v, t)
    self.assertEqual(out.vector.x, -1)
    self.assertEqual(out.vector.y, 0)
    self.assertEqual(out.vector.z, 0)

    # Wrench: force and torque rotated (same 180-deg-about-y transform).
    v = WrenchStamped()
    v.wrench.force.x = 1
    v.wrench.force.y = 0
    v.wrench.force.z = 0
    v.wrench.torque.x = 1
    v.wrench.torque.y = 0
    v.wrench.torque.z = 0
    out = tf2_geometry_msgs.do_transform_wrench(v, t)
    self.assertEqual(out.wrench.force.x, -1)
    self.assertEqual(out.wrench.force.y, 0)
    self.assertEqual(out.wrench.force.z, 0)
    self.assertEqual(out.wrench.torque.x, -1)
    self.assertEqual(out.wrench.torque.y, 0)
    self.assertEqual(out.wrench.torque.z, 0)