def graspit_grasp_pose_to_moveit_grasp_pose(move_group_commander,
                                            listener,
                                            graspit_grasp_msg,
                                            grasp_frame='/approach_tran'):
    """
    :param move_group_commander: A move_group command from which to get the end effector link.
    :type move_group_commander: moveit_commander.MoveGroupCommander
    :param listener: A transformer for looking up the transformation
    :type listener: tf.TransformListener
    :param graspit_grasp_msg: A GraspIt! grasp message
    :type graspit_grasp_msg: graspit_msgs.msg.Grasp
    :param grasp_frame: The frame in which the GraspIt! grasp approach is expressed.
    :returns: The equivalent end-effector pose for MoveIt!
    """

    try:
        listener.waitForTransform(grasp_frame,
                                  move_group_commander.get_end_effector_link(),
                                  rospy.Time(0),
                                  timeout=rospy.Duration(1))
        at_to_ee_tran, at_to_ee_rot = listener.lookupTransform(
            grasp_frame, move_group_commander.get_end_effector_link(),
            rospy.Time())
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        rospy.logerr(
            "graspit_grasp_pose_to_moveit_grasp_pose::\n "
            "Failed to find transform from %s to %s" %
            (grasp_frame, move_group_commander.get_end_effector_link()))
        ipdb.set_trace()  # deliberate debug hook: halt here so the missing transform can be inspected

    graspit_grasp_msg_final_grasp_tran_matrix = tf_conversions.toMatrix(
        tf_conversions.fromMsg(graspit_grasp_msg.final_grasp_pose))
    approach_tran_to_end_effector_tran_matrix = \
        tf.TransformerROS().fromTranslationRotation(at_to_ee_tran, at_to_ee_rot)
    if move_group_commander.get_end_effector_link() == 'l_wrist_roll_link':
        rospy.logerr('This is a PR2\'s left arm so we have to rotate things.')
        pr2_is_weird_mat = tf.TransformerROS().fromTranslationRotation(
            [0, 0, 0],
            tf.transformations.quaternion_from_euler(0, math.pi / 2, 0))
    else:
        pr2_is_weird_mat = tf.TransformerROS().fromTranslationRotation(
            [0, 0, 0], [0, 0, 0, 1])  # identity; the original passed a 3-element quaternion, which is invalid
    actual_ee_pose_matrix = np.dot(graspit_grasp_msg_final_grasp_tran_matrix,
                                   approach_tran_to_end_effector_tran_matrix)
    actual_ee_pose_matrix = np.dot(actual_ee_pose_matrix, pr2_is_weird_mat)
    actual_ee_pose = tf_conversions.toMsg(
        tf_conversions.fromMatrix(actual_ee_pose_matrix))
    rospy.loginfo("actual_ee_pose: " + str(actual_ee_pose))
    return actual_ee_pose
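# A minimal usage sketch for the function above; the MoveIt! group name and the
# placeholder grasp message are assumptions, not part of the original example.
import graspit_msgs.msg
import moveit_commander
import rospy
import tf

rospy.init_node('grasp_conversion_example')
listener = tf.TransformListener()
group = moveit_commander.MoveGroupCommander('left_arm')  # hypothetical group name
grasp_msg = graspit_msgs.msg.Grasp()  # normally received from the GraspIt! planner
ee_pose = graspit_grasp_pose_to_moveit_grasp_pose(group, listener, grasp_msg)
group.set_pose_target(ee_pose)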
Example #2
def call_back(msg):
    # Global Frame: "/map"
    map_id = msg.header.frame_id
    br = tf.TransformBroadcaster()
    position = [
        msg.pose.pose.position.x, msg.pose.pose.position.y,
        msg.pose.pose.position.z
    ]
    quat = [
        msg.pose.pose.orientation.x, msg.pose.pose.orientation.y,
        msg.pose.pose.orientation.z, msg.pose.pose.orientation.w
    ]
    r, p, y = tf.transformations.euler_from_quaternion(quat)
    tfros = tf.TransformerROS()
    M = tfros.fromTranslationRotation(position, quat)
    M_inv = np.linalg.inv(M)
    trans = tf.transformations.translation_from_matrix(M_inv)
    rot = tf.transformations.quaternion_from_matrix(M_inv)
    t = rospy.Time.from_sec(0)
    #t = rospy.Time.now()
    br.sendTransform(trans, rot, t, map_id, "/X1/base_footprint")

    #quat = tf.transformations.quaternion_from_euler (-r, -p, -y)
    br.sendTransform((0, 0, 0),
                     (0, 0, 0, 1), rospy.Time.now(), "map", "X1/odom")
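# The matrix round trip above (fromTranslationRotation -> np.linalg.inv ->
# translation/quaternion_from_matrix) recurs in Examples #12 and #30 below.
# A small helper sketch that captures the pattern:
import numpy as np
import tf

def invert_transform(translation, quaternion):
    """Return (translation, quaternion) of the inverse of the given transform."""
    matrix = tf.TransformerROS().fromTranslationRotation(translation, quaternion)
    matrix_inv = np.linalg.inv(matrix)
    return (tf.transformations.translation_from_matrix(matrix_inv),
            tf.transformations.quaternion_from_matrix(matrix_inv))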
Example #3
    def __init__(self):
        #/vel_based_pos_traj_controller/
        self.client = actionlib.SimpleActionClient('icl_phri_ur5/follow_joint_trajectory', FollowJointTrajectoryAction)
        self.goal = FollowJointTrajectoryGoal()
        self.goal.trajectory = JointTrajectory()
        self.goal.trajectory.joint_names = JOINT_NAMES
        print("Waiting for server...")
        self.client.wait_for_server()
        print("Connected to server")
        joint_states = rospy.wait_for_message("joint_states", JointState)
        print(joint_states)
        joint_states = list(deepcopy(joint_states).position)
        del joint_states[-1]
        self.joints_pos_start = np.array(joint_states)
        print("Init done")
        self.listener = tf.TransformListener()
        self.Transformer = tf.TransformerROS()

        joint_weights = [12, 5, 4, 3, 2, 1]
        self.ik = InverseKinematicsUR5()
        self.ik.setJointWeights(joint_weights)
        self.ik.setJointLimits(-pi, pi)
        self.sub = rospy.Subscriber('/target_position', Int32MultiArray, self.pickplace_cb)
        self.sub_cancel = rospy.Subscriber('/voice_command', Int32, self.cancel_cb)

        self.gripper_ac = RobotiqActionClient('icl_phri_gripper/gripper_controller')
        self.gripper_ac.wait_for_server()
        self.gripper_ac.initiate()
        self.gripper_ac.send_goal(0.10)
        self.gripper_ac.wait_for_result()
        self.cancel = False
Example #4
    def setupNode(self):
        rospy.init_node("i3dr_set_tf_parent",
                        anonymous=True,
                        disable_signals=True)

        self.map_to_robot_base_tf = None
        self.tf_ready = False

        self.frame_4_id = rospy.get_param(
            '~frame_4_id', "base_link")  #base_link / ur10_base_link
        self.frame_3_id = rospy.get_param('~frame_3_id',
                                          "tool0")  #tool0 / end_effector
        self.frame_1_id = rospy.get_param('~frame_1_id', "map")
        self.frame_2_id = rospy.get_param('~frame_2_id',
                                          "i3dr_stereo_base_link")
        self.output_frame_1_id = rospy.get_param('~output_frame_1_id', "map")
        self.output_frame_2_id = rospy.get_param('~output_frame_2_id',
                                                 "ur10_base_link")
        self.update_mode = rospy.get_param('~update_mode', "continuous")
        self.service_name = rospy.get_param('~service_name',
                                            "i3dr_get_scan_home")

        if self.update_mode == "continuous":
            self.service_update = False
        elif self.update_mode == "service":
            self.service_update = True
        else:
            # fall back to continuous updates so service_update is always defined
            self.service_update = False

        if self.service_update:
            rospy.Service(self.service_name, Empty, self.handle_get_tfs)

        self.tfListener = tf.TransformListener()
        self.tfTransformer = tf.TransformerROS()
        self.tfBroadcaster = tf.TransformBroadcaster()
Example #5
    def __init__(self):
        # This function is called one time, as soon as an instance of the class is created
        #self.rate = 100 #[Hz]

        # Declaring a subscriber. Parameters: topic name (string), topic data type, callback function
        # Every time another node publishes a message to "/arbitrary_subscribed_topic_name", we will call the function called self.arbitrary_topic_name_cb
        # The "/" before the topic name is important

        #rospy.Subscriber("/usb_cam/image_raw", Image, self.CameraCalibration_cb) This was me testing
        #rospy.Subscriber("/toppings", DetectionArray, self.CameraCalibration_cb)
        #rospy.Subscriber("/self", DetectionArray, self.CameraCalibration_cb)

        # Declaring a publisher. Parameters: topic name (string), topic data type, queue_size (use queue_size=10 as default)
        #self.CameraCalibration_pub = rospy.Publisher("/worldxyz", Float32MultiArray, queue_size=10)

        # Do any other initialization of the class.
        # Scaling factors for the millimeter-to-pixel conversion (inverse focal lengths):
        self.tf_listener = tf.TransformListener()
        self.tf_transformer = tf.TransformerROS(True, rospy.Duration(10.0))

        self.sx = 1 / 642.5793848798857  #fx
        self.sy = 1 / 644.8809875234275  #fy

        self.cx = 320  #center of image in pixels
        self.cy = 240

        self.pixelUV = [320, 240]  # initialize pixel coords and orientation for testing
        self.pixelOrientation = radians(45)

        self.CameraAngle = radians(10)  # angle of camera lens relative to vertical
        self.CameraHeight = 839
        self.z0 = 839 / cos(self.CameraAngle)  # ~851.94 mm: absolute distance of camera lens from the table
        self.CameraLenstoOrigin = [271, 36.6, 52]  #in world frame
        #self.CameraLenstoOrigin = [-80 , 36.6 , 52]

        #Rotation is 180 minus camera angle about x
        self.rotationMatrixX = np.array(
            [[1, 0, 0],
             [0, cos(pi - self.CameraAngle), -sin(pi - self.CameraAngle)],
             [0, sin(pi - self.CameraAngle),
              cos(pi - self.CameraAngle)]]
        )  #rotation from world frame to camera frame -should just be a rotation about world x-axis
        #Rotate 90 about z
        self.rotationMatrixZ = np.array([[0, 1, 0], [-1, 0, 0], [0, 0, 1]])
        self.rotationMatrix = np.matmul(self.rotationMatrixZ,
                                        self.rotationMatrixX)
        #        self.rotationMatrix = np.array([[1,0,0],
        #                                        [0,1,0],
        #                                        [0,0,1]])

        self.translationVector = np.array([
            self.CameraLenstoOrigin[0], self.CameraLenstoOrigin[1],
            self.CameraLenstoOrigin[2]
        ])  #translation from world frame to camera frame
Example #6
    def __init__(self):
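        # TransformerROS(interpolating, cache_time): keep ten seconds of
        # transform history and interpolate between stamps when queried.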
        self.tf = tf.TransformerROS(True, rospy.Duration(10.0))
        # self.listener = tf.TransformListener()

        t1 = TransformStamped()
        # /alvar /nut_start
        t1.header.frame_id = '/ar_marker_17'
        t1.child_frame_id = '/nutStart'
        # t1.transform.translation = Vector3(-0.057, 0.050, -0.012)
        # t1.transform.translation = Vector3(-0.052, 0.164, -0.009)
        t1.transform.translation = Vector3(-0.052, 0.194, -0.009)
        qua_t1 = self.r_to_q([0, 0, -math.pi / 2], [0, 0, 0, 1])
        t1.transform.rotation = Quaternion(qua_t1[0], qua_t1[1], qua_t1[2],
                                           qua_t1[3])
        '''
        t2 = TransformStamped()
        # /camera /alvar
        (trans, rot) = self.listener.lookupTransform('/movo_camera_color_optical_frame', '/ar_marker_17', rospy.Time(0))
        t2.header.frame_id = '/movo_camera_color_optical_frame'
        t2.child_frame_id = '/ar_marker_17'
        t2.transform.translation = Vector3(trans[0], trans[1], trans[2])
        t2.transform.rotation = Quaternion(rot[0], rot[1], rot[2], rot[3])
        '''

        t3 = TransformStamped()
        # /left_ee /finger
        t3.header.frame_id = '/left_ee_link'
        t3.child_frame_id = '/finger'
        t3.transform.translation.x = 0.043
        t3.transform.translation.z = 0.02
        t3.transform.rotation = Quaternion(0, 0, 0, 1)

        self.tf.setTransform(t1)
        # self.tfROS.setTransform(t2)
        self.tf.setTransform(t3)
Example #7
 def __init__(self, time_, grippers):
     self.inputArgs = ['relativeTemp', 'absoluteTemp', 'self.mode']
     self.verificationArgs = ['self.taskDone']
     self.pointTime = time_  # height from world frame (i.e. table) and not yumi_base_link
     # Otherwise the frame is yumi_base_link, [Right, Left]; for combined, only right is used
     self.transformer = tf.TransformerROS(True, rospy.Duration(1.0))
     self.gripper = grippers  # in mm, [Right, left]
Example #8
    def __init__(self):
        # Variables
        self.node_name = rospy.get_name()
        self.img = None
        self.cv_bridge = CvBridge()
        self.totem_list = []  # (x, y, z, color)
        self.color_detect = ColorDetectHSV()
        self.wamv_position = None

        self.tf_listener = tf.TransformListener()
        self.tf_transformer = tf.TransformerROS()

        self.translation_lidar_camera = [0.0, 0.13, 0.5]
        self.quaternion_lidar_camera = [0, 0, -0.7068851, 0.7073284]
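        # the quaternion above is approximately a -90 degree yaw (rotation about z)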
        self.camera_matrix = [
            1400.69, 0.0, 971.463, 0.0, 1400.69, 535.668, 0.0, 0.0, 1.0
        ]
        #self.translation_lidar_camera =  [-0.350, 0.000, 0.291]
        #self.quaternion_lidar_camera =  [ 0, 0, -0.707, 0.707 ]
        #self.camera_matrix = [762.7249337622711, 0.0, 640.5, 0.0, 762.7249337622711, 360.5, 0.0, 0.0, 1.0]

        self.totem_circle_command = []
        self.start = False
        rospy.Timer(rospy.Duration(0.5), self.circle_path_planning)
        #self.add_wp = rospy.ServiceProxy("/add_waypoint", waypoint)
        #self.start_wp = rospy.ServiceProxy("/start_waypoint_nav", Trigger)

        # ROS services (each handle kept in its own attribute so earlier ones are not overwritten)
        self.srv_totem_circle = rospy.Service('~set_totem_cirle',
                                              SetTotemCircle,
                                              self.cb_srv_totem_circle)
        self.srv_clear = rospy.Service('~clear', Trigger, self.cb_srv_clear)
        self.srv_start = rospy.Service('~start', Trigger, self.cb_srv_start)

        # Publisher
        self.pub_image_roi = rospy.Publisher("~image_roi", Image, queue_size=1)
        self.pub_image_roi_compressed = rospy.Publisher(
            "~image_roi/compressed", CompressedImage, queue_size=1)
        self.pub_image_mask = rospy.Publisher("~image_mask",
                                              Image,
                                              queue_size=1)
        self.pub_marker = rospy.Publisher("~totem_marker",
                                          MarkerArray,
                                          queue_size=1)

        # Subscriber
        self.sub_image = rospy.Subscriber("~image_raw_compressed",
                                          CompressedImage,
                                          self.cb_image,
                                          queue_size=1)
        self.sub_obj_list = rospy.Subscriber("~obj_list",
                                             ObjectPoseList,
                                             self.cb_obj_list,
                                             queue_size=1)
        self.sub_odometry = rospy.Subscriber("/odometry/filtered",
                                             Odometry,
                                             self.cb_odometry,
                                             queue_size=1)
Example #9
        def getState(self, posData):
                # odomEstimate = self.getOdomEstimate()
                # velxEstimate = odomEstimate.twist.twist.linear.x
                # velyEstimate = odomEstimate.twist.twist.linear.y

                pose = posData.pose[1]
                position = pose.position
                orientation = pose.orientation

                t = tf.TransformerROS(True, rospy.Duration(10.0))
                m = TransformStamped()
                m.header.frame_id = 'world'
                m.child_frame_id = 'base_link'
                m.transform.translation.x = position.x
                m.transform.translation.y = position.y
                m.transform.translation.z = position.z
                m.transform.rotation.x = orientation.x
                m.transform.rotation.y = orientation.y
                m.transform.rotation.z = orientation.z
                m.transform.rotation.w = orientation.w

                (roll, pitch, yaw) = tf.transformations.euler_from_quaternion([orientation.x, orientation.y, orientation.z, orientation.w])
                t.setTransform(m)

                velx = posData.twist[1].linear.x
                vely = posData.twist[1].linear.y
                velz = posData.twist[1].linear.z

                # Transform to body frame velocities
                velVector = Vector3Stamped()
                velVector.vector.x = velx
                velVector.vector.y = vely
                velVector.vector.z = velz
                velVector.header.frame_id = "world"
                # velVectorTransformed = self.tl.transformVector3("base_link", velVector)
                velVectorTransformed = t.transformVector3("base_link", velVector)
                velxBody = velVectorTransformed.vector.x
                velyBody = velVectorTransformed.vector.y
                velzBody = velVectorTransformed.vector.z

                carTangentialSpeed = math.sqrt(velx ** 2 + vely ** 2)
                carAngularVel = posData.twist[1].angular.z

                stateInfo = {
                        "x": position.x, "y": position.y,
                        "i": orientation.x, "j": orientation.y,
                        "k": orientation.z, "w": orientation.w,
                        "xdot": velx, "ydot": vely,
                        "thetadot": carAngularVel,
                        "s": carTangentialSpeed,
                        "xdotbodyframe": velxBody, "ydotbodyframe": velyBody
                }

                state = []
                for key in self.state_info:
                        state.append(stateInfo[key])

                #rewardState = [stateInfo["thetadot"], stateInfo["xdotbodyframe"], stateInfo["ydotbodyframe"]]

                return np.array(state)
Example #10
    def __init__(self):
        print('creating Turtlebot')
        self.transform = tf.TransformerROS()

        self.pose = Pose()
        pos = Point()
        quarter = Quaternion()
        self.pose.position.x = 1.23
        self.pose.position.y = 0.92
        self.pose.position.z = 0
        self.path = []

        self.initPos = [0, 0, 0]
        self.initOrient = [0, 0, 0, 0]

        self.hopeful = [0, 0, 0]

        self.isInit = False

        self.pub = rospy.Publisher('cmd_vel_mux/input/teleop',
                                   Twist,
                                   None,
                                   queue_size=10)
        self.goal_sub = rospy.Subscriber('goal_pose',
                                         PoseStamped,
                                         self.executeAStar,
                                         queue_size=1)
        #self.bump_sub = rospy.Subscriber('mobile_base/events/bumper', BumperEvent, self.readBumper, queue_size=1)
        #self.odom_sub = rospy.Subscriber('/odom', Odometry, self.odometryCallback)
        self.odom_tfList = tf.TransformListener()
        self.odom_broad = tf.TransformBroadcaster()
        rospy.Timer(rospy.Duration(.01), self.odometryCallback)

        print('Completed making Turtlebot')
Example #11
    def call_back(self, msg):
        obj_list = msg
        cluster_num = obj_list.size
        for i in range(cluster_num):
            tf_points = obj_list.list[i].pcl_points
            centroids = obj_list.list[i].position
            self.image = np.zeros((int(self.height), int(self.width), 3),
                                  np.uint8)
            plane_xy = []
            plane_yz = []
            plane_xz = []
            pcl_size = len(tf_points.poses)

            # ======= Coordinate transform for better projection performance ======
            position = [0, 0, 0]
            rad = math.atan2(centroids.y, centroids.x)
            quaternion = tf.transformations.quaternion_from_euler(0., 0., -rad)
            transformer = tf.TransformerROS()
            transpose_matrix = transformer.fromTranslationRotation(
                position, quaternion)
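            # (TransformerROS is used here purely as a convenience for building the
            # 4x4 homogeneous matrix; despite its name, transpose_matrix holds that
            # transform, not a transpose)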
            for m in range(pcl_size):
                new_x = tf_points.poses[m].position.x
                new_y = tf_points.poses[m].position.y
                new_z = tf_points.poses[m].position.z
                orig_point = np.array([new_x, new_y, new_z, 1])
                new_center = np.dot(transpose_matrix, orig_point)
                tf_points.poses[m].position.x = new_center[0]
                tf_points.poses[m].position.y = new_center[1]
                tf_points.poses[m].position.z = new_center[2]

            # ======= project to XY, YZ, XZ plane =======
            for j in range(pcl_size):
                plane_xy.append([
                    tf_points.poses[j].position.x,
                    tf_points.poses[j].position.y
                ])
                plane_yz.append([
                    tf_points.poses[j].position.y,
                    tf_points.poses[j].position.z
                ])
                plane_xz.append([
                    tf_points.poses[j].position.x,
                    tf_points.poses[j].position.z
                ])
            self.toIMG(pcl_size, plane_xy, 'xy')
            self.toIMG(pcl_size, plane_yz, 'yz')
            self.toIMG(pcl_size, plane_xz, 'xz')
            model_type = self.classify()

            # ***************************************************************
            # Add to object list
            # ***************************************************************
            obj_list.list[i].type = model_type
            #cv2.imwrite( "Image" + str(self.index) + ".jpg", self.image)
            #self.index = self.index + 1
            #print "Save image"
        self.pub_obj.publish(obj_list)
        self.drawRviz(obj_list)
Example #12
    def cbOdom(self, msg):
        # Global Frame: "/map"
        # map_id = msg.header.frame_id
        # br = tf.TransformBroadcaster()
        position = [
            msg.pose.pose.position.x, msg.pose.pose.position.y,
            msg.pose.pose.position.z
        ]
        quat = [
            msg.pose.pose.orientation.x, msg.pose.pose.orientation.y,
            msg.pose.pose.orientation.z, msg.pose.pose.orientation.w
        ]
        r, p, y = tf.transformations.euler_from_quaternion(quat)
        tfros = tf.TransformerROS()
        M = tfros.fromTranslationRotation(position, quat)
        # M_inv =  np.linalg.inv(M)
        self.odom_trans = tf.transformations.translation_from_matrix(M)
        self.odom_rot = tf.transformations.quaternion_from_matrix(M)
        # t = rospy.Time.from_sec(0)
        # t = rospy.Time.now()
        # br.sendTransform(trans, rot, t, "/robot_base", map_id)

        #quat = tf.transformations.quaternion_from_euler (-r, -p, -y)
        # br.sendTransform((0, 0, 0), \
        # 				 (0, 0, 0, 1), rospy.Time.now(), "/laser", "/robot_base")
        self.get_odom = True
Example #13
    def __init__(self):
        #/vel_based_pos_traj_controller/
        self.client = actionlib.SimpleActionClient(
            'icl_phri_ur5/follow_joint_trajectory',
            FollowJointTrajectoryAction)
        self.goal = FollowJointTrajectoryGoal()
        self.goal.trajectory = JointTrajectory()
        self.goal.trajectory.joint_names = JOINT_NAMES
        print("Waiting for server...")
        self.client.wait_for_server()
        print("Connected to server")
        joint_states = rospy.wait_for_message("joint_states", JointState)
        print(joint_states)
        joint_states = list(deepcopy(joint_states).position)
        del joint_states[-1]
        self.joints_pos_start = np.array(joint_states)
        print("Init done")
        self.listener = tf.TransformListener()
        self.Transformer = tf.TransformerROS()

        joint_weights = [12, 5, 4, 3, 2, 1]
        self.ik = InverseKinematicsUR5()
        self.ik.setJointWeights(joint_weights)
        self.ik.setJointLimits(-pi, pi)

        # self.sub = rospy.Subscriber('/target_position', Int32MultiArray, self.pickplace_cb)
        # pickplace_sub = message_filters.Subscriber('pick', Int32)
        # tag_sub = message_filters.Subscriber('tag', Int32)

        # ts = message_filters.ApproximateTimeSynchronizer([pickplace_sub, tag_sub], 10, 0.1, allow_headerless=True)
        # ts.registerCallback(self.pickplace_cb)

        self.sub_pickplace = rospy.Subscriber('/CHOICE', String,
                                              self.pickplace_cb)
Example #14
	def __init__(self):
		self.rate = 100 #[Hz]

		self.CameraAngle = None  # math.radians(self.CAMERA_TILT): angle of camera lens relative to vertical
		self.z0 = 0.0
		self.CameraHeight = 839

		rospy.Subscriber("/usb_cam/image_raw", Image, self.image_cb) # Need to use rectified image instead
		self.toppings_pub = rospy.Publisher("/toppings", DetectionArray, queue_size=10)
		self.slots_pub = rospy.Publisher("/slots", DetectionArray, queue_size=10)

		rospy.Subscriber("/camera_angle", Float32, self.camera_angle_cb, queue_size=1)

		self.bridge = CvBridge()
		self.image_dims = []


		# Camera_Calibration Parameters
		self.tf_listener = tf.TransformListener()
		self.tf_broadcaster = tf.TransformBroadcaster()

		rospy.sleep(0.2)

		self.tf_transformer = tf.TransformerROS(True, rospy.Duration(10.0))

		self.sx = 1/642.5793848798857     #fx
		self.sy = 1/644.8809875234275     #fy

		self.cx = 185 #center of image in pixels
		self.cy = 230  

		'''self.pixelUV = [320,240] #initialize pixel coords and orientation for testing
Example #15
    def test_transformer_ros(self):
        tr = tf.TransformerROS()

        m = geometry_msgs.msg.TransformStamped()
        m.header.stamp = rospy.Time().from_sec(3.0)
        m.header.frame_id = "PARENT"
        m.child_frame_id = "THISFRAME"
        m.transform.translation.y = 5.0
        m.transform.rotation.x = 0.04997917
        m.transform.rotation.y = 0
        m.transform.rotation.z = 0
        m.transform.rotation.w = 0.99875026
        tr.setTransform(m)
        m.header.stamp = rospy.Time().from_sec(5.0)
        tr.setTransform(m)

        # Smoke the various transform* methods

        types = [ "Point", "Pose", "Quaternion", "Vector3" ]
        for t in types:
            msg = getattr(geometry_msgs.msg, "%sStamped" % t)()
            msg.header.frame_id = "THISFRAME"
            msg_t = getattr(tr, "transform%s" % t)("PARENT", msg)
            self.assertEqual(msg_t.header.frame_id, "PARENT")

        # PointCloud is a bit different, so smoke is different

        msg = sensor_msgs.msg.PointCloud()
        msg.header.frame_id = "THISFRAME"
        msg.points = [geometry_msgs.msg.Point32(1,2,3)]
        xmsg = tr.transformPointCloud("PARENT", msg)
        self.assertEqual(xmsg.header.frame_id, "PARENT")
        self.assertEqual(len(msg.points), len(xmsg.points))
        self.assertNotEqual(msg.points[0], xmsg.points[0])

        """
        Two fixed quaternions, a small twist around X concatenated.

        >>> t.quaternion_from_euler(0.1, 0, 0)
        array([ 0.04997917,  0.        ,  0.        ,  0.99875026])
        >>> t.quaternion_from_euler(0.2, 0, 0)
        array([ 0.09983342,  0.        ,  0.        ,  0.99500417])
        """

        # Specific test for quaternion types

        msg = geometry_msgs.msg.QuaternionStamped()
        q = [ 0.04997917,  0.        ,  0.        ,  0.99875026 ]
        msg.quaternion.x = q[0]
        msg.quaternion.y = q[1]
        msg.quaternion.z = q[2]
        msg.quaternion.w = q[3]
        msg.header.stamp = rospy.Time().from_sec(3.0)
        msg.header.frame_id = "THISFRAME"
        msg_t = tr.transformQuaternion("PARENT", msg)
        self.assertEqual(msg_t.header.frame_id, "PARENT")
        for a,v in zip("xyzw", [ 0.09983342,  0.        ,  0.        ,  0.99500417]):
            self.assertAlmostEqual(v,
                                   getattr(msg_t.quaternion, a),
                                   4)
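# The expected values in the quaternion test follow from composing the parent
# rotation with the message quaternion: two 0.1 rad twists about X make a
# 0.2 rad twist. A quick check of that arithmetic (a sketch, not part of the
# original test):
import tf.transformations as t

q_01 = t.quaternion_from_euler(0.1, 0, 0)  # [0.04997917, 0, 0, 0.99875026]
q_02 = t.quaternion_multiply(q_01, q_01)   # compose the two rotations
print(q_02)  # ~[0.09983342, 0, 0, 0.99500417], matching the asserted values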
Example #16
    def __init__(self):
        self.pub = rospy.Publisher('cmd_vel_mux/input/teleop',
                                   Twist,
                                   None,
                                   queue_size=10)
        #self.bump_sub = rospy.Subscriber('mobile_base/events/bumper', BumperEvent, self.readBumper, queue_size=1)
        self.goal_sub = rospy.Subscriber('move_base_simple/goal',
                                         PoseStamped,
                                         self.navToPose,
                                         queue_size=1)
        self.odom_sub = rospy.Subscriber('/odom', Odometry, self.readOdom)
        self.odom_lis = tf.TransformListener()
        self.odom_bro = tf.TransformBroadcaster()
        self.odom_bro.sendTransform((0, 0, 0), (0, 0, 0, 1), rospy.Time.now(),
                                    "base_footprint", "odom")

        rospy.sleep(1.0)  # the original read bare `rospy.sleep` (a no-op); a one-second pause is assumed here

        self.trans = tf.TransformerROS()

        self.posn = [0, 0, 0]
        self.desired = [0, 0, 0]

        self.kp_lin = 1
        self.kp_ang = 1

        self.map = Map()
Example #17
    def __init__(self, bag):
        """
        Create a new BagTfTransformer from an open rosbag or from a file path

        :param bag: an open rosbag or a file path to a rosbag file
        """
        if type(bag) == str:
            bag = rosbag.Bag(bag)
        self.tf_messages = sorted((self._remove_slash_from_frames(tm)
                                   for m in bag if m.topic.strip("/") == 'tf'
                                   for tm in m.message.transforms),
                                  key=lambda tfm: tfm.header.stamp.to_nsec())
        self.tf_static_messages = sorted(
            (self._remove_slash_from_frames(tm)
             for m in bag if m.topic.strip("/") == 'tf_static'
             for tm in m.message.transforms),
            key=lambda tfm: tfm.header.stamp.to_nsec())

        self.tf_times = np.array(
            list((tfm.header.stamp.to_nsec() for tfm in self.tf_messages)))
        self.transformer = tf.TransformerROS()
        self.last_population_range = (rospy.Time(0), rospy.Time(0))
        self.all_frames = None
        self.all_transform_tuples = None
        self.static_transform_tuples = None
Example #18
    def check_transform(self, event):
        t_base_marker = self.getTransform("/arips_base", "/ar_marker_0")

        # print("t_base_marker", t_base_marker)

        t_arm_tool0 = self.getTransform("/base_link", "/tool0")
        t_tool0_marker = tf.TransformerROS().fromTranslationRotation([-0.0475, 0.001, 0.03], [0, 0, 0, 1])
        t_arm_marker = np.matmul(t_arm_tool0, t_tool0_marker)
        t_marker_arm = np.linalg.inv(t_arm_marker)

        t_base_arm_corrected = np.matmul(t_base_marker, t_marker_arm)

        np.set_printoptions(formatter={'float': lambda x: "{0:0.4f}".format(x)})

        # print("t_arm_base_corrected")
        # print(t_arm_base_corrected)

        trans = tf.transformations.translation_from_matrix(t_base_arm_corrected)
        quat = tf.transformations.quaternion_from_matrix(t_base_arm_corrected)

        print("t_base_arm_corrected")
        print(t_base_arm_corrected)

        print("transform")
        print(trans, quat)
        print()
Example #19
  def getClosestKey(self, req):
    query = req.query
    keys = self.r.keys(query)
    keys = list(filter(lambda x: '/pose' in x and 'target' not in x, keys))

    min_distance = float('inf')
    min_key = None
    for key in keys:
      pose = PoseStamped()
      pose.header.frame_id = self.fixed_frame
      pose.header.stamp = rospy.Time.now()

      pose.pose = self.readPose(key)

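      # Note: a freshly constructed TransformerROS holds no transforms, so this
      # transformPose call only succeeds when pose.header.frame_id is already
      # '/map' (i.e. when self.fixed_frame == '/map').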
      t = tf.TransformerROS()
      p = t.transformPose('/map', pose)

      distance = sqrt(p.pose.position.x**2 + p.pose.position.y**2 + p.pose.position.z**2)
      if distance < min_distance:
        min_distance = distance
        min_key = key
    
    min_key = min_key.replace('/pose', '')
    rospy.loginfo("Key: " + min_key)

    return GetKeyResponse(min_key)
Example #20
	def __init__(self):
		print("making robot")
		self.trans = tf.TransformerROS()

		self.pose = Pose()
		posn = Point()
		quat = Quaternion()
		self.pose.position = posn
		self.pose.orientation = quat
		self.pose.position.x = 0
		self.pose.position.y = 0
		self.pose.position.z = 0

		self.initpos = [0,0,0]
		self.initort = [0,0,0,0]

		self.desired = [0,0,0]

		self.kp_lin = .5
		self.kp_ang = .5

		self.gotInit = False

		self.pub = rospy.Publisher('cmd_vel_mux/input/teleop', Twist, None, queue_size=10)
		#self.bump_sub = rospy.Subscriber('mobile_base/events/bumper', BumperEvent, self.readBumper, queue_size=1)
		self.odom_sub = rospy.Subscriber('/odom', Odometry, self.readOdom)
		self.odom_lis = tf.TransformListener()
		self.odom_bro = tf.TransformBroadcaster()

		self.doWavefront()

		self.goal_sub = rospy.Subscriber('move_base_simple/goal', PoseStamped, self.doAstar, queue_size=1)

		print("made robot")
Example #21
    def __init__(self):
        rospy.init_node('spacenav_remap')

        self.linear_gain = 500
        self.angular_gain = 500

        self.behavior = rospy.get_param('~behavior', 'wrench')
        behaviors = ['wrench', 'velocity', 'position']
        assert self.behavior in behaviors, "Parameter 'behavior' invalid, must be in {}".format(
            behaviors)

        if self.behavior == 'wrench':
            self.wrench_pub = rospy.Publisher('/wrench',
                                              WrenchStamped,
                                              queue_size=1)
        else:
            raise NotImplementedError(
                "Velocity and position control by mouse are not yet supported")

        self.transformer = tf.TransformerROS()
        self.world_to_body = np.eye(3)
        self.odom_sub = rospy.Subscriber('/truth/odom',
                                         nav_msgs.Odometry,
                                         self.odom_cb,
                                         queue_size=1)
        self.twist_sub = rospy.Subscriber('/spacenav/twist',
                                          Twist,
                                          self.twist_cb,
                                          queue_size=1)
Example #22
    def convert_cloud_camera_to_map(self, cloud, transform):
        target_camera_frame = "/map"
        starting_camera_frame = cloud.header.frame_id

        transformation_store = tf.TransformerROS()
        transformation_store.setTransform(transform)

        t = transformation_store.getLatestCommonTime(target_camera_frame, starting_camera_frame)
        tr_r = transformation_store.lookupTransform(target_camera_frame, starting_camera_frame, t)

        tr = Transform()
        tr.translation = Vector3(tr_r[0][0], tr_r[0][1], tr_r[0][2])
        tr.rotation = Quaternion(tr_r[1][0], tr_r[1][1], tr_r[1][2], tr_r[1][3])

        tr_s = TransformStamped()
        tr_s.header = std_msgs.msg.Header()
        tr_s.header.stamp = rospy.Time.now()
        tr_s.header.frame_id = target_camera_frame
        tr_s.child_frame_id = starting_camera_frame
        tr_s.transform = tr


        t_kdl = self.transform_to_kdl(tr_s)
        points_out = []
        for p_in in pc2.read_points(cloud, field_names=["x", "y", "z", "rgb"]):
            p_out = t_kdl * PyKDL.Vector(p_in[0], p_in[1], p_in[2])
            points_out.append([p_out[0], p_out[1], p_out[2], p_in[3]])

        res = pc2.create_cloud(cloud.header, cloud.fields, points_out)
        rospy.loginfo("done")
        return res
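# Example #22 calls self.transform_to_kdl, which is not shown. A minimal sketch
# following the usual tf2/PyKDL conversion pattern (an assumption, not
# necessarily the author's implementation):
import PyKDL

def transform_to_kdl(t):
    """Convert a geometry_msgs TransformStamped into a PyKDL.Frame."""
    return PyKDL.Frame(
        PyKDL.Rotation.Quaternion(t.transform.rotation.x, t.transform.rotation.y,
                                  t.transform.rotation.z, t.transform.rotation.w),
        PyKDL.Vector(t.transform.translation.x, t.transform.translation.y,
                     t.transform.translation.z))

# Also note: create_cloud() above reuses cloud.header, so the returned cloud
# still carries the camera frame_id even though its points are now in /map.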
Example #23
    def __init__(self):

        init_node("qr_code_manager", anonymous=True)

        self.markers = dict()
        self.init_markers()

        self.pub = Publisher("/initialpose",
                             PoseWithCovarianceStamped,
                             queue_size=5)

        self.stamp_string_pub = Publisher(
            "/visp_auto_tracker/code_message_stamped",
            StringStamped,
            queue_size=5)

        self.stamp_string_sub = Subscriber("/visp_auto_tracker/code_message",
                                           String, self.stamp_string_callback)
        self.stamp_position_sub = Subscriber(
            "/visp_auto_tracker/object_position_covariance",
            PoseWithCovarianceStamped, self.stamp_position_callback)

        self.code_stamped = StringStamped()
        self.position_stamped = PoseWithCovarianceStamped()

        self.baseLinkFrameId = rospy.get_param('~base_link_frame_id',
                                               '/base_link')
        self.mapFrameId = rospy.get_param('~map_frame_id', '/map')

        self.tr = tf.TransformerROS()
        self.tl = tf.TransformListener()

        self.d = {}
Example #24
    def __init__(self):

        rospy.Subscriber('/tf', tf2_msgs.msg.TFMessage, self.frame_handler)

        self.tf_baxter = tf.TransformerROS()

        self.trans_left = None
        self.trans_right = None
Example #25
 def __init__(self, position, orientation, fixtureHeight, fixtureRadius):
     self.position = position
     self.orientation = orientation
     self.fixtureHeight = fixtureHeight
     self.fixtureRadius = fixtureRadius
     transformer = tf.TransformerROS(True, rospy.Duration(0.1))
     tfMatrix1 = transformer.fromTranslationRotation(translation=position,
                                                     rotation=orientation)
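     # position2 is the fixture centre offset 25 mm along the fixture's local
     # x-axis, obtained by transforming the homogeneous point [0.025, 0, 0, 1]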
     self.position2 = tfMatrix1.dot(np.array([0.025, 0, 0, 1]))[0:3]
Example #26
 def __init__(self, side):
     self.side = side
     self.last_timestamp_used = 0.
     self.jMt_calib_transformations = pickle.load(
         open('tMj_' + self.side + '_hand.pickle', 'rb'))  # binary mode: pickle files must be opened as 'rb'
     self.tf_pub = tf2_ros.TransformBroadcaster()
     self.tf_transformer = tf.TransformerROS()
     rospy.Subscriber('/agimus/vision/tags', TransformStamped,
                      self.republishJointPose)
Example #27
    def __init__(self):
        '''
        Allow the user to move a "ghost" of the sub in Rviz, and then command a pose and orientation.

        When determining left and right, the wire faces away from you.
        '''
        rospy.init_node('spacenav_positioning')

        if len(sys.argv) > 1 and sys.argv[1] in ('2d', '3d'):
            self.mode = sys.argv[1]
        else:
            rospy.loginfo('Invalid mode - Defaulting to 2D')
            self.mode = '2d'

        self.distance_marker = Marker()
        self.distance_marker.type = self.distance_marker.TEXT_VIEW_FACING
        self.distance_marker.color.r = 1
        self.distance_marker.color.b = 1
        self.distance_marker.color.g = 1
        self.distance_marker.color.a = 1
        self.distance_marker.scale.z = 0.1

        self.transformer = tf.TransformerROS()
        self.world_to_body = np.eye(3)

        self.cur_position = None
        self.cur_orientation = None

        self.diff_position = 0
        self.target_depth = 0
        self.target_distance = 0

        self.target_position = np.zeros(3)
        self.target_orientation = np.eye(3)
        self.target_orientation_quaternion = np.array([0.0, 0.0, 0.0, 1.0])

        self.client = actionlib.SimpleActionClient('/moveto',
                                                   mil_msgs.MoveToAction)
        # self.client.wait_for_server()

        self.target_pose_pub = rospy.Publisher('/posegoal',
                                               PoseStamped,
                                               queue_size=1)
        self.target_distance_pub = rospy.Publisher('/pose_distance',
                                                   Marker,
                                                   queue_size=1)
        self.odom_sub = rospy.Subscriber('/odom',
                                         nav_msgs.Odometry,
                                         self.odom_cb,
                                         queue_size=1)
        self.twist_sub = rospy.Subscriber('/spacenav/twist',
                                          Twist,
                                          self.twist_cb,
                                          queue_size=1)
        self.joy_sub = rospy.Subscriber('/spacenav/joy',
                                        Joy,
                                        self.joy_cb,
                                        queue_size=1)
Example #28
def listener(fname, recreate_tf_static=True):
    """
    Prepare the subscribers and setup the plot etc
    :param fname: The name of the file to start
    :param recreate_tf_static: True if the tf_static message saved in data/tf_static_dump.pkl should be loaded.
    This needs to be set to True if the bag that will be played does not contain the single tf_static msg that is
    usually right at the beginning of the bag file.
    """
    global visuals, transformer
    rospy.init_node('listener_laser_scan', anonymous=True)

    transformer = tf.TransformerROS(True)
    # print("FrameList:\t" + transformer.allFramesAsString())
    # Start the subscriber(s)
    rospy.Subscriber("tracked_objects/scan", TrackedLaserScan,
                     callback_tracking)  # General subscriber (tracking data)

    # Currently using _static here because plotting should happen upon receiving lidar data (incl c2x plotting)
    rospy.Subscriber("tf", TFMessage,
                     callback_tf_static)  # Acquire transform messages

    rospy.Subscriber("/FASCarE_ROS_Interface/car2x_objects",
                     TrackedOrientedBoxArray, callback_c2x_tf)
    rospy.Subscriber("tf_static", TFMessage,
                     callback_tf_static)  # Acquire static transform message for "ibeo" frames
    # rospy.Subscriber("tracked_objects/scan", TrackedLaserScan, callback_org_data)

    # now start a rosbag play for that filename
    FNULL = open(os.devnull,
                 'w')  # redirect rosbag play output to devnull to suppress it

    play_rate = 0.15  # set the number to whatever you want your play-rate to be
    # play_rate = 1
    rate = '-r' + str(play_rate)
    # using '-r 1' is the usual playback speed - this works, but since the code lags behind (cant process everything
    # in realtime), you will then get results after the bag finished playing (cached results)
    # using '-r 0.25' is still too fast for maven-1.bag
    # using '-r 0.2' works (bag finishes and no more associations are made on buffered data afterwards)

    start_time = 0  # time at which the bag should start playing
    time = '-s ' + str(start_time)

    if recreate_tf_static:
        pkl_filename = "./src/T2TF_SST/data/"  # folder
        pkl_filename += "tf_static_dump.pkl"  # filename
        with open(pkl_filename, 'rb') as pklinput:
            tf_static_data = pickle.load(pklinput)
            callback_tf_static(tf_static_data)

    player_proc = subprocess.Popen(['rosbag', 'play', rate, time, fname],
                                   cwd="./",
                                   stdout=FNULL)

    plt.show()  # DON'T NEED TO SPIN IF YOU HAVE A BLOCKING plt.show

    # Kill the process (if it was started)
    player_proc.terminate()
Example #29
 def __init__(self):
     self.tf_listener = tf.TransformListener()
     self.Transformer = tf.TransformerROS()
     self.ee_link = '/ee_link'
     self.base_link = '/base_link'
     self.ik = InverseKinematicsUR5()
     self.ik.setEERotationOffsetROS()
     self.ik.setJointWeights([6, 5, 4, 3, 2, 1])
     self.ik.setJointLimits(-pi, pi)
Example #30
 def pub_tf(self, position, quaternion):
     r, p, y = tf.transformations.euler_from_quaternion(quaternion)
     tfros = tf.TransformerROS()
     M = tfros.fromTranslationRotation(position, quaternion)
     M_inv = np.linalg.inv(M)
     trans = tf.transformations.translation_from_matrix(M_inv)
     rot = tf.transformations.quaternion_from_matrix(M_inv)
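     # Same matrix-inversion pattern as Example #2 (see the invert_transform
     # sketch there); publishing the inverse with "map" as the child frame
     # attaches the map frame under X1/base_footprint in the tf tree.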
     self.br.sendTransform(trans, rot, rospy.Time.now(), "map",
                           "/X1/base_footprint")